author:    GitLab Bot <gitlab-bot@gitlab.com>  2021-04-20 23:50:22 +0000
committer: GitLab Bot <gitlab-bot@gitlab.com>  2021-04-20 23:50:22 +0000
commit:    9dc93a4519d9d5d7be48ff274127136236a3adb3 (patch)
tree:      70467ae3692a0e35e5ea56bcb803eb512a10bedb /spec
parent:    4b0f34b6d759d6299322b3a54453e930c6121ff0 (diff)
download:  gitlab-ce-9dc93a4519d9d5d7be48ff274127136236a3adb3.tar.gz
Add latest changes from gitlab-org/gitlab@13-11-stable-ee (tag: v13.11.0-rc43)
Diffstat (limited to 'spec')
-rw-r--r--spec/benchmarks/banzai_benchmark.rb7
-rw-r--r--spec/config/mail_room_spec.rb37
-rw-r--r--spec/config/metrics/aggregates/aggregated_metrics_spec.rb (renamed from spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb)0
-rw-r--r--spec/controllers/admin/application_settings_controller_spec.rb9
-rw-r--r--spec/controllers/admin/clusters_controller_spec.rb14
-rw-r--r--spec/controllers/admin/dev_ops_report_controller_spec.rb8
-rw-r--r--spec/controllers/admin/groups_controller_spec.rb6
-rw-r--r--spec/controllers/admin/impersonations_controller_spec.rb2
-rw-r--r--spec/controllers/admin/runners_controller_spec.rb4
-rw-r--r--spec/controllers/admin/services_controller_spec.rb2
-rw-r--r--spec/controllers/application_controller_spec.rb4
-rw-r--r--spec/controllers/boards/issues_controller_spec.rb2
-rw-r--r--spec/controllers/chaos_controller_spec.rb22
-rw-r--r--spec/controllers/concerns/enforces_admin_authentication_spec.rb6
-rw-r--r--spec/controllers/concerns/redis_tracking_spec.rb32
-rw-r--r--spec/controllers/concerns/renders_commits_spec.rb12
-rw-r--r--spec/controllers/dashboard/snippets_controller_spec.rb19
-rw-r--r--spec/controllers/explore/snippets_controller_spec.rb4
-rw-r--r--spec/controllers/graphql_controller_spec.rb32
-rw-r--r--spec/controllers/groups/boards_controller_spec.rb18
-rw-r--r--spec/controllers/groups/clusters_controller_spec.rb14
-rw-r--r--spec/controllers/groups/dependency_proxy_auth_controller_spec.rb2
-rw-r--r--spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb3
-rw-r--r--spec/controllers/groups/group_links_controller_spec.rb9
-rw-r--r--spec/controllers/groups/group_members_controller_spec.rb24
-rw-r--r--spec/controllers/groups/labels_controller_spec.rb18
-rw-r--r--spec/controllers/groups/milestones_controller_spec.rb10
-rw-r--r--spec/controllers/groups/registry/repositories_controller_spec.rb1
-rw-r--r--spec/controllers/groups/runners_controller_spec.rb16
-rw-r--r--spec/controllers/groups/settings/applications_controller_spec.rb219
-rw-r--r--spec/controllers/groups/uploads_controller_spec.rb2
-rw-r--r--spec/controllers/groups/variables_controller_spec.rb1
-rw-r--r--spec/controllers/groups_controller_spec.rb113
-rw-r--r--spec/controllers/invites_controller_spec.rb81
-rw-r--r--spec/controllers/oauth/authorizations_controller_spec.rb4
-rw-r--r--spec/controllers/omniauth_callbacks_controller_spec.rb8
-rw-r--r--spec/controllers/profiles/notifications_controller_spec.rb50
-rw-r--r--spec/controllers/projects/alerting/notifications_controller_spec.rb2
-rw-r--r--spec/controllers/projects/artifacts_controller_spec.rb6
-rw-r--r--spec/controllers/projects/boards_controller_spec.rb18
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb14
-rw-r--r--spec/controllers/projects/commit_controller_spec.rb98
-rw-r--r--spec/controllers/projects/cycle_analytics/events_controller_spec.rb2
-rw-r--r--spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb12
-rw-r--r--spec/controllers/projects/design_management/designs/resized_image_controller_spec.rb1
-rw-r--r--spec/controllers/projects/discussions_controller_spec.rb4
-rw-r--r--spec/controllers/projects/environments_controller_spec.rb1
-rw-r--r--spec/controllers/projects/feature_flags_controller_spec.rb1
-rw-r--r--spec/controllers/projects/forks_controller_spec.rb45
-rw-r--r--spec/controllers/projects/group_links_controller_spec.rb11
-rw-r--r--spec/controllers/projects/imports_controller_spec.rb8
-rw-r--r--spec/controllers/projects/incidents_controller_spec.rb1
-rw-r--r--spec/controllers/projects/issues_controller_spec.rb68
-rw-r--r--spec/controllers/projects/jobs_controller_spec.rb1
-rw-r--r--spec/controllers/projects/labels_controller_spec.rb26
-rw-r--r--spec/controllers/projects/merge_requests/content_controller_spec.rb19
-rw-r--r--spec/controllers/projects/merge_requests/creations_controller_spec.rb32
-rw-r--r--spec/controllers/projects/merge_requests/drafts_controller_spec.rb2
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb49
-rw-r--r--spec/controllers/projects/milestones_controller_spec.rb12
-rw-r--r--spec/controllers/projects/notes_controller_spec.rb6
-rw-r--r--spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb1
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb64
-rw-r--r--spec/controllers/projects/pipelines_settings_controller_spec.rb1
-rw-r--r--spec/controllers/projects/project_members_controller_spec.rb8
-rw-r--r--spec/controllers/projects/raw_controller_spec.rb1
-rw-r--r--spec/controllers/projects/registry/repositories_controller_spec.rb58
-rw-r--r--spec/controllers/projects/registry/tags_controller_spec.rb7
-rw-r--r--spec/controllers/projects/releases/evidences_controller_spec.rb3
-rw-r--r--spec/controllers/projects/releases_controller_spec.rb1
-rw-r--r--spec/controllers/projects/repositories_controller_spec.rb22
-rw-r--r--spec/controllers/projects/runners_controller_spec.rb4
-rw-r--r--spec/controllers/projects/services_controller_spec.rb34
-rw-r--r--spec/controllers/projects/settings/access_tokens_controller_spec.rb23
-rw-r--r--spec/controllers/projects/settings/ci_cd_controller_spec.rb1
-rw-r--r--spec/controllers/projects/settings/operations_controller_spec.rb1
-rw-r--r--spec/controllers/projects/snippets_controller_spec.rb4
-rw-r--r--spec/controllers/projects/starrers_controller_spec.rb2
-rw-r--r--spec/controllers/projects/static_site_editor_controller_spec.rb1
-rw-r--r--spec/controllers/projects/todos_controller_spec.rb1
-rw-r--r--spec/controllers/projects/uploads_controller_spec.rb2
-rw-r--r--spec/controllers/projects_controller_spec.rb30
-rw-r--r--spec/controllers/registrations/welcome_controller_spec.rb22
-rw-r--r--spec/controllers/root_controller_spec.rb20
-rw-r--r--spec/controllers/sessions_controller_spec.rb14
-rw-r--r--spec/db/schema_spec.rb7
-rw-r--r--spec/deprecation_toolkit_env.rb8
-rw-r--r--spec/experiments/application_experiment_spec.rb2
-rw-r--r--spec/experiments/members/invite_email_experiment_spec.rb69
-rw-r--r--spec/experiments/new_project_readme_experiment_spec.rb4
-rw-r--r--spec/experiments/strategy/round_robin_spec.rb68
-rw-r--r--spec/factories/atlassian_identities.rb2
-rw-r--r--spec/factories/bulk_import/trackers.rb14
-rw-r--r--spec/factories/ci/builds.rb15
-rw-r--r--spec/factories/ci/pipeline_artifacts.rb16
-rw-r--r--spec/factories/ci/reports/codequality_degradations.rb43
-rw-r--r--spec/factories/ci/unit_test.rb (renamed from spec/factories/ci/test_case.rb)4
-rw-r--r--spec/factories/ci/unit_test_failure.rb (renamed from spec/factories/ci/test_case_failure.rb)4
-rw-r--r--spec/factories/clusters/applications/helm.rb79
-rw-r--r--spec/factories/clusters/integrations/prometheus.rb12
-rw-r--r--spec/factories/draft_note.rb17
-rw-r--r--spec/factories/events.rb11
-rw-r--r--spec/factories/git_wiki_commit_details.rb2
-rw-r--r--spec/factories/gitaly/commit.rb4
-rw-r--r--spec/factories/gitlab/database/background_migration/batched_migrations.rb1
-rw-r--r--spec/factories/group_group_links.rb4
-rw-r--r--spec/factories/import_export_uploads.rb2
-rw-r--r--spec/factories/packages.rb8
-rw-r--r--spec/factories/packages/package_file.rb26
-rw-r--r--spec/factories/sequences.rb1
-rw-r--r--spec/factories/services.rb8
-rw-r--r--spec/factories/subscriptions.rb6
-rw-r--r--spec/factories/timelogs.rb19
-rw-r--r--spec/factories/users/in_product_marketing_email.rb10
-rw-r--r--spec/factories_spec.rb14
-rw-r--r--spec/features/admin/admin_abuse_reports_spec.rb2
-rw-r--r--spec/features/admin/admin_appearance_spec.rb2
-rw-r--r--spec/features/admin/admin_groups_spec.rb45
-rw-r--r--spec/features/admin/admin_labels_spec.rb6
-rw-r--r--spec/features/admin/admin_mode/login_spec.rb2
-rw-r--r--spec/features/admin/admin_mode/logout_spec.rb2
-rw-r--r--spec/features/admin/admin_mode_spec.rb8
-rw-r--r--spec/features/admin/admin_runners_spec.rb30
-rw-r--r--spec/features/admin/admin_search_settings_spec.rb6
-rw-r--r--spec/features/admin/admin_sees_project_statistics_spec.rb2
-rw-r--r--spec/features/admin/admin_sees_projects_statistics_spec.rb2
-rw-r--r--spec/features/admin/admin_settings_spec.rb18
-rw-r--r--spec/features/admin/admin_users_impersonation_tokens_spec.rb2
-rw-r--r--spec/features/admin/services/admin_activates_prometheus_spec.rb24
-rw-r--r--spec/features/admin/services/admin_visits_service_templates_spec.rb44
-rw-r--r--spec/features/alerts_settings/user_views_alerts_settings_spec.rb2
-rw-r--r--spec/features/boards/add_issues_modal_spec.rb270
-rw-r--r--spec/features/boards/boards_spec.rb94
-rw-r--r--spec/features/boards/focus_mode_spec.rb4
-rw-r--r--spec/features/boards/modal_filter_spec.rb228
-rw-r--r--spec/features/boards/multi_select_spec.rb4
-rw-r--r--spec/features/boards/multiple_boards_spec.rb1
-rw-r--r--spec/features/boards/new_issue_spec.rb34
-rw-r--r--spec/features/boards/reload_boards_on_browser_back_spec.rb4
-rw-r--r--spec/features/boards/sidebar_assignee_spec.rb122
-rw-r--r--spec/features/boards/sidebar_due_date_spec.rb46
-rw-r--r--spec/features/boards/sidebar_labels_spec.rb166
-rw-r--r--spec/features/boards/sidebar_milestones_spec.rb65
-rw-r--r--spec/features/boards/sidebar_spec.rb403
-rw-r--r--spec/features/boards/sub_group_project_spec.rb3
-rw-r--r--spec/features/boards/user_adds_lists_to_board_spec.rb11
-rw-r--r--spec/features/calendar_spec.rb4
-rw-r--r--spec/features/callouts/service_templates_deprecation_spec.rb59
-rw-r--r--spec/features/clusters/cluster_detail_page_spec.rb6
-rw-r--r--spec/features/clusters/cluster_health_dashboard_spec.rb2
-rw-r--r--spec/features/commits_spec.rb2
-rw-r--r--spec/features/dashboard/active_tab_spec.rb2
-rw-r--r--spec/features/dashboard/datetime_on_tooltips_spec.rb2
-rw-r--r--spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb2
-rw-r--r--spec/features/dashboard/issuables_counter_spec.rb4
-rw-r--r--spec/features/dashboard/milestones_spec.rb2
-rw-r--r--spec/features/dashboard/project_member_activity_index_spec.rb2
-rw-r--r--spec/features/dashboard/projects_spec.rb23
-rw-r--r--spec/features/dashboard/shortcuts_spec.rb6
-rw-r--r--spec/features/discussion_comments/merge_request_spec.rb2
-rw-r--r--spec/features/error_pages_spec.rb9
-rw-r--r--spec/features/error_tracking/user_filters_errors_by_status_spec.rb1
-rw-r--r--spec/features/error_tracking/user_searches_sentry_errors_spec.rb1
-rw-r--r--spec/features/error_tracking/user_sees_error_index_spec.rb3
-rw-r--r--spec/features/file_uploads/attachment_spec.rb44
-rw-r--r--spec/features/file_uploads/maven_package_spec.rb21
-rw-r--r--spec/features/file_uploads/nuget_package_spec.rb4
-rw-r--r--spec/features/file_uploads/rubygem_package_spec.rb45
-rw-r--r--spec/features/frequently_visited_projects_and_groups_spec.rb2
-rw-r--r--spec/features/gitlab_experiments_spec.rb44
-rw-r--r--spec/features/groups/board_spec.rb2
-rw-r--r--spec/features/groups/clusters/user_spec.rb2
-rw-r--r--spec/features/groups/group_page_with_external_authorization_service_spec.rb4
-rw-r--r--spec/features/groups/group_settings_spec.rb2
-rw-r--r--spec/features/groups/issues_spec.rb2
-rw-r--r--spec/features/groups/labels/index_spec.rb2
-rw-r--r--spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb1
-rw-r--r--spec/features/groups/members/request_access_spec.rb2
-rw-r--r--spec/features/groups/merge_requests_spec.rb2
-rw-r--r--spec/features/groups/milestone_spec.rb6
-rw-r--r--spec/features/groups/navbar_spec.rb6
-rw-r--r--spec/features/groups/settings/user_searches_in_settings_spec.rb23
-rw-r--r--spec/features/groups_spec.rb66
-rw-r--r--spec/features/ide/clientside_preview_csp_spec.rb10
-rw-r--r--spec/features/import/manifest_import_spec.rb11
-rw-r--r--spec/features/invites_spec.rb17
-rw-r--r--spec/features/issues/bulk_assignment_labels_spec.rb4
-rw-r--r--spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb6
-rw-r--r--spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb6
-rw-r--r--spec/features/issues/form_spec.rb2
-rw-r--r--spec/features/issues/gfm_autocomplete_spec.rb984
-rw-r--r--spec/features/issues/issue_sidebar_spec.rb215
-rw-r--r--spec/features/issues/markdown_toolbar_spec.rb28
-rw-r--r--spec/features/issues/note_polling_spec.rb2
-rw-r--r--spec/features/issues/spam_issues_spec.rb22
-rw-r--r--spec/features/issues/user_comments_on_issue_spec.rb18
-rw-r--r--spec/features/issues/user_creates_branch_and_merge_request_spec.rb2
-rw-r--r--spec/features/issues/user_creates_confidential_merge_request_spec.rb2
-rw-r--r--spec/features/issues/user_creates_issue_spec.rb10
-rw-r--r--spec/features/issues/user_edits_issue_spec.rb198
-rw-r--r--spec/features/issues/user_filters_issues_spec.rb2
-rw-r--r--spec/features/issues/user_interacts_with_awards_spec.rb4
-rw-r--r--spec/features/issues/user_invites_from_a_comment_spec.rb25
-rw-r--r--spec/features/issues/user_sees_live_update_spec.rb2
-rw-r--r--spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb5
-rw-r--r--spec/features/issues/user_sorts_issues_spec.rb46
-rw-r--r--spec/features/markdown/markdown_spec.rb11
-rw-r--r--spec/features/merge_request/batch_comments_spec.rb120
-rw-r--r--spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb2
-rw-r--r--spec/features/merge_request/user_awards_emoji_spec.rb29
-rw-r--r--spec/features/merge_request/user_creates_merge_request_spec.rb6
-rw-r--r--spec/features/merge_request/user_invites_from_a_comment_spec.rb25
-rw-r--r--spec/features/merge_request/user_posts_notes_spec.rb8
-rw-r--r--spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb32
-rw-r--r--spec/features/merge_request/user_sees_breadcrumb_links_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb38
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb6
-rw-r--r--spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb4
-rw-r--r--spec/features/merge_request/user_sees_pipelines_spec.rb12
-rw-r--r--spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb4
-rw-r--r--spec/features/merge_request/user_squashes_merge_request_spec.rb4
-rw-r--r--spec/features/merge_request/user_views_open_merge_request_spec.rb2
-rw-r--r--spec/features/milestones/user_creates_milestone_spec.rb2
-rw-r--r--spec/features/milestones/user_views_milestone_spec.rb4
-rw-r--r--spec/features/milestones/user_views_milestones_spec.rb3
-rw-r--r--spec/features/participants_autocomplete_spec.rb44
-rw-r--r--spec/features/profiles/emails_spec.rb8
-rw-r--r--spec/features/profiles/password_spec.rb2
-rw-r--r--spec/features/profiles/personal_access_tokens_spec.rb2
-rw-r--r--spec/features/profiles/user_edit_profile_spec.rb8
-rw-r--r--spec/features/profiles/user_search_settings_spec.rb6
-rw-r--r--spec/features/projects/active_tabs_spec.rb2
-rw-r--r--spec/features/projects/blobs/blob_line_permalink_updater_spec.rb22
-rw-r--r--spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb42
-rw-r--r--spec/features/projects/branches/user_creates_branch_spec.rb4
-rw-r--r--spec/features/projects/branches/user_deletes_branch_spec.rb9
-rw-r--r--spec/features/projects/branches_spec.rb29
-rw-r--r--spec/features/projects/clusters/gcp_spec.rb2
-rw-r--r--spec/features/projects/clusters/user_spec.rb2
-rw-r--r--spec/features/projects/clusters_spec.rb4
-rw-r--r--spec/features/projects/commit/cherry_pick_spec.rb13
-rw-r--r--spec/features/projects/commit/comments/user_edits_comments_spec.rb2
-rw-r--r--spec/features/projects/commit/mini_pipeline_graph_spec.rb50
-rw-r--r--spec/features/projects/commit/user_reverts_commit_spec.rb17
-rw-r--r--spec/features/projects/commits/user_browses_commits_spec.rb9
-rw-r--r--spec/features/projects/features_visibility_spec.rb2
-rw-r--r--spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb42
-rw-r--r--spec/features/projects/files/user_creates_directory_spec.rb2
-rw-r--r--spec/features/projects/files/user_uploads_files_spec.rb35
-rw-r--r--spec/features/projects/fork_spec.rb48
-rw-r--r--spec/features/projects/jobs/permissions_spec.rb2
-rw-r--r--spec/features/projects/jobs/user_browses_jobs_spec.rb9
-rw-r--r--spec/features/projects/jobs_spec.rb5
-rw-r--r--spec/features/projects/labels/user_removes_labels_spec.rb10
-rw-r--r--spec/features/projects/members/list_spec.rb2
-rw-r--r--spec/features/projects/members/user_requests_access_spec.rb2
-rw-r--r--spec/features/projects/merge_request_button_spec.rb4
-rw-r--r--spec/features/projects/navbar_spec.rb4
-rw-r--r--spec/features/projects/new_project_spec.rb66
-rw-r--r--spec/features/projects/pages/user_edits_settings_spec.rb1
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb10
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb89
-rw-r--r--spec/features/projects/releases/user_views_edit_release_spec.rb2
-rw-r--r--spec/features/projects/releases/user_views_release_spec.rb43
-rw-r--r--spec/features/projects/remote_mirror_spec.rb4
-rw-r--r--spec/features/projects/services/disable_triggers_spec.rb4
-rw-r--r--spec/features/projects/services/user_activates_asana_spec.rb2
-rw-r--r--spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb11
-rw-r--r--spec/features/projects/services/user_activates_hipchat_spec.rb40
-rw-r--r--spec/features/projects/services/user_activates_jetbrains_teamcity_ci_spec.rb6
-rw-r--r--spec/features/projects/services/user_activates_jira_spec.rb71
-rw-r--r--spec/features/projects/services/user_activates_mattermost_slash_command_spec.rb2
-rw-r--r--spec/features/projects/services/user_activates_pushover_spec.rb2
-rw-r--r--spec/features/projects/services/user_activates_slack_notifications_spec.rb2
-rw-r--r--spec/features/projects/services/user_views_services_spec.rb4
-rw-r--r--spec/features/projects/settings/access_tokens_spec.rb106
-rw-r--r--spec/features/projects/settings/forked_project_settings_spec.rb5
-rw-r--r--spec/features/projects/settings/operations_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/registry_settings_spec.rb4
-rw-r--r--spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb34
-rw-r--r--spec/features/projects/settings/user_searches_in_settings_spec.rb44
-rw-r--r--spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb8
-rw-r--r--spec/features/projects/show/user_sees_deletion_failure_message_spec.rb2
-rw-r--r--spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb5
-rw-r--r--spec/features/projects/show/user_uploads_files_spec.rb12
-rw-r--r--spec/features/projects/snippets/user_comments_on_snippet_spec.rb1
-rw-r--r--spec/features/projects/sub_group_issuables_spec.rb10
-rw-r--r--spec/features/projects/user_sees_sidebar_spec.rb2
-rw-r--r--spec/features/projects/user_sees_user_popover_spec.rb2
-rw-r--r--spec/features/projects/user_uses_shortcuts_spec.rb2
-rw-r--r--spec/features/projects_spec.rb20
-rw-r--r--spec/features/protected_branches_spec.rb12
-rw-r--r--spec/features/registrations/welcome_spec.rb21
-rw-r--r--spec/features/runners_spec.rb16
-rw-r--r--spec/features/search/user_uses_header_search_field_spec.rb2
-rw-r--r--spec/features/security/project/internal_access_spec.rb12
-rw-r--r--spec/features/security/project/private_access_spec.rb14
-rw-r--r--spec/features/security/project/public_access_spec.rb12
-rw-r--r--spec/features/snippets/notes_on_personal_snippets_spec.rb3
-rw-r--r--spec/features/users/anonymous_sessions_spec.rb2
-rw-r--r--spec/features/users/login_spec.rb6
-rw-r--r--spec/features/users/show_spec.rb6
-rw-r--r--spec/features/users/terms_spec.rb2
-rw-r--r--spec/features/whats_new_spec.rb62
-rw-r--r--spec/finders/applications_finder_spec.rb40
-rw-r--r--spec/finders/ci/variables_finder_spec.rb65
-rw-r--r--spec/finders/concerns/finder_with_group_hierarchy_spec.rb112
-rw-r--r--spec/finders/concerns/packages/finder_helper_spec.rb209
-rw-r--r--spec/finders/design_management/designs_finder_spec.rb1
-rw-r--r--spec/finders/design_management/versions_finder_spec.rb1
-rw-r--r--spec/finders/environments_by_deployments_finder_spec.rb127
-rw-r--r--spec/finders/environments_finder_spec.rb126
-rw-r--r--spec/finders/group_members_finder_spec.rb282
-rw-r--r--spec/finders/issues_finder_spec.rb18
-rw-r--r--spec/finders/merge_requests/oldest_per_commit_finder_spec.rb39
-rw-r--r--spec/finders/merge_requests_finder_spec.rb12
-rw-r--r--spec/finders/metrics/dashboards/annotations_finder_spec.rb1
-rw-r--r--spec/finders/metrics/users_starred_dashboards_finder_spec.rb1
-rw-r--r--spec/finders/notes_finder_spec.rb18
-rw-r--r--spec/finders/packages/go/package_finder_spec.rb71
-rw-r--r--spec/finders/packages/maven/package_finder_spec.rb157
-rw-r--r--spec/finders/pending_todos_finder_spec.rb12
-rw-r--r--spec/finders/projects_finder_spec.rb6
-rw-r--r--spec/finders/repositories/branch_names_finder_spec.rb25
-rw-r--r--spec/finders/repositories/changelog_tag_finder_spec.rb (renamed from spec/finders/repositories/previous_tag_finder_spec.rb)13
-rw-r--r--spec/finders/user_group_notification_settings_finder_spec.rb33
-rw-r--r--spec/fixtures/api/schemas/entities/member.json2
-rw-r--r--spec/fixtures/api/schemas/entities/member_user.json3
-rw-r--r--spec/fixtures/api/schemas/external_validation.json15
-rw-r--r--spec/fixtures/api/schemas/graphql/packages/package_conan_metadata.json37
-rw-r--r--spec/fixtures/api/schemas/graphql/packages/package_details.json36
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/user/public.json3
-rw-r--r--spec/fixtures/ce_sample_schema.json0
-rw-r--r--spec/fixtures/config/mail_room_enabled_ms_graph.yml26
-rw-r--r--spec/fixtures/emails/update_commands_only.eml22
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml21
-rw-r--r--spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml22
-rw-r--r--spec/fixtures/lib/gitlab/performance_bar/peek_data.json2
-rw-r--r--spec/fixtures/packages/rubygems/package-0.0.1.gem  Bin 4096 -> 4608 bytes
-rw-r--r--spec/fixtures/packages/rubygems/package.gem  Bin 0 -> 4608 bytes
-rw-r--r--spec/fixtures/packages/rubygems/package.gemspec47
-rw-r--r--spec/fixtures/security_reports/master/gl-sast-report.json945
-rw-r--r--spec/fixtures/unsafe_javascript.xml10
-rw-r--r--spec/frontend/__helpers__/experimentation_helper.js13
-rw-r--r--spec/frontend/__helpers__/mock_apollo_helper.js12
-rw-r--r--spec/frontend/__helpers__/vue_test_utils_helper.js73
-rw-r--r--spec/frontend/__helpers__/vue_test_utils_helper_spec.js208
-rw-r--r--spec/frontend/__helpers__/web_worker_fake.js71
-rw-r--r--spec/frontend/__helpers__/web_worker_mock.js10
-rw-r--r--spec/frontend/__helpers__/web_worker_transformer.js18
-rw-r--r--spec/frontend/__mocks__/vue/index.js7
-rw-r--r--spec/frontend/access_tokens/index_spec.js20
-rw-r--r--spec/frontend/admin/signup_restrictions/components/signup_checkbox_spec.js66
-rw-r--r--spec/frontend/admin/signup_restrictions/components/signup_form_spec.js331
-rw-r--r--spec/frontend/admin/signup_restrictions/mock_data.js41
-rw-r--r--spec/frontend/admin/signup_restrictions/utils.js19
-rw-r--r--spec/frontend/admin/signup_restrictions/utils_spec.js22
-rw-r--r--spec/frontend/admin/users/components/user_date_spec.js2
-rw-r--r--spec/frontend/admin/users/components/users_table_spec.js2
-rw-r--r--spec/frontend/admin/users/new_spec.js76
-rw-r--r--spec/frontend/alerts_settings/components/__snapshots__/alerts_settings_form_spec.js.snap524
-rw-r--r--spec/frontend/alerts_settings/components/alerts_settings_form_spec.js258
-rw-r--r--spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js120
-rw-r--r--spec/frontend/analytics/usage_trends/components/app_spec.js2
-rw-r--r--spec/frontend/api_spec.js68
-rw-r--r--spec/frontend/batch_comments/components/preview_item_spec.js12
-rw-r--r--spec/frontend/behaviors/markdown/render_mermaid_spec.js25
-rw-r--r--spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js2
-rw-r--r--spec/frontend/blob/file_template_selector_spec.js61
-rw-r--r--spec/frontend/boards/board_card_inner_spec.js87
-rw-r--r--spec/frontend/boards/board_new_issue_deprecated_spec.js16
-rw-r--r--spec/frontend/boards/components/__snapshots__/board_blocked_icon_spec.js.snap30
-rw-r--r--spec/frontend/boards/components/board_add_new_column_form_spec.js32
-rw-r--r--spec/frontend/boards/components/board_add_new_column_spec.js10
-rw-r--r--spec/frontend/boards/components/board_blocked_icon_spec.js226
-rw-r--r--spec/frontend/boards/components/board_content_sidebar_spec.js140
-rw-r--r--spec/frontend/boards/components/board_content_spec.js13
-rw-r--r--spec/frontend/boards/components/board_form_spec.js2
-rw-r--r--spec/frontend/boards/components/board_new_issue_spec.js6
-rw-r--r--spec/frontend/boards/components/board_settings_sidebar_spec.js43
-rw-r--r--spec/frontend/boards/components/filtered_search_spec.js65
-rw-r--r--spec/frontend/boards/components/issue_time_estimate_spec.js6
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js14
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js28
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_time_tracker_spec.js58
-rw-r--r--spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js (renamed from spec/frontend/boards/components/sidebar/board_sidebar_issue_title_spec.js)36
-rw-r--r--spec/frontend/boards/mock_data.js130
-rw-r--r--spec/frontend/boards/modal_store_spec.js134
-rw-r--r--spec/frontend/boards/stores/actions_spec.js708
-rw-r--r--spec/frontend/boards/stores/getters_spec.js41
-rw-r--r--spec/frontend/boards/stores/mutations_spec.js239
-rw-r--r--spec/frontend/branches/components/sort_dropdown_spec.js91
-rw-r--r--spec/frontend/captcha/apollo_captcha_link_spec.js165
-rw-r--r--spec/frontend/cascading_settings/components/lock_popovers_spec.js152
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js43
-rw-r--r--spec/frontend/ci_variable_list/components/ci_variable_table_spec.js14
-rw-r--r--spec/frontend/clusters/components/application_row_spec.js6
-rw-r--r--spec/frontend/clusters/services/application_state_machine_spec.js30
-rw-r--r--spec/frontend/content_editor/components/content_editor_spec.js26
-rw-r--r--spec/frontend/content_editor/markdown_processing_examples.js19
-rw-r--r--spec/frontend/content_editor/markdown_processing_spec.js12
-rw-r--r--spec/frontend/content_editor/services/create_editor_spec.js39
-rw-r--r--spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap17
-rw-r--r--spec/frontend/create_merge_request_dropdown_spec.js18
-rw-r--r--spec/frontend/cycle_analytics/banner_spec.js46
-rw-r--r--spec/frontend/cycle_analytics/total_time_component_spec.js34
-rw-r--r--spec/frontend/delete_label_modal_spec.js83
-rw-r--r--spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js71
-rw-r--r--spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js24
-rw-r--r--spec/frontend/deploy_freeze/store/actions_spec.js82
-rw-r--r--spec/frontend/deploy_freeze/store/mutations_spec.js15
-rw-r--r--spec/frontend/deploy_tokens/components/revoke_button_spec.js108
-rw-r--r--spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap2
-rw-r--r--spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap1
-rw-r--r--spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap8
-rw-r--r--spec/frontend/diffs/components/app_spec.js34
-rw-r--r--spec/frontend/diffs/components/commit_item_spec.js130
-rw-r--r--spec/frontend/diffs/components/compare_versions_spec.js135
-rw-r--r--spec/frontend/diffs/components/diff_row_spec.js17
-rw-r--r--spec/frontend/diffs/components/inline_diff_table_row_spec.js13
-rw-r--r--spec/frontend/diffs/components/parallel_diff_table_row_spec.js23
-rw-r--r--spec/frontend/diffs/create_diffs_store.js2
-rw-r--r--spec/frontend/diffs/find_interop_attributes.js20
-rw-r--r--spec/frontend/diffs/store/actions_spec.js47
-rw-r--r--spec/frontend/diffs/store/getters_spec.js44
-rw-r--r--spec/frontend/diffs/store/mutations_spec.js14
-rw-r--r--spec/frontend/diffs/utils/interoperability_spec.js67
-rw-r--r--spec/frontend/editor/editor_lite_extension_base_spec.js271
-rw-r--r--spec/frontend/emoji/awards_app/store/actions_spec.js155
-rw-r--r--spec/frontend/emoji/awards_app/store/mutations_spec.js65
-rw-r--r--spec/frontend/environments/enable_review_app_modal_spec.js23
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js1
-rw-r--r--spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js12
-rw-r--r--spec/frontend/error_tracking_settings/store/getters_spec.js4
-rw-r--r--spec/frontend/experimentation/components/experiment_spec.js72
-rw-r--r--spec/frontend/experimentation/utils_spec.js101
-rw-r--r--spec/frontend/feature_flags/components/form_spec.js4
-rw-r--r--spec/frontend/feature_highlight/feature_highlight_popover_spec.js1
-rw-r--r--spec/frontend/fixtures/api_markdown.rb34
-rw-r--r--spec/frontend/fixtures/api_markdown.yml50
-rw-r--r--spec/frontend/fixtures/autocomplete.rb41
-rw-r--r--spec/frontend/fixtures/issues.rb2
-rw-r--r--spec/frontend/fixtures/merge_requests_diffs.rb20
-rw-r--r--spec/frontend/fixtures/static/mini_dropdown_graph.html13
-rw-r--r--spec/frontend/fixtures/static/whats_new_notification.html2
-rw-r--r--spec/frontend/flash_spec.js62
-rw-r--r--spec/frontend/gfm_auto_complete_spec.js35
-rw-r--r--spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap2
-rw-r--r--spec/frontend/ide/components/cannot_push_code_alert_spec.js72
-rw-r--r--spec/frontend/ide/components/commit_sidebar/form_spec.js6
-rw-r--r--spec/frontend/ide/components/ide_spec.js28
-rw-r--r--spec/frontend/ide/stores/getters_spec.js126
-rw-r--r--spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap4
-rw-r--r--spec/frontend/integrations/edit/components/dynamic_field_spec.js11
-rw-r--r--spec/frontend/integrations/edit/components/jira_issues_fields_spec.js49
-rw-r--r--spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js106
-rw-r--r--spec/frontend/integrations/edit/components/jira_upgrade_cta_spec.js30
-rw-r--r--spec/frontend/integrations/edit/components/trigger_fields_spec.js4
-rw-r--r--spec/frontend/integrations/index/components/integrations_list_spec.js26
-rw-r--r--spec/frontend/integrations/index/components/integrations_table_spec.js53
-rw-r--r--spec/frontend/integrations/index/mock_data.js50
-rw-r--r--spec/frontend/invite_member/components/invite_member_modal_spec.js2
-rw-r--r--spec/frontend/invite_member/components/invite_member_trigger_spec.js2
-rw-r--r--spec/frontend/invite_members/components/invite_members_modal_spec.js48
-rw-r--r--spec/frontend/invite_members/components/invite_members_trigger_spec.js82
-rw-r--r--spec/frontend/issuable/components/csv_export_modal_spec.js6
-rw-r--r--spec/frontend/issuable/components/csv_import_export_buttons_spec.js9
-rw-r--r--spec/frontend/issuable_list/components/issuable_list_root_spec.js124
-rw-r--r--spec/frontend/issuable_list/components/issuable_tabs_spec.js15
-rw-r--r--spec/frontend/issuable_list/mock_data.js2
-rw-r--r--spec/frontend/issuable_show/mock_data.js1
-rw-r--r--spec/frontend/issuable_type_selector/components/__snapshots__/info_popover_spec.js.snap52
-rw-r--r--spec/frontend/issuable_type_selector/components/info_popover_spec.js20
-rw-r--r--spec/frontend/issue_show/components/edit_actions_spec.js8
-rw-r--r--spec/frontend/issues_list/components/issues_list_app_spec.js540
-rw-r--r--spec/frontend/jira_connect/api_spec.js17
-rw-r--r--spec/frontend/jira_connect/components/__snapshots__/group_item_name_spec.js.snap44
-rw-r--r--spec/frontend/jira_connect/components/app_spec.js30
-rw-r--r--spec/frontend/jira_connect/components/group_item_name_spec.js28
-rw-r--r--spec/frontend/jira_connect/components/groups_list_item_spec.js57
-rw-r--r--spec/frontend/jira_connect/components/groups_list_spec.js103
-rw-r--r--spec/frontend/jira_connect/components/subscriptions_list_spec.js122
-rw-r--r--spec/frontend/jira_connect/index_spec.js34
-rw-r--r--spec/frontend/jira_connect/mock_data.js6
-rw-r--r--spec/frontend/jira_connect/utils_spec.js114
-rw-r--r--spec/frontend/jobs/components/commit_block_spec.js105
-rw-r--r--spec/frontend/jobs/components/job_sidebar_details_container_spec.js15
-rw-r--r--spec/frontend/jobs/components/manual_variables_form_spec.js153
-rw-r--r--spec/frontend/jobs/components/sidebar_spec.js30
-rw-r--r--spec/frontend/jobs/components/stages_dropdown_spec.js155
-rw-r--r--spec/frontend/jobs/components/table/jobs_table_spec.js31
-rw-r--r--spec/frontend/jobs/components/table/jobs_table_tabs_spec.js42
-rw-r--r--spec/frontend/jobs/mock_data.js215
-rw-r--r--spec/frontend/lib/utils/color_utils_spec.js27
-rw-r--r--spec/frontend/lib/utils/common_utils_spec.js10
-rw-r--r--spec/frontend/lib/utils/datetime_utility_spec.js80
-rw-r--r--spec/frontend/lib/utils/forms_spec.js163
-rw-r--r--spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js1
-rw-r--r--spec/frontend/members/components/action_buttons/approve_access_request_button_spec.js15
-rw-r--r--spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js2
-rw-r--r--spec/frontend/members/components/action_buttons/remove_group_link_button_spec.js11
-rw-r--r--spec/frontend/members/components/action_buttons/remove_member_button_spec.js21
-rw-r--r--spec/frontend/members/components/action_buttons/resend_invite_button_spec.js15
-rw-r--r--spec/frontend/members/components/action_buttons/user_action_buttons_spec.js46
-rw-r--r--spec/frontend/members/components/app_spec.js25
-rw-r--r--spec/frontend/members/components/avatars/user_avatar_spec.js16
-rw-r--r--spec/frontend/members/components/filter_sort/filter_sort_container_spec.js27
-rw-r--r--spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js50
-rw-r--r--spec/frontend/members/components/filter_sort/sort_dropdown_spec.js29
-rw-r--r--spec/frontend/members/components/modals/leave_modal_spec.js42
-rw-r--r--spec/frontend/members/components/modals/remove_group_link_modal_spec.js22
-rw-r--r--spec/frontend/members/components/table/expiration_datepicker_spec.js10
-rw-r--r--spec/frontend/members/components/table/members_table_cell_spec.js12
-rw-r--r--spec/frontend/members/components/table/members_table_spec.js32
-rw-r--r--spec/frontend/members/components/table/role_dropdown_spec.js10
-rw-r--r--spec/frontend/members/index_spec.js47
-rw-r--r--spec/frontend/members/mock_data.js2
-rw-r--r--spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js131
-rw-r--r--spec/frontend/merge_conflicts/mock_data.js340
-rw-r--r--spec/frontend/merge_conflicts/store/actions_spec.js125
-rw-r--r--spec/frontend/merge_conflicts/store/getters_spec.js187
-rw-r--r--spec/frontend/merge_conflicts/store/mutations_spec.js99
-rw-r--r--spec/frontend/merge_conflicts/utils_spec.js106
-rw-r--r--spec/frontend/merge_request/components/status_box_spec.js2
-rw-r--r--spec/frontend/mini_pipeline_graph_dropdown_spec.js104
-rw-r--r--spec/frontend/mocks/ce/diffs/workers/tree_worker.js1
-rw-r--r--spec/frontend/mocks/ce/ide/lib/diff/diff_worker.js1
-rw-r--r--spec/frontend/mr_notes/stores/actions_spec.js92
-rw-r--r--spec/frontend/mr_notes/stores/mutations_spec.js27
-rw-r--r--spec/frontend/notebook/cells/markdown_spec.js106
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js67
-rw-r--r--spec/frontend/notes/components/discussion_navigator_spec.js13
-rw-r--r--spec/frontend/notes/components/note_actions_spec.js16
-rw-r--r--spec/frontend/notes/components/note_body_spec.js17
-rw-r--r--spec/frontend/notes/components/noteable_discussion_spec.js9
-rw-r--r--spec/frontend/notes/components/noteable_note_spec.js133
-rw-r--r--spec/frontend/notes/components/notes_app_spec.js30
-rw-r--r--spec/frontend/notes/mock_data.js16
-rw-r--r--spec/frontend/notes/stores/getters_spec.js69
-rw-r--r--spec/frontend/packages/details/store/getters_spec.js2
-rw-r--r--spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap10
-rw-r--r--spec/frontend/packages/list/components/packages_list_app_spec.js133
-rw-r--r--spec/frontend/packages/list/components/packages_search_spec.js29
-rw-r--r--spec/frontend/packages/list/components/packages_title_spec.js18
-rw-r--r--spec/frontend/packages/list/utils_spec.js11
-rw-r--r--spec/frontend/packages/mock_data.js17
-rw-r--r--spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap15
-rw-r--r--spec/frontend/packages/shared/components/package_icon_and_name_spec.js32
-rw-r--r--spec/frontend/packages/shared/components/package_list_row_spec.js41
-rw-r--r--spec/frontend/packages/shared/utils_spec.js1
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_icon_and_name_spec.js28
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_search_spec.js135
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_title_spec.js75
-rw-r--r--spec/frontend/packages_and_registries/settings/group/components/maven_settings_spec.js5
-rw-r--r--spec/frontend/packages_and_registries/shared/utils_spec.js59
-rw-r--r--spec/frontend/pager_spec.js71
-rw-r--r--spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap4
-rw-r--r--spec/frontend/pages/admin/users/components/delete_user_modal_spec.js8
-rw-r--r--spec/frontend/pages/admin/users/new/index_spec.js41
-rw-r--r--spec/frontend/pages/projects/forks/new/components/fork_form_spec.js35
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_a_spec.js.snap374
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_b_spec.js.snap70
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_section_card_spec.js.snap67
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_a_spec.js30
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_b_spec.js6
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_card_spec.js26
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js49
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/mock_data.js5
-rw-r--r--spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js45
-rw-r--r--spec/frontend/pages/shared/wikis/components/wiki_alert_spec.js (renamed from spec/frontend/pages/shared/wikis/wiki_alert_spec.js)0
-rw-r--r--spec/frontend/pages/shared/wikis/components/wiki_form_spec.js222
-rw-r--r--spec/frontend/performance_bar/components/detailed_metric_spec.js304
-rw-r--r--spec/frontend/performance_bar/stores/performance_bar_store_spec.js40
-rw-r--r--spec/frontend/pipeline_editor/components/code_snippet_alert/code_snippet_alert_spec.js61
-rw-r--r--spec/frontend/pipeline_editor/components/editor/ci_config_merged_preview_spec.js21
-rw-r--r--spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js123
-rw-r--r--spec/frontend/pipeline_editor/components/file-nav/pipeline_editor_file_nav_spec.js49
-rw-r--r--spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js16
-rw-r--r--spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js55
-rw-r--r--spec/frontend/pipeline_editor/components/header/validation_segment_spec.js93
-rw-r--r--spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js5
-rw-r--r--spec/frontend/pipeline_editor/components/lint/ci_lint_spec.js17
-rw-r--r--spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js54
-rw-r--r--spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js57
-rw-r--r--spec/frontend/pipeline_editor/graphql/resolvers_spec.js18
-rw-r--r--spec/frontend/pipeline_editor/mock_data.js14
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_app_spec.js91
-rw-r--r--spec/frontend/pipeline_editor/pipeline_editor_home_spec.js7
-rw-r--r--spec/frontend/pipelines/blank_state_spec.js20
-rw-r--r--spec/frontend/pipelines/components/pipelines_list/pipeline_stage_spec.js9
-rw-r--r--spec/frontend/pipelines/graph/action_component_spec.js2
-rw-r--r--spec/frontend/pipelines/graph/graph_component_spec.js31
-rw-r--r--spec/frontend/pipelines/graph/graph_component_wrapper_spec.js172
-rw-r--r--spec/frontend/pipelines/graph/job_group_dropdown_spec.js8
-rw-r--r--spec/frontend/pipelines/graph/job_item_spec.js4
-rw-r--r--spec/frontend/pipelines/graph/job_name_component_spec.js2
-rw-r--r--spec/frontend/pipelines/graph/linked_pipelines_column_spec.js33
-rw-r--r--spec/frontend/pipelines/graph/mock_data.js18
-rw-r--r--spec/frontend/pipelines/graph/stage_column_component_spec.js30
-rw-r--r--spec/frontend/pipelines/graph_shared/links_inner_spec.js2
-rw-r--r--spec/frontend/pipelines/graph_shared/links_layer_spec.js13
-rw-r--r--spec/frontend/pipelines/nav_controls_spec.js60
-rw-r--r--spec/frontend/pipelines/notification/pipeline_notification_spec.js79
-rw-r--r--spec/frontend/pipelines/pipeline_graph/mock_data.js36
-rw-r--r--spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js60
-rw-r--r--spec/frontend/pipelines/pipelines_ci_templates_spec.js111
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js45
-rw-r--r--spec/frontend/pipelines/pipelines_table_row_spec.js239
-rw-r--r--spec/frontend/pipelines/pipelines_table_spec.js56
-rw-r--r--spec/frontend/pipelines/time_ago_spec.js69
-rw-r--r--spec/frontend/pipelines/unwrapping_utils_spec.js4
-rw-r--r--spec/frontend/projects/commit/components/branches_dropdown_spec.js6
-rw-r--r--spec/frontend/projects/commit/components/commit_comments_button_spec.js42
-rw-r--r--spec/frontend/projects/commit/components/commit_options_dropdown_spec.js123
-rw-r--r--spec/frontend/projects/commit/components/form_modal_spec.js11
-rw-r--r--spec/frontend/projects/commit/components/form_trigger_spec.js44
-rw-r--r--spec/frontend/projects/commit/mock_data.js2
-rw-r--r--spec/frontend/projects/commit/store/mutations_spec.js6
-rw-r--r--spec/frontend/projects/commit_box/info/load_branches_spec.js41
-rw-r--r--spec/frontend/projects/compare/components/app_legacy_spec.js55
-rw-r--r--spec/frontend/projects/compare/components/repo_dropdown_spec.js22
-rw-r--r--spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js25
-rw-r--r--spec/frontend/projects/compare/components/revision_dropdown_spec.js41
-rw-r--r--spec/frontend/projects/experiment_new_project_creation/components/app_spec.js52
-rw-r--r--spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js31
-rw-r--r--spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap6
-rw-r--r--spec/frontend/projects/pipelines/charts/components/app_spec.js57
-rw-r--r--spec/frontend/registry/explorer/pages/list_spec.js91
-rw-r--r--spec/frontend/registry/settings/components/settings_form_spec.js82
-rw-r--r--spec/frontend/releases/components/app_edit_new_spec.js10
-rw-r--r--spec/frontend/releases/components/app_index_spec.js8
-rw-r--r--spec/frontend/releases/components/app_show_spec.js189
-rw-r--r--spec/frontend/releases/components/asset_links_form_spec.js2
-rw-r--r--spec/frontend/releases/components/release_block_milestone_info_spec.js2
-rw-r--r--spec/frontend/releases/components/releases_pagination_graphql_spec.js16
-rw-r--r--spec/frontend/releases/components/releases_pagination_rest_spec.js14
-rw-r--r--spec/frontend/releases/components/releases_sort_spec.js12
-rw-r--r--spec/frontend/releases/components/tag_field_exsting_spec.js6
-rw-r--r--spec/frontend/releases/components/tag_field_new_spec.js14
-rw-r--r--spec/frontend/releases/components/tag_field_spec.js6
-rw-r--r--spec/frontend/releases/stores/modules/detail/actions_spec.js10
-rw-r--r--spec/frontend/releases/stores/modules/detail/getters_spec.js4
-rw-r--r--spec/frontend/releases/stores/modules/detail/mutations_spec.js8
-rw-r--r--spec/frontend/releases/stores/modules/list/actions_spec.js6
-rw-r--r--spec/frontend/releases/stores/modules/list/helpers.js2
-rw-r--r--spec/frontend/releases/stores/modules/list/mutations_spec.js6
-rw-r--r--spec/frontend/reports/components/report_section_spec.js22
-rw-r--r--spec/frontend/reports/grouped_test_report/components/modal_spec.js9
-rw-r--r--spec/frontend/reports/grouped_test_report/components/test_issue_body_spec.js2
-rw-r--r--spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js41
-rw-r--r--spec/frontend/reports/grouped_test_report/store/actions_spec.js17
-rw-r--r--spec/frontend/reports/grouped_test_report/store/mutations_spec.js8
-rw-r--r--spec/frontend/reports/grouped_test_report/store/utils_spec.js14
-rw-r--r--spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap40
-rw-r--r--spec/frontend/repository/components/blob_content_viewer_spec.js86
-rw-r--r--spec/frontend/repository/components/breadcrumbs_spec.js84
-rw-r--r--spec/frontend/repository/components/table/row_spec.js5
-rw-r--r--spec/frontend/repository/pages/blob_spec.js25
-rw-r--r--spec/frontend/repository/router_spec.js2
-rw-r--r--spec/frontend/runner/runner_detail/runner_detail_app_spec.js29
-rw-r--r--spec/frontend/security_configuration/configuration_table_spec.js2
-rw-r--r--spec/frontend/security_configuration/manage_sast_spec.js2
-rw-r--r--spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap4
-rw-r--r--spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js58
-rw-r--r--spec/frontend/sidebar/assignees_realtime_spec.js15
-rw-r--r--spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js558
-rw-r--r--spec/frontend/sidebar/components/assignees/sidebar_editable_item_spec.js33
-rw-r--r--spec/frontend/sidebar/components/assignees/sidebar_invite_members_spec.js59
-rw-r--r--spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js43
-rw-r--r--spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js4
-rw-r--r--spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js4
-rw-r--r--spec/frontend/sidebar/components/copy_email_to_clipboard_spec.js19
-rw-r--r--spec/frontend/sidebar/components/due_date/sidebar_due_date_widget_spec.js106
-rw-r--r--spec/frontend/sidebar/components/reference/sidebar_reference_widget_spec.js70
-rw-r--r--spec/frontend/sidebar/issuable_assignees_spec.js19
-rw-r--r--spec/frontend/sidebar/mock_data.js156
-rw-r--r--spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap2
-rw-r--r--spec/frontend/snippets/components/edit_spec.js71
-rw-r--r--spec/frontend/tags/components/sort_dropdown_spec.js81
-rw-r--r--spec/frontend/tracking_spec.js76
-rw-r--r--spec/frontend/users_select/index_spec.js223
-rw-r--r--spec/frontend/vue_alerts_spec.js8
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_author_time_spec.js43
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js156
-rw-r--r--spec/frontend/vue_mr_widget/components/mr_widget_pipeline_spec.js91
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js29
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js19
-rw-r--r--spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js166
-rw-r--r--spec/frontend/vue_shared/alert_details/alert_details_spec.js10
-rw-r--r--spec/frontend/vue_shared/alert_details/alert_status_spec.js19
-rw-r--r--spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_spec.js16
-rw-r--r--spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_status_spec.js31
-rw-r--r--spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap2
-rw-r--r--spec/frontend/vue_shared/components/alert_details_table_spec.js83
-rw-r--r--spec/frontend/vue_shared/components/blob_viewers/__snapshots__/simple_viewer_spec.js.snap141
-rw-r--r--spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js41
-rw-r--r--spec/frontend/vue_shared/components/delete_label_modal_spec.js64
-rw-r--r--spec/frontend/vue_shared/components/deprecated_modal_spec.js73
-rw-r--r--spec/frontend/vue_shared/components/ensure_data_spec.js145
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js47
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js217
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js180
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js16
-rw-r--r--spec/frontend/vue_shared/components/gl_toggle_vuex_spec.js114
-rw-r--r--spec/frontend/vue_shared/components/help_popover_spec.js14
-rw-r--r--spec/frontend/vue_shared/components/lib/utils/props_utils_spec.js91
-rw-r--r--spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js34
-rw-r--r--spec/frontend/vue_shared/components/markdown/toolbar_spec.js64
-rw-r--r--spec/frontend/vue_shared/components/recaptcha_eventhub_spec.js21
-rw-r--r--spec/frontend/vue_shared/components/recaptcha_modal_spec.js35
-rw-r--r--spec/frontend/vue_shared/components/registry/registry_search_spec.js57
-rw-r--r--spec/frontend/vue_shared/components/remove_member_modal_spec.js61
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/mock_data.js16
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js184
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js110
-rw-r--r--spec/frontend/vue_shared/components/sidebar/copyable_field_spec.js74
-rw-r--r--spec/frontend/vue_shared/components/url_sync_spec.js97
-rw-r--r--spec/frontend/vue_shared/components/user_popover/user_popover_spec.js6
-rw-r--r--spec/frontend/vue_shared/oncall_schedules_list_spec.js87
-rw-r--r--spec/frontend/whats_new/components/app_spec.js59
-rw-r--r--spec/frontend/whats_new/store/actions_spec.js11
-rw-r--r--spec/frontend/whats_new/utils/notification_spec.js23
-rw-r--r--spec/frontend/wikis_spec.js153
-rw-r--r--spec/frontend_integration/diffs/diffs_interopability_api.js25
-rw-r--r--spec/frontend_integration/diffs/diffs_interopability_spec.js161
-rw-r--r--spec/frontend_integration/test_helpers/fixtures.js6
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/graphql.js11
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/index.js2
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/diffs.js22
-rw-r--r--spec/frontend_integration/test_helpers/mock_server/routes/index.js1
-rw-r--r--spec/graphql/features/authorization_spec.rb134
-rw-r--r--spec/graphql/gitlab_schema_spec.rb39
-rw-r--r--spec/graphql/mutations/boards/issues/issue_move_list_spec.rb66
-rw-r--r--spec/graphql/mutations/concerns/mutations/can_mutate_spammable_spec.rb46
-rw-r--r--spec/graphql/mutations/design_management/upload_spec.rb14
-rw-r--r--spec/graphql/mutations/issues/set_assignees_spec.rb7
-rw-r--r--spec/graphql/mutations/merge_requests/set_assignees_spec.rb7
-rw-r--r--spec/graphql/mutations/release_asset_links/delete_spec.rb58
-rw-r--r--spec/graphql/mutations/release_asset_links/update_spec.rb2
-rw-r--r--spec/graphql/resolvers/alert_management/http_integrations_resolver_spec.rb20
-rw-r--r--spec/graphql/resolvers/alert_management/integrations_resolver_spec.rb39
-rw-r--r--spec/graphql/resolvers/blobs_resolver_spec.rb74
-rw-r--r--spec/graphql/resolvers/board_list_issues_resolver_spec.rb18
-rw-r--r--spec/graphql/resolvers/ci/jobs_resolver_spec.rb17
-rw-r--r--spec/graphql/resolvers/ci/runner_platforms_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/ci/runner_setup_resolver_spec.rb77
-rw-r--r--spec/graphql/resolvers/ci/test_report_summary_resolver_spec.rb46
-rw-r--r--spec/graphql/resolvers/ci/test_suite_resolver_spec.rb54
-rw-r--r--spec/graphql/resolvers/concerns/looks_ahead_spec.rb9
-rw-r--r--spec/graphql/resolvers/group_milestones_resolver_spec.rb51
-rw-r--r--spec/graphql/resolvers/issue_status_counts_resolver_spec.rb8
-rw-r--r--spec/graphql/resolvers/issues_resolver_spec.rb45
-rw-r--r--spec/graphql/resolvers/merge_requests_resolver_spec.rb64
-rw-r--r--spec/graphql/resolvers/namespace_projects_resolver_spec.rb2
-rw-r--r--spec/graphql/resolvers/project_jobs_resolver_spec.rb51
-rw-r--r--spec/graphql/resolvers/project_pipeline_resolver_spec.rb15
-rw-r--r--spec/graphql/resolvers/repository_branch_names_resolver_spec.rb36
-rw-r--r--spec/graphql/resolvers/timelog_resolver_spec.rb168
-rw-r--r--spec/graphql/resolvers/users/snippets_resolver_spec.rb14
-rw-r--r--spec/graphql/types/admin/analytics/usage_trends/measurement_type_spec.rb3
-rw-r--r--spec/graphql/types/alert_management/prometheus_integration_type_spec.rb24
-rw-r--r--spec/graphql/types/base_enum_spec.rb48
-rw-r--r--spec/graphql/types/base_object_spec.rb432
-rw-r--r--spec/graphql/types/board_type_spec.rb14
-rw-r--r--spec/graphql/types/boards/board_issue_input_type_spec.rb4
-rw-r--r--spec/graphql/types/ci/job_status_enum_spec.rb13
-rw-r--r--spec/graphql/types/ci/job_type_spec.rb28
-rw-r--r--spec/graphql/types/ci/pipeline_type_spec.rb5
-rw-r--r--spec/graphql/types/ci/recent_failures_type_spec.rb15
-rw-r--r--spec/graphql/types/ci/stage_type_spec.rb1
-rw-r--r--spec/graphql/types/ci/test_case_status_enum_spec.rb13
-rw-r--r--spec/graphql/types/ci/test_case_type_spec.rb15
-rw-r--r--spec/graphql/types/ci/test_report_summary_type_spec.rb15
-rw-r--r--spec/graphql/types/ci/test_report_total_type_spec.rb15
-rw-r--r--spec/graphql/types/ci/test_suite_summary_type_spec.rb15
-rw-r--r--spec/graphql/types/ci/test_suite_type_spec.rb15
-rw-r--r--spec/graphql/types/global_id_type_spec.rb1
-rw-r--r--spec/graphql/types/issue_type_spec.rb2
-rw-r--r--spec/graphql/types/merge_request_review_state_enum_spec.rb18
-rw-r--r--spec/graphql/types/merge_requests/reviewer_type_spec.rb50
-rw-r--r--spec/graphql/types/milestone_type_spec.rb2
-rw-r--r--spec/graphql/types/packages/conan/file_metadatum_type_spec.rb13
-rw-r--r--spec/graphql/types/packages/conan/metadatum_file_type_enum_spec.rb13
-rw-r--r--spec/graphql/types/packages/conan/metadatum_type_spec.rb13
-rw-r--r--spec/graphql/types/packages/package_details_type_spec.rb (renamed from spec/graphql/types/packages/package_without_versions_type_spec.rb)4
-rw-r--r--spec/graphql/types/packages/package_file_type_spec.rb13
-rw-r--r--spec/graphql/types/packages/package_type_spec.rb2
-rw-r--r--spec/graphql/types/project_type_spec.rb32
-rw-r--r--spec/graphql/types/query_type_spec.rb2
-rw-r--r--spec/graphql/types/repository/blob_type_spec.rb9
-rw-r--r--spec/graphql/types/repository_type_spec.rb4
-rw-r--r--spec/graphql/types/snippet_type_spec.rb2
-rw-r--r--spec/graphql/types/timelog_type_spec.rb35
-rw-r--r--spec/graphql/types/user_merge_request_interaction_type_spec.rb116
-rw-r--r--spec/helpers/application_helper_spec.rb18
-rw-r--r--spec/helpers/avatars_helper_spec.rb24
-rw-r--r--spec/helpers/blob_helper_spec.rb10
-rw-r--r--spec/helpers/boards_helper_spec.rb24
-rw-r--r--spec/helpers/broadcast_messages_helper_spec.rb1
-rw-r--r--spec/helpers/button_helper_spec.rb7
-rw-r--r--spec/helpers/ci/pipeline_editor_helper_spec.rb32
-rw-r--r--spec/helpers/ci/runners_helper_spec.rb30
-rw-r--r--spec/helpers/commits_helper_spec.rb127
-rw-r--r--spec/helpers/diff_helper_spec.rb38
-rw-r--r--spec/helpers/graph_helper_spec.rb4
-rw-r--r--spec/helpers/groups_helper_spec.rb161
-rw-r--r--spec/helpers/ide_helper_spec.rb8
-rw-r--r--spec/helpers/invite_members_helper_spec.rb3
-rw-r--r--spec/helpers/issuables_helper_spec.rb54
-rw-r--r--spec/helpers/issues_helper_spec.rb51
-rw-r--r--spec/helpers/jira_connect_helper_spec.rb1
-rw-r--r--spec/helpers/labels_helper_spec.rb1
-rw-r--r--spec/helpers/learn_gitlab_helper_spec.rb1
-rw-r--r--spec/helpers/markup_helper_spec.rb25
-rw-r--r--spec/helpers/namespaces_helper_spec.rb71
-rw-r--r--spec/helpers/nav_helper_spec.rb6
-rw-r--r--spec/helpers/notes_helper_spec.rb2
-rw-r--r--spec/helpers/page_layout_helper_spec.rb52
-rw-r--r--spec/helpers/preferences_helper_spec.rb2
-rw-r--r--spec/helpers/profiles_helper_spec.rb40
-rw-r--r--spec/helpers/projects/alert_management_helper_spec.rb1
-rw-r--r--spec/helpers/projects/issues/service_desk_helper_spec.rb1
-rw-r--r--spec/helpers/projects/project_members_helper_spec.rb2
-rw-r--r--spec/helpers/projects/terraform_helper_spec.rb1
-rw-r--r--spec/helpers/projects_helper_spec.rb1
-rw-r--r--spec/helpers/search_helper_spec.rb1
-rw-r--r--spec/helpers/services_helper_spec.rb14
-rw-r--r--spec/helpers/sidebars_helper_spec.rb45
-rw-r--r--spec/helpers/snippets_helper_spec.rb28
-rw-r--r--spec/helpers/tab_helper_spec.rb115
-rw-r--r--spec/helpers/timeboxes_helper_spec.rb36
-rw-r--r--spec/helpers/todos_helper_spec.rb20
-rw-r--r--spec/helpers/tracking_helper_spec.rb2
-rw-r--r--spec/helpers/user_callouts_helper_spec.rb42
-rw-r--r--spec/helpers/whats_new_helper_spec.rb48
-rw-r--r--spec/helpers/wiki_helper_spec.rb2
-rw-r--r--spec/initializers/active_record_locking_spec.rb4
-rw-r--r--spec/initializers/fog_google_https_private_urls_spec.rb2
-rw-r--r--spec/initializers/json_validator_patch_spec.rb39
-rw-r--r--spec/initializers/pages_storage_check_spec.rb94
-rw-r--r--spec/knapsack_env.rb11
-rw-r--r--spec/lib/api/entities/clusters/agent_spec.rb16
-rw-r--r--spec/lib/api/entities/design_management/design_spec.rb1
-rw-r--r--spec/lib/api/entities/merge_request_changes_spec.rb1
-rw-r--r--spec/lib/api/entities/project_import_failed_relation_spec.rb2
-rw-r--r--spec/lib/api/entities/release_spec.rb1
-rw-r--r--spec/lib/api/helpers/authentication_spec.rb15
-rw-r--r--spec/lib/api/helpers/caching_spec.rb139
-rw-r--r--spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb10
-rw-r--r--spec/lib/api/helpers/packages_manager_clients_helpers_spec.rb3
-rw-r--r--spec/lib/api/helpers/variables_helpers_spec.rb43
-rw-r--r--spec/lib/api/helpers_spec.rb109
-rw-r--r--spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb2
-rw-r--r--spec/lib/banzai/filter/commit_trailers_filter_spec.rb6
-rw-r--r--spec/lib/banzai/filter/gollum_tags_filter_spec.rb12
-rw-r--r--spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb3
-rw-r--r--spec/lib/banzai/filter/math_filter_spec.rb10
-rw-r--r--spec/lib/banzai/filter/references/abstract_reference_filter_spec.rb (renamed from spec/lib/banzai/filter/abstract_reference_filter_spec.rb)2
-rw-r--r--spec/lib/banzai/filter/references/alert_reference_filter_spec.rb (renamed from spec/lib/banzai/filter/alert_reference_filter_spec.rb)2
-rw-r--r--spec/lib/banzai/filter/references/commit_range_reference_filter_spec.rb (renamed from spec/lib/banzai/filter/commit_range_reference_filter_spec.rb)2
-rw-r--r--spec/lib/banzai/filter/references/commit_reference_filter_spec.rb (renamed from spec/lib/banzai/filter/commit_reference_filter_spec.rb)2
-rw-r--r--spec/lib/banzai/filter/references/design_reference_filter_spec.rb (renamed from spec/lib/banzai/filter/design_reference_filter_spec.rb)2
-rw-r--r--spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb (renamed from spec/lib/banzai/filter/external_issue_reference_filter_spec.rb)3
-rw-r--r--spec/lib/banzai/filter/references/feature_flag_reference_filter_spec.rb (renamed from spec/lib/banzai/filter/feature_flag_reference_filter_spec.rb)2
-rw-r--r--spec/lib/banzai/filter/references/issue_reference_filter_spec.rb (renamed from spec/lib/banzai/filter/issue_reference_filter_spec.rb)2
-rw-r--r--spec/lib/banzai/filter/references/label_reference_filter_spec.rb (renamed from spec/lib/banzai/filter/label_reference_filter_spec.rb)2
-rw-r--r--spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb (renamed from spec/lib/banzai/filter/merge_request_reference_filter_spec.rb)2
-rw-r--r--spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb (renamed from spec/lib/banzai/filter/milestone_reference_filter_spec.rb)2
-rw-r--r--spec/lib/banzai/filter/references/project_reference_filter_spec.rb (renamed from spec/lib/banzai/filter/project_reference_filter_spec.rb)2
-rw-r--r--spec/lib/banzai/filter/references/reference_filter_spec.rb (renamed from spec/lib/banzai/filter/reference_filter_spec.rb)2
-rw-r--r--spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb (renamed from spec/lib/banzai/filter/snippet_reference_filter_spec.rb)2
-rw-r--r--spec/lib/banzai/filter/references/user_reference_filter_spec.rb (renamed from spec/lib/banzai/filter/user_reference_filter_spec.rb)2
-rw-r--r--spec/lib/banzai/filter/suggestion_filter_spec.rb4
-rw-r--r--spec/lib/banzai/filter/syntax_highlight_filter_spec.rb12
-rw-r--r--spec/lib/banzai/filter/upload_link_filter_spec.rb2
-rw-r--r--spec/lib/banzai/filter/wiki_link_filter_spec.rb9
-rw-r--r--spec/lib/banzai/pipeline/gfm_pipeline_spec.rb3
-rw-r--r--spec/lib/banzai/pipeline/wiki_pipeline_spec.rb25
-rw-r--r--spec/lib/banzai/reference_parser/external_issue_parser_spec.rb2
-rw-r--r--spec/lib/banzai/reference_redactor_spec.rb2
-rw-r--r--spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb31
-rw-r--r--spec/lib/bulk_imports/common/transformers/user_reference_transformer_spec.rb28
-rw-r--r--spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb3
-rw-r--r--spec/lib/bulk_imports/groups/graphql/get_group_query_spec.rb6
-rw-r--r--spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb4
-rw-r--r--spec/lib/bulk_imports/groups/graphql/get_members_query_spec.rb4
-rw-r--r--spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb4
-rw-r--r--spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb11
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb116
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb37
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb14
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb87
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb14
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb90
-rw-r--r--spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb30
-rw-r--r--spec/lib/bulk_imports/groups/rest/get_badges_query_spec.rb22
-rw-r--r--spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb23
-rw-r--r--spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb3
-rw-r--r--spec/lib/bulk_imports/importers/group_importer_spec.rb57
-rw-r--r--spec/lib/bulk_imports/pipeline/context_spec.rb45
-rw-r--r--spec/lib/bulk_imports/pipeline/extracted_data_spec.rb2
-rw-r--r--spec/lib/bulk_imports/pipeline/runner_spec.rb65
-rw-r--r--spec/lib/bulk_imports/pipeline_spec.rb10
-rw-r--r--spec/lib/constraints/admin_constrainer_spec.rb6
-rw-r--r--spec/lib/feature_spec.rb92
-rw-r--r--spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb30
-rw-r--r--spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb70
-rw-r--r--spec/lib/gitlab/alert_management/alert_status_counts_spec.rb1
-rw-r--r--spec/lib/gitlab/alert_management/payload/base_spec.rb1
-rw-r--r--spec/lib/gitlab/alert_management/payload/generic_spec.rb1
-rw-r--r--spec/lib/gitlab/alert_management/payload/managed_prometheus_spec.rb2
-rw-r--r--spec/lib/gitlab/alert_management/payload/prometheus_spec.rb1
-rw-r--r--spec/lib/gitlab/alert_management/payload_spec.rb1
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb1
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb1
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb44
-rw-r--r--spec/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start_spec.rb2
-rw-r--r--spec/lib/gitlab/analytics/unique_visits_spec.rb22
-rw-r--r--spec/lib/gitlab/application_context_spec.rb16
-rw-r--r--spec/lib/gitlab/asciidoc_spec.rb6
-rw-r--r--spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb2
-rw-r--r--spec/lib/gitlab/auth/otp/strategies/devise_spec.rb1
-rw-r--r--spec/lib/gitlab/auth/otp/strategies/forti_authenticator_spec.rb1
-rw-r--r--spec/lib/gitlab/auth/otp/strategies/forti_token_cloud_spec.rb1
-rw-r--r--spec/lib/gitlab/auth_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb8
-rw-r--r--spec/lib/gitlab/background_migration/migrate_pages_to_zip_storage_spec.rb43
-rw-r--r--spec/lib/gitlab/bullet/exclusions_spec.rb155
-rw-r--r--spec/lib/gitlab/bullet_spec.rb51
-rw-r--r--spec/lib/gitlab/changelog/config_spec.rb4
-rw-r--r--spec/lib/gitlab/checks/project_created_spec.rb1
-rw-r--r--spec/lib/gitlab/checks/project_moved_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/ansi2json/style_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/config/entry/bridge_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/config/entry/cache_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/jobs_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/config/entry/processable_spec.rb26
-rw-r--r--spec/lib/gitlab/ci/config/entry/root_spec.rb20
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper_spec.rb34
-rw-r--r--spec/lib/gitlab/ci/config/external/processor_spec.rb35
-rw-r--r--spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb20
-rw-r--r--spec/lib/gitlab/ci/lint_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/parsers/codequality/code_climate_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/command_spec.rb21
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb20
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb61
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb13
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/pipeline/process_spec.rb31
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb159
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb201
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build_spec.rb139
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb109
-rw-r--r--spec/lib/gitlab/ci/reports/codequality_reports_spec.rb34
-rw-r--r--spec/lib/gitlab/ci/reports/test_failure_history_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/runner_instructions_spec.rb116
-rw-r--r--spec/lib/gitlab/ci/status/build/common_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/status/composite_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/trace_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/variables/helpers_spec.rb46
-rw-r--r--spec/lib/gitlab/ci/yaml_processor/result_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb139
-rw-r--r--spec/lib/gitlab/composer/version_index_spec.rb5
-rw-r--r--spec/lib/gitlab/conflict/file_spec.rb6
-rw-r--r--spec/lib/gitlab/crypto_helper_spec.rb58
-rw-r--r--spec/lib/gitlab/data_builder/build_spec.rb3
-rw-r--r--spec/lib/gitlab/data_builder/pipeline_spec.rb1
-rw-r--r--spec/lib/gitlab/database/background_migration/batch_metrics_spec.rb31
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb198
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_spec.rb51
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb94
-rw-r--r--spec/lib/gitlab/database/background_migration/scheduler_spec.rb182
-rw-r--r--spec/lib/gitlab/database/batch_count_spec.rb76
-rw-r--r--spec/lib/gitlab/database/loose_index_scan_distinct_count_spec.rb71
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb241
-rw-r--r--spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb28
-rw-r--r--spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb66
-rw-r--r--spec/lib/gitlab/database/pg_class_spec.rb37
-rw-r--r--spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb3
-rw-r--r--spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb191
-rw-r--r--spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb2
-rw-r--r--spec/lib/gitlab/database_spec.rb4
-rw-r--r--spec/lib/gitlab/diff/char_diff_spec.rb6
-rw-r--r--spec/lib/gitlab/diff/highlight_cache_spec.rb26
-rw-r--r--spec/lib/gitlab/diff/highlight_spec.rb20
-rw-r--r--spec/lib/gitlab/diff/inline_diff_spec.rb74
-rw-r--r--spec/lib/gitlab/diff/line_spec.rb35
-rw-r--r--spec/lib/gitlab/diff/lines_unfolder_spec.rb18
-rw-r--r--spec/lib/gitlab/diff/suggestions_parser_spec.rb3
-rw-r--r--spec/lib/gitlab/downtime_check/message_spec.rb41
-rw-r--r--spec/lib/gitlab/downtime_check_spec.rb116
-rw-r--r--spec/lib/gitlab/email/handler/create_note_handler_spec.rb8
-rw-r--r--spec/lib/gitlab/email/handler/create_note_on_issuable_handler_spec.rb2
-rw-r--r--spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb2
-rw-r--r--spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb93
-rw-r--r--spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb90
-rw-r--r--spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb96
-rw-r--r--spec/lib/gitlab/error_tracking_spec.rb108
-rw-r--r--spec/lib/gitlab/experimentation_spec.rb1
-rw-r--r--spec/lib/gitlab/git/diff_collection_spec.rb86
-rw-r--r--spec/lib/gitlab/git/repository_spec.rb40
-rw-r--r--spec/lib/gitlab/git/tag_spec.rb13
-rw-r--r--spec/lib/gitlab/gitaly_client/blob_service_spec.rb43
-rw-r--r--spec/lib/gitlab/gitaly_client/call_spec.rb13
-rw-r--r--spec/lib/gitlab/gitaly_client/object_pool_service_spec.rb4
-rw-r--r--spec/lib/gitlab/gitaly_client/operation_service_spec.rb26
-rw-r--r--spec/lib/gitlab/gitaly_client/repository_service_spec.rb17
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb1
-rw-r--r--spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb1
-rw-r--r--spec/lib/gitlab/github_import/milestone_finder_spec.rb1
-rw-r--r--spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb253
-rw-r--r--spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb80
-rw-r--r--spec/lib/gitlab/graphql/authorize/object_authorization_spec.rb64
-rw-r--r--spec/lib/gitlab/graphql/batch_key_spec.rb1
-rw-r--r--spec/lib/gitlab/graphql/deprecation_spec.rb213
-rw-r--r--spec/lib/gitlab/graphql/docs/renderer_spec.rb80
-rw-r--r--spec/lib/gitlab/graphql/loaders/batch_lfs_oid_loader_spec.rb1
-rw-r--r--spec/lib/gitlab/graphql/markdown_field_spec.rb1
-rw-r--r--spec/lib/gitlab/graphql/negatable_arguments_spec.rb45
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb1
-rw-r--r--spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb1
-rw-r--r--spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb47
-rw-r--r--spec/lib/gitlab/highlight_spec.rb15
-rw-r--r--spec/lib/gitlab/hook_data/issue_builder_spec.rb1
-rw-r--r--spec/lib/gitlab/hook_data/merge_request_builder_spec.rb1
-rw-r--r--spec/lib/gitlab/hook_data/release_builder_spec.rb1
-rw-r--r--spec/lib/gitlab/hook_data/user_builder_spec.rb90
-rw-r--r--spec/lib/gitlab/http_connection_adapter_spec.rb125
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml5
-rw-r--r--spec/lib/gitlab/import_export/design_repo_saver_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/project/export_task_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/project/tree_restorer_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/project/tree_saver_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/repo_saver_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/snippet_repo_saver_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/snippets_repo_saver_spec.rb1
-rw-r--r--spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb1
-rw-r--r--spec/lib/gitlab/instrumentation_helper_spec.rb47
-rw-r--r--spec/lib/gitlab/jira_import/base_importer_spec.rb1
-rw-r--r--spec/lib/gitlab/jira_import/handle_labels_service_spec.rb1
-rw-r--r--spec/lib/gitlab/jira_import_spec.rb1
-rw-r--r--spec/lib/gitlab/json_spec.rb60
-rw-r--r--spec/lib/gitlab/legacy_github_import/importer_spec.rb2
-rw-r--r--spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb2
-rw-r--r--spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb2
-rw-r--r--spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb2
-rw-r--r--spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb26
-rw-r--r--spec/lib/gitlab/markdown_cache/redis/extension_spec.rb3
-rw-r--r--spec/lib/gitlab/markdown_cache/redis/store_spec.rb4
-rw-r--r--spec/lib/gitlab/marker_range_spec.rb33
-rw-r--r--spec/lib/gitlab/metrics/background_transaction_spec.rb53
-rw-r--r--spec/lib/gitlab/metrics/subscribers/active_record_spec.rb171
-rw-r--r--spec/lib/gitlab/metrics/subscribers/external_http_spec.rb57
-rw-r--r--spec/lib/gitlab/middleware/rack_multipart_tempfile_factory_spec.rb94
-rw-r--r--spec/lib/gitlab/object_hierarchy_spec.rb97
-rw-r--r--spec/lib/gitlab/pages/settings_spec.rb42
-rw-r--r--spec/lib/gitlab/pages/stores/local_store_spec.rb25
-rw-r--r--spec/lib/gitlab/pages_transfer_spec.rb4
-rw-r--r--spec/lib/gitlab/pagination/keyset/order_spec.rb55
-rw-r--r--spec/lib/gitlab/pagination/offset_header_builder_with_controller_spec.rb54
-rw-r--r--spec/lib/gitlab/phabricator_import/cache/map_spec.rb1
-rw-r--r--spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb1
-rw-r--r--spec/lib/gitlab/profiler_spec.rb9
-rw-r--r--spec/lib/gitlab/prometheus/adapter_spec.rb16
-rw-r--r--spec/lib/gitlab/query_limiting/transaction_spec.rb10
-rw-r--r--spec/lib/gitlab/query_limiting_spec.rb82
-rw-r--r--spec/lib/gitlab/quick_actions/command_definition_spec.rb8
-rw-r--r--spec/lib/gitlab/regex_spec.rb9
-rw-r--r--spec/lib/gitlab/repository_cache_adapter_spec.rb31
-rw-r--r--spec/lib/gitlab/repository_set_cache_spec.rb23
-rw-r--r--spec/lib/gitlab/sanitizers/exif_spec.rb2
-rw-r--r--spec/lib/gitlab/search_context/builder_spec.rb29
-rw-r--r--spec/lib/gitlab/sidekiq_cluster/cli_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb84
-rw-r--r--spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb129
-rw-r--r--spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb97
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/admin_mode/client_spec.rb4
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/admin_mode/server_spec.rb4
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb153
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/instrumentation_logger_spec.rb101
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb318
-rw-r--r--spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb2
-rw-r--r--spec/lib/gitlab/sidekiq_middleware_spec.rb6
-rw-r--r--spec/lib/gitlab/slash_commands/presenters/issue_comment_spec.rb1
-rw-r--r--spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb1
-rw-r--r--spec/lib/gitlab/slash_commands/presenters/issue_new_spec.rb13
-rw-r--r--spec/lib/gitlab/slash_commands/run_spec.rb20
-rw-r--r--spec/lib/gitlab/snippet_search_results_spec.rb1
-rw-r--r--spec/lib/gitlab/sourcegraph_spec.rb1
-rw-r--r--spec/lib/gitlab/sql/cte_spec.rb20
-rw-r--r--spec/lib/gitlab/sql/recursive_cte_spec.rb13
-rw-r--r--spec/lib/gitlab/subscription_portal_spec.rb56
-rw-r--r--spec/lib/gitlab/template/finders/repo_template_finders_spec.rb1
-rw-r--r--spec/lib/gitlab/tracking/destinations/snowplow_spec.rb23
-rw-r--r--spec/lib/gitlab/tracking/standard_context_spec.rb12
-rw-r--r--spec/lib/gitlab/tracking_spec.rb32
-rw-r--r--spec/lib/gitlab/tree_summary_spec.rb1
-rw-r--r--spec/lib/gitlab/untrusted_regexp_spec.rb18
-rw-r--r--spec/lib/gitlab/url_builder_spec.rb1
-rw-r--r--spec/lib/gitlab/usage/metric_definition_spec.rb85
-rw-r--r--spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb1
-rw-r--r--spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb51
-rw-r--r--spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/joins_spec.rb40
-rw-r--r--spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb2
-rw-r--r--spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb36
-rw-r--r--spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb18
-rw-r--r--spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb20
-rw-r--r--spec/lib/gitlab/usage_data_non_sql_metrics_spec.rb55
-rw-r--r--spec/lib/gitlab/usage_data_queries_spec.rb32
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb42
-rw-r--r--spec/lib/gitlab/utils/lazy_attributes_spec.rb6
-rw-r--r--spec/lib/gitlab/utils/usage_data_spec.rb19
-rw-r--r--spec/lib/gitlab/utils_spec.rb8
-rw-r--r--spec/lib/gitlab/web_ide/config/entry/global_spec.rb1
-rw-r--r--spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb5
-rw-r--r--spec/lib/gitlab/word_diff/chunk_collection_spec.rb23
-rw-r--r--spec/lib/gitlab/word_diff/parser_spec.rb40
-rw-r--r--spec/lib/gitlab/workhorse_spec.rb1
-rw-r--r--spec/lib/gitlab_spec.rb118
-rw-r--r--spec/lib/kramdown/kramdown_spec.rb (renamed from spec/initializers/kramdown_patch_spec.rb)2
-rw-r--r--spec/lib/marginalia_spec.rb52
-rw-r--r--spec/lib/mattermost/command_spec.rb2
-rw-r--r--spec/lib/mattermost/session_spec.rb2
-rw-r--r--spec/lib/mattermost/team_spec.rb2
-rw-r--r--spec/lib/peek/views/active_record_spec.rb27
-rw-r--r--spec/lib/peek/views/external_http_spec.rb33
-rw-r--r--spec/lib/rouge/formatters/html_gitlab_spec.rb40
-rw-r--r--spec/mailers/devise_mailer_spec.rb32
-rw-r--r--spec/mailers/emails/in_product_marketing_spec.rb34
-rw-r--r--spec/mailers/emails/merge_requests_spec.rb24
-rw-r--r--spec/mailers/emails/profile_spec.rb100
-rw-r--r--spec/mailers/emails/projects_spec.rb1
-rw-r--r--spec/mailers/emails/releases_spec.rb1
-rw-r--r--spec/mailers/emails/service_desk_spec.rb1
-rw-r--r--spec/mailers/notify_spec.rb96
-rw-r--r--spec/migrations/20210226141517_dedup_issue_metrics_spec.rb66
-rw-r--r--spec/migrations/20210406144743_backfill_total_tuple_count_for_batched_migrations_spec.rb43
-rw-r--r--spec/migrations/20210413132500_reschedule_artifact_expiry_backfill_again_spec.rb38
-rw-r--r--spec/migrations/add_new_trail_plans_spec.rb95
-rw-r--r--spec/migrations/backfill_operations_feature_flags_active_spec.rb4
-rw-r--r--spec/migrations/backfill_operations_feature_flags_iid_spec.rb4
-rw-r--r--spec/migrations/clean_up_noteable_id_for_notes_on_commits_spec.rb2
-rw-r--r--spec/migrations/confirm_support_bot_user_spec.rb86
-rw-r--r--spec/migrations/delete_internal_ids_where_feature_flags_usage_spec.rb4
-rw-r--r--spec/migrations/delete_security_findings_without_uuid_spec.rb35
-rw-r--r--spec/migrations/migrate_bot_type_to_user_type_spec.rb2
-rw-r--r--spec/migrations/migrate_elastic_index_settings_spec.rb44
-rw-r--r--spec/migrations/migrate_ops_feature_flags_scopes_target_user_ids_spec.rb4
-rw-r--r--spec/migrations/move_container_registry_enabled_to_project_features2_spec.rb (renamed from spec/migrations/move_container_registry_enabled_to_project_features_spec.rb)8
-rw-r--r--spec/migrations/populate_dismissal_information_for_vulnerabilities_spec.rb61
-rw-r--r--spec/migrations/remove_records_without_group_from_webhooks_table_spec.rb27
-rw-r--r--spec/migrations/schedule_migrate_pages_to_zip_storage_spec.rb46
-rw-r--r--spec/models/application_record_spec.rb36
-rw-r--r--spec/models/audit_event_archived_spec.rb52
-rw-r--r--spec/models/blob_viewer/gitlab_ci_yml_spec.rb1
-rw-r--r--spec/models/blob_viewer/metrics_dashboard_yml_spec.rb1
-rw-r--r--spec/models/bulk_imports/entity_spec.rb64
-rw-r--r--spec/models/bulk_imports/stage_spec.rb50
-rw-r--r--spec/models/bulk_imports/tracker_spec.rb56
-rw-r--r--spec/models/ci/artifact_blob_spec.rb1
-rw-r--r--spec/models/ci/bridge_spec.rb2
-rw-r--r--spec/models/ci/build_spec.rb162
-rw-r--r--spec/models/ci/build_trace_chunk_spec.rb1
-rw-r--r--spec/models/ci/daily_build_group_report_result_spec.rb1
-rw-r--r--spec/models/ci/job_artifact_spec.rb16
-rw-r--r--spec/models/ci/pipeline_schedule_spec.rb12
-rw-r--r--spec/models/ci/pipeline_spec.rb102
-rw-r--r--spec/models/ci/runner_spec.rb1
-rw-r--r--spec/models/ci/stage_spec.rb13
-rw-r--r--spec/models/ci/test_case_failure_spec.rb73
-rw-r--r--spec/models/ci/test_case_spec.rb31
-rw-r--r--spec/models/ci/unit_test_failure_spec.rb73
-rw-r--r--spec/models/ci/unit_test_spec.rb87
-rw-r--r--spec/models/clusters/agent_token_spec.rb49
-rw-r--r--spec/models/clusters/applications/prometheus_spec.rb94
-rw-r--r--spec/models/clusters/cluster_spec.rb76
-rw-r--r--spec/models/clusters/integrations/prometheus_spec.rb57
-rw-r--r--spec/models/commit_spec.rb2
-rw-r--r--spec/models/commit_status_spec.rb118
-rw-r--r--spec/models/concerns/batch_destroy_dependent_associations_spec.rb1
-rw-r--r--spec/models/concerns/cache_markdown_field_spec.rb10
-rw-r--r--spec/models/concerns/cacheable_attributes_spec.rb2
-rw-r--r--spec/models/concerns/cascading_namespace_setting_attribute_spec.rb320
-rw-r--r--spec/models/concerns/ci/artifactable_spec.rb28
-rw-r--r--spec/models/concerns/featurable_spec.rb3
-rw-r--r--spec/models/concerns/has_timelogs_report_spec.rb51
-rw-r--r--spec/models/concerns/issuable_spec.rb21
-rw-r--r--spec/models/concerns/milestoneable_spec.rb14
-rw-r--r--spec/models/concerns/milestoneish_spec.rb71
-rw-r--r--spec/models/concerns/participable_spec.rb71
-rw-r--r--spec/models/concerns/safe_url_spec.rb12
-rw-r--r--spec/models/concerns/sidebars/container_with_html_options_spec.rb21
-rw-r--r--spec/models/concerns/sidebars/positionable_list_spec.rb59
-rw-r--r--spec/models/concerns/sortable_spec.rb25
-rw-r--r--spec/models/concerns/subscribable_spec.rb51
-rw-r--r--spec/models/concerns/token_authenticatable_spec.rb4
-rw-r--r--spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb77
-rw-r--r--spec/models/concerns/token_authenticatable_strategies/encryption_helper_spec.rb27
-rw-r--r--spec/models/deploy_keys_project_spec.rb6
-rw-r--r--spec/models/deploy_token_spec.rb2
-rw-r--r--spec/models/deployment_spec.rb115
-rw-r--r--spec/models/environment_spec.rb33
-rw-r--r--spec/models/event_spec.rb8
-rw-r--r--spec/models/experiment_spec.rb29
-rw-r--r--spec/models/group_spec.rb367
-rw-r--r--spec/models/hooks/system_hook_spec.rb4
-rw-r--r--spec/models/hooks/web_hook_spec.rb8
-rw-r--r--spec/models/integration_spec.rb11
-rw-r--r--spec/models/internal_id_spec.rb61
-rw-r--r--spec/models/issue/metrics_spec.rb14
-rw-r--r--spec/models/issue_spec.rb38
-rw-r--r--spec/models/key_spec.rb22
-rw-r--r--spec/models/label_spec.rb22
-rw-r--r--spec/models/list_spec.rb13
-rw-r--r--spec/models/member_spec.rb28
-rw-r--r--spec/models/members/group_member_spec.rb6
-rw-r--r--spec/models/members/last_group_owner_assigner_spec.rb98
-rw-r--r--spec/models/members/project_member_spec.rb4
-rw-r--r--spec/models/merge_request_spec.rb136
-rw-r--r--spec/models/namespace/admin_note_spec.rb16
-rw-r--r--spec/models/namespace/traversal_hierarchy_spec.rb56
-rw-r--r--spec/models/namespace_setting_spec.rb31
-rw-r--r--spec/models/namespace_spec.rb169
-rw-r--r--spec/models/note_spec.rb105
-rw-r--r--spec/models/notification_setting_spec.rb14
-rw-r--r--spec/models/packages/debian/file_entry_spec.rb98
-rw-r--r--spec/models/packages/dependency_spec.rb2
-rw-r--r--spec/models/packages/go/module_version_spec.rb14
-rw-r--r--spec/models/packages/maven/metadatum_spec.rb14
-rw-r--r--spec/models/packages/package_spec.rb41
-rw-r--r--spec/models/pages/lookup_path_spec.rb10
-rw-r--r--spec/models/pages_deployment_spec.rb26
-rw-r--r--spec/models/preloaders/labels_preloader_spec.rb52
-rw-r--r--spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb37
-rw-r--r--spec/models/project_feature_usage_spec.rb90
-rw-r--r--spec/models/project_services/chat_message/alert_message_spec.rb1
-rw-r--r--spec/models/project_services/chat_message/merge_message_spec.rb26
-rw-r--r--spec/models/project_services/emails_on_push_service_spec.rb46
-rw-r--r--spec/models/project_services/jira_service_spec.rb89
-rw-r--r--spec/models/project_services/jira_tracker_data_spec.rb13
-rw-r--r--spec/models/project_services/prometheus_service_spec.rb2
-rw-r--r--spec/models/project_services/slack_service_spec.rb8
-rw-r--r--spec/models/project_spec.rb348
-rw-r--r--spec/models/protected_tag_spec.rb2
-rw-r--r--spec/models/raw_usage_data_spec.rb10
-rw-r--r--spec/models/release_highlight_spec.rb45
-rw-r--r--spec/models/release_spec.rb2
-rw-r--r--spec/models/remote_mirror_spec.rb24
-rw-r--r--spec/models/repository_spec.rb83
-rw-r--r--spec/models/sent_notification_spec.rb12
-rw-r--r--spec/models/service_spec.rb10
-rw-r--r--spec/models/sidebars/menu_spec.rb67
-rw-r--r--spec/models/sidebars/panel_spec.rb34
-rw-r--r--spec/models/sidebars/projects/context_spec.rb13
-rw-r--r--spec/models/sidebars/projects/menus/learn_gitlab/menu_spec.rb31
-rw-r--r--spec/models/sidebars/projects/menus/project_overview/menu_items/releases_spec.rb38
-rw-r--r--spec/models/sidebars/projects/menus/project_overview/menu_spec.rb18
-rw-r--r--spec/models/sidebars/projects/menus/repository/menu_spec.rb38
-rw-r--r--spec/models/sidebars/projects/panel_spec.rb14
-rw-r--r--spec/models/timelog_spec.rb12
-rw-r--r--spec/models/todo_spec.rb24
-rw-r--r--spec/models/user_callout_spec.rb38
-rw-r--r--spec/models/user_spec.rb78
-rw-r--r--spec/models/users/in_product_marketing_email_spec.rb131
-rw-r--r--spec/models/users/merge_request_interaction_spec.rb97
-rw-r--r--spec/policies/application_setting/term_policy_spec.rb1
-rw-r--r--spec/policies/ci/build_policy_spec.rb1
-rw-r--r--spec/policies/design_management/design_policy_spec.rb3
-rw-r--r--spec/policies/group_deploy_keys_group_policy_spec.rb1
-rw-r--r--spec/policies/group_policy_spec.rb51
-rw-r--r--spec/policies/project_policy_spec.rb52
-rw-r--r--spec/policies/project_snippet_policy_spec.rb1
-rw-r--r--spec/policies/service_policy_spec.rb1
-rw-r--r--spec/presenters/ci/build_runner_presenter_spec.rb87
-rw-r--r--spec/presenters/ci/trigger_presenter_spec.rb2
-rw-r--r--spec/presenters/clusters/cluster_presenter_spec.rb4
-rw-r--r--spec/presenters/packages/composer/packages_presenter_spec.rb9
-rw-r--r--spec/presenters/packages/conan/package_presenter_spec.rb2
-rw-r--r--spec/presenters/packages/detail/package_presenter_spec.rb1
-rw-r--r--spec/presenters/project_hook_presenter_spec.rb4
-rw-r--r--spec/presenters/projects/settings/deploy_keys_presenter_spec.rb45
-rw-r--r--spec/requests/admin/clusters/integrations_controller_spec.rb25
-rw-r--r--spec/requests/api/api_spec.rb10
-rw-r--r--spec/requests/api/applications_spec.rb8
-rw-r--r--spec/requests/api/ci/runner/jobs_artifacts_spec.rb42
-rw-r--r--spec/requests/api/ci/runner/jobs_put_spec.rb16
-rw-r--r--spec/requests/api/ci/runner/jobs_request_post_spec.rb33
-rw-r--r--spec/requests/api/ci/runner/jobs_trace_spec.rb18
-rw-r--r--spec/requests/api/commit_statuses_spec.rb8
-rw-r--r--spec/requests/api/commits_spec.rb36
-rw-r--r--spec/requests/api/composer_packages_spec.rb1
-rw-r--r--spec/requests/api/conan_project_packages_spec.rb2
-rw-r--r--spec/requests/api/deploy_keys_spec.rb31
-rw-r--r--spec/requests/api/deployments_spec.rb39
-rw-r--r--spec/requests/api/environments_spec.rb2
-rw-r--r--spec/requests/api/files_spec.rb15
-rw-r--r--spec/requests/api/generic_packages_spec.rb33
-rw-r--r--spec/requests/api/go_proxy_spec.rb4
-rw-r--r--spec/requests/api/graphql/ci/groups_spec.rb24
-rw-r--r--spec/requests/api/graphql/ci/job_spec.rb100
-rw-r--r--spec/requests/api/graphql/custom_emoji_query_spec.rb10
-rw-r--r--spec/requests/api/graphql/gitlab_schema_spec.rb22
-rw-r--r--spec/requests/api/graphql/group/milestones_spec.rb36
-rw-r--r--spec/requests/api/graphql/group/timelogs_spec.rb121
-rw-r--r--spec/requests/api/graphql/group_query_spec.rb10
-rw-r--r--spec/requests/api/graphql/mutations/boards/issues/issue_move_list_spec.rb37
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb55
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_labels_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/release_asset_links/delete_spec.rb49
-rw-r--r--spec/requests/api/graphql/mutations/snippets/create_spec.rb4
-rw-r--r--spec/requests/api/graphql/mutations/snippets/update_spec.rb7
-rw-r--r--spec/requests/api/graphql/packages/package_spec.rb121
-rw-r--r--spec/requests/api/graphql/project/alert_management/alert/assignees_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/alert_management/alert/notes_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/alert_management/alert/todos_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/alert_management/integrations_spec.rb98
-rw-r--r--spec/requests/api/graphql/project/container_repositories_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/issues_spec.rb84
-rw-r--r--spec/requests/api/graphql/project/merge_request_spec.rb288
-rw-r--r--spec/requests/api/graphql/project/merge_requests_spec.rb13
-rw-r--r--spec/requests/api/graphql/project/pipeline_spec.rb240
-rw-r--r--spec/requests/api/graphql/project/repository/blobs_spec.rb36
-rw-r--r--spec/requests/api/graphql/user_spec.rb8
-rw-r--r--spec/requests/api/graphql_spec.rb95
-rw-r--r--spec/requests/api/group_import_spec.rb2
-rw-r--r--spec/requests/api/group_milestones_spec.rb24
-rw-r--r--spec/requests/api/group_variables_spec.rb3
-rw-r--r--spec/requests/api/internal/base_spec.rb37
-rw-r--r--spec/requests/api/internal/kubernetes_spec.rb18
-rw-r--r--spec/requests/api/invitations_spec.rb9
-rw-r--r--spec/requests/api/issue_links_spec.rb28
-rw-r--r--spec/requests/api/issues/get_group_issues_spec.rb2
-rw-r--r--spec/requests/api/issues/post_projects_issues_spec.rb2
-rw-r--r--spec/requests/api/jobs_spec.rb24
-rw-r--r--spec/requests/api/labels_spec.rb6
-rw-r--r--spec/requests/api/lint_spec.rb12
-rw-r--r--spec/requests/api/maven_packages_spec.rb724
-rw-r--r--spec/requests/api/members_spec.rb30
-rw-r--r--spec/requests/api/merge_request_diffs_spec.rb8
-rw-r--r--spec/requests/api/merge_requests_spec.rb19
-rw-r--r--spec/requests/api/namespaces_spec.rb73
-rw-r--r--spec/requests/api/notes_spec.rb2
-rw-r--r--spec/requests/api/npm_project_packages_spec.rb38
-rw-r--r--spec/requests/api/nuget_group_packages_spec.rb41
-rw-r--r--spec/requests/api/nuget_project_packages_spec.rb8
-rw-r--r--spec/requests/api/project_attributes.yml1
-rw-r--r--spec/requests/api/project_import_spec.rb4
-rw-r--r--spec/requests/api/projects_spec.rb172
-rw-r--r--spec/requests/api/pypi_packages_spec.rb2
-rw-r--r--spec/requests/api/repositories_spec.rb23
-rw-r--r--spec/requests/api/resource_access_tokens_spec.rb17
-rw-r--r--spec/requests/api/settings_spec.rb7
-rw-r--r--spec/requests/api/tags_spec.rb230
-rw-r--r--spec/requests/api/triggers_spec.rb35
-rw-r--r--spec/requests/api/usage_data_non_sql_metrics_spec.rb67
-rw-r--r--spec/requests/api/usage_data_queries_spec.rb67
-rw-r--r--spec/requests/api/usage_data_spec.rb19
-rw-r--r--spec/requests/api/users_preferences_spec.rb65
-rw-r--r--spec/requests/api/users_spec.rb12
-rw-r--r--spec/requests/api/v3/github_spec.rb53
-rw-r--r--spec/requests/customers_dot/proxy_controller_spec.rb37
-rw-r--r--spec/requests/groups/clusters/integrations_controller_spec.rb40
-rw-r--r--spec/requests/groups/email_campaigns_controller_spec.rb59
-rw-r--r--spec/requests/groups/milestones_controller_spec.rb24
-rw-r--r--spec/requests/ide_controller_spec.rb53
-rw-r--r--spec/requests/jwt_controller_spec.rb7
-rw-r--r--spec/requests/projects/clusters/integrations_controller_spec.rb38
-rw-r--r--spec/requests/projects/cycle_analytics_events_spec.rb4
-rw-r--r--spec/requests/projects/issue_links_controller_spec.rb2
-rw-r--r--spec/requests/projects/merge_requests_discussions_spec.rb4
-rw-r--r--spec/requests/rack_attack_global_spec.rb25
-rw-r--r--spec/requests/users_controller_spec.rb16
-rw-r--r--spec/requests/whats_new_controller_spec.rb11
-rw-r--r--spec/routing/project_routing_spec.rb68
-rw-r--r--spec/rubocop/cop/gitlab/delegate_predicate_methods_spec.rb40
-rw-r--r--spec/rubocop/cop/gitlab/feature_available_usage_spec.rb96
-rw-r--r--spec/rubocop/cop/graphql/descriptions_spec.rb16
-rw-r--r--spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb9
-rw-r--r--spec/rubocop/cop/migration/add_timestamps_spec.rb8
-rw-r--r--spec/rubocop/cop/migration/complex_indexes_require_name_spec.rb10
-rw-r--r--spec/rubocop/cop/migration/datetime_spec.rb20
-rw-r--r--spec/rubocop/cop/migration/prevent_strings_spec.rb12
-rw-r--r--spec/rubocop/cop/migration/refer_to_index_by_name_spec.rb4
-rw-r--r--spec/rubocop/cop/migration/timestamps_spec.rb8
-rw-r--r--spec/rubocop/cop/rspec/env_assignment_spec.rb4
-rw-r--r--spec/rubocop/cop/style/regexp_literal_mixed_preserve_spec.rb131
-rw-r--r--spec/rubocop/cop/user_admin_spec.rb24
-rw-r--r--spec/serializers/admin/user_entity_spec.rb3
-rw-r--r--spec/serializers/admin/user_serializer_spec.rb2
-rw-r--r--spec/serializers/build_artifact_entity_spec.rb25
-rw-r--r--spec/serializers/ci/dag_pipeline_entity_spec.rb4
-rw-r--r--spec/serializers/ci/group_variable_entity_spec.rb2
-rw-r--r--spec/serializers/ci/lint/result_serializer_spec.rb1
-rw-r--r--spec/serializers/ci/pipeline_entity_spec.rb3
-rw-r--r--spec/serializers/container_repository_entity_spec.rb1
-rw-r--r--spec/serializers/container_tag_entity_spec.rb1
-rw-r--r--spec/serializers/deployment_serializer_spec.rb1
-rw-r--r--spec/serializers/diff_file_entity_spec.rb10
-rw-r--r--spec/serializers/environment_serializer_spec.rb50
-rw-r--r--spec/serializers/evidences/evidence_entity_spec.rb1
-rw-r--r--spec/serializers/fork_namespace_entity_spec.rb1
-rw-r--r--spec/serializers/group_link/group_group_link_entity_spec.rb1
-rw-r--r--spec/serializers/group_link/project_group_link_entity_spec.rb1
-rw-r--r--spec/serializers/issue_board_entity_spec.rb1
-rw-r--r--spec/serializers/member_entity_spec.rb1
-rw-r--r--spec/serializers/member_serializer_spec.rb65
-rw-r--r--spec/serializers/member_user_entity_spec.rb1
-rw-r--r--spec/serializers/merge_request_diff_entity_spec.rb1
-rw-r--r--spec/serializers/merge_request_poll_cached_widget_entity_spec.rb88
-rw-r--r--spec/serializers/merge_request_poll_widget_entity_spec.rb146
-rw-r--r--spec/serializers/merge_request_user_entity_spec.rb1
-rw-r--r--spec/serializers/merge_requests/pipeline_entity_spec.rb2
-rw-r--r--spec/serializers/namespace_basic_entity_spec.rb1
-rw-r--r--spec/serializers/pipeline_details_entity_spec.rb1
-rw-r--r--spec/serializers/pipeline_serializer_spec.rb21
-rw-r--r--spec/serializers/project_import_entity_spec.rb1
-rw-r--r--spec/serializers/project_serializer_spec.rb1
-rw-r--r--spec/serializers/review_app_setup_entity_spec.rb1
-rw-r--r--spec/serializers/runner_entity_spec.rb1
-rw-r--r--spec/serializers/service_event_entity_spec.rb4
-rw-r--r--spec/serializers/service_field_entity_spec.rb12
-rw-r--r--spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb295
-rw-r--r--spec/services/authorized_project_update/recalculate_for_user_range_service_spec.rb6
-rw-r--r--spec/services/award_emojis/add_service_spec.rb1
-rw-r--r--spec/services/award_emojis/destroy_service_spec.rb1
-rw-r--r--spec/services/award_emojis/toggle_service_spec.rb1
-rw-r--r--spec/services/boards/destroy_service_spec.rb23
-rw-r--r--spec/services/boards/issues/move_service_spec.rb1
-rw-r--r--spec/services/boards/lists/list_service_spec.rb46
-rw-r--r--spec/services/boards/lists/update_service_spec.rb41
-rw-r--r--spec/services/bulk_create_integration_service_spec.rb2
-rw-r--r--spec/services/bulk_update_integration_service_spec.rb4
-rw-r--r--spec/services/ci/abort_pipelines_service_spec.rb94
-rw-r--r--spec/services/ci/abort_project_pipelines_service_spec.rb42
-rw-r--r--spec/services/ci/after_requeue_job_service_spec.rb38
-rw-r--r--spec/services/ci/archive_trace_service_spec.rb46
-rw-r--r--spec/services/ci/cancel_user_pipelines_service_spec.rb35
-rw-r--r--spec/services/ci/create_downstream_pipeline_service_spec.rb4
-rw-r--r--spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb1
-rw-r--r--spec/services/ci/create_pipeline_service/needs_spec.rb2
-rw-r--r--spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb1
-rw-r--r--spec/services/ci/create_pipeline_service/rules_spec.rb152
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb37
-rw-r--r--spec/services/ci/create_web_ide_terminal_service_spec.rb19
-rw-r--r--spec/services/ci/disable_user_pipeline_schedules_service_spec.rb19
-rw-r--r--spec/services/ci/drop_pipeline_service_spec.rb60
-rw-r--r--spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb2
-rw-r--r--spec/services/ci/job_artifacts/create_service_spec.rb (renamed from spec/services/ci/create_job_artifacts_service_spec.rb)2
-rw-r--r--spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb (renamed from spec/services/ci/destroy_expired_job_artifacts_service_spec.rb)16
-rw-r--r--spec/services/ci/job_artifacts/destroy_batch_service_spec.rb (renamed from spec/services/ci/job_artifacts_destroy_batch_service_spec.rb)2
-rw-r--r--spec/services/ci/pipeline_artifacts/destroy_all_expired_service_spec.rb (renamed from spec/services/ci/pipeline_artifacts/destroy_expired_artifacts_service_spec.rb)10
-rw-r--r--spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb8
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_build_test_two_manual_review_test_staging_production.yml171
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false.yml2
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_always.yml2
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_on_failure.yml2
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml4
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_fails.yml (renamed from spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml)21
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_succeeds.yml66
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_false.yml2
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true.yml2
-rw-r--r--spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_on_failure.yml4
-rw-r--r--spec/services/ci/pipeline_trigger_service_spec.rb11
-rw-r--r--spec/services/ci/play_bridge_service_spec.rb22
-rw-r--r--spec/services/ci/play_build_service_spec.rb22
-rw-r--r--spec/services/ci/process_build_service_spec.rb23
-rw-r--r--spec/services/ci/process_pipeline_service_spec.rb52
-rw-r--r--spec/services/ci/register_job_service_spec.rb39
-rw-r--r--spec/services/ci/retry_build_service_spec.rb2
-rw-r--r--spec/services/ci/retry_pipeline_service_spec.rb2
-rw-r--r--spec/services/ci/test_failure_history_service_spec.rb42
-rw-r--r--spec/services/clusters/integrations/create_service_spec.rb90
-rw-r--r--spec/services/deployments/link_merge_requests_service_spec.rb11
-rw-r--r--spec/services/design_management/copy_design_collection/copy_service_spec.rb1
-rw-r--r--spec/services/design_management/delete_designs_service_spec.rb1
-rw-r--r--spec/services/design_management/save_designs_service_spec.rb3
-rw-r--r--spec/services/draft_notes/publish_service_spec.rb2
-rw-r--r--spec/services/environments/auto_stop_service_spec.rb2
-rw-r--r--spec/services/environments/canary_ingress/update_service_spec.rb1
-rw-r--r--spec/services/environments/reset_auto_stop_service_spec.rb1
-rw-r--r--spec/services/groups/auto_devops_service_spec.rb1
-rw-r--r--spec/services/groups/group_links/update_service_spec.rb1
-rw-r--r--spec/services/groups/merge_requests_count_service_spec.rb29
-rw-r--r--spec/services/groups/open_issues_count_service_spec.rb48
-rw-r--r--spec/services/groups/transfer_service_spec.rb3
-rw-r--r--spec/services/groups/update_shared_runners_service_spec.rb1
-rw-r--r--spec/services/ide/base_config_service_spec.rb1
-rw-r--r--spec/services/ide/schemas_config_service_spec.rb1
-rw-r--r--spec/services/ide/terminal_config_service_spec.rb3
-rw-r--r--spec/services/issuable/destroy_service_spec.rb21
-rw-r--r--spec/services/issuable/process_assignees_spec.rb32
-rw-r--r--spec/services/issue_links/create_service_spec.rb6
-rw-r--r--spec/services/issues/after_create_service_spec.rb52
-rw-r--r--spec/services/issues/build_service_spec.rb1
-rw-r--r--spec/services/issues/clone_service_spec.rb1
-rw-r--r--spec/services/issues/create_service_spec.rb28
-rw-r--r--spec/services/issues/export_csv_service_spec.rb1
-rw-r--r--spec/services/issues/move_service_spec.rb1
-rw-r--r--spec/services/issues/related_branches_service_spec.rb1
-rw-r--r--spec/services/keys/expiry_notification_service_spec.rb97
-rw-r--r--spec/services/members/create_service_spec.rb4
-rw-r--r--spec/services/members/destroy_service_spec.rb157
-rw-r--r--spec/services/members/invite_service_spec.rb79
-rw-r--r--spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb6
-rw-r--r--spec/services/merge_requests/after_create_service_spec.rb104
-rw-r--r--spec/services/merge_requests/base_service_spec.rb1
-rw-r--r--spec/services/merge_requests/create_pipeline_service_spec.rb2
-rw-r--r--spec/services/merge_requests/create_service_spec.rb85
-rw-r--r--spec/services/merge_requests/export_csv_service_spec.rb2
-rw-r--r--spec/services/merge_requests/handle_assignees_change_service_spec.rb114
-rw-r--r--spec/services/merge_requests/merge_orchestration_service_spec.rb1
-rw-r--r--spec/services/merge_requests/merge_service_spec.rb15
-rw-r--r--spec/services/merge_requests/merge_to_ref_service_spec.rb7
-rw-r--r--spec/services/merge_requests/push_options_handler_service_spec.rb208
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb16
-rw-r--r--spec/services/merge_requests/resolve_todos_service_spec.rb49
-rw-r--r--spec/services/merge_requests/update_assignees_service_spec.rb79
-rw-r--r--spec/services/merge_requests/update_service_spec.rb244
-rw-r--r--spec/services/milestones/destroy_service_spec.rb14
-rw-r--r--spec/services/milestones/merge_requests_count_service_spec.rb21
-rw-r--r--spec/services/milestones/transfer_service_spec.rb8
-rw-r--r--spec/services/namespace_settings/update_service_spec.rb31
-rw-r--r--spec/services/namespaces/in_product_marketing_emails_service_spec.rb133
-rw-r--r--spec/services/notes/create_service_spec.rb18
-rw-r--r--spec/services/notification_recipients/builder/default_spec.rb146
-rw-r--r--spec/services/notification_service_spec.rb47
-rw-r--r--spec/services/packages/create_dependency_service_spec.rb2
-rw-r--r--spec/services/packages/debian/extract_changes_metadata_service_spec.rb160
-rw-r--r--spec/services/packages/debian/extract_deb_metadata_service_spec.rb22
-rw-r--r--spec/services/packages/debian/extract_metadata_service_spec.rb10
-rw-r--r--spec/services/packages/debian/parse_debian822_service_spec.rb74
-rw-r--r--spec/services/packages/debian/process_changes_service_spec.rb58
-rw-r--r--spec/services/packages/go/create_package_service_spec.rb73
-rw-r--r--spec/services/packages/go/sync_packages_service_spec.rb40
-rw-r--r--spec/services/packages/maven/metadata/sync_service_spec.rb15
-rw-r--r--spec/services/packages/rubygems/create_dependencies_service_spec.rb33
-rw-r--r--spec/services/packages/rubygems/create_gemspec_service_spec.rb28
-rw-r--r--spec/services/packages/rubygems/metadata_extraction_service_spec.rb50
-rw-r--r--spec/services/packages/rubygems/process_gem_service_spec.rb134
-rw-r--r--spec/services/pages/delete_service_spec.rb (renamed from spec/services/pages/delete_services_spec.rb)14
-rw-r--r--spec/services/pages/migrate_from_legacy_storage_service_spec.rb157
-rw-r--r--spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb48
-rw-r--r--spec/services/pages/zip_directory_service_spec.rb44
-rw-r--r--spec/services/projects/create_service_spec.rb14
-rw-r--r--spec/services/projects/destroy_service_spec.rb22
-rw-r--r--spec/services/projects/update_pages_configuration_service_spec.rb2
-rw-r--r--spec/services/projects/update_pages_service_spec.rb19
-rw-r--r--spec/services/projects/update_remote_mirror_service_spec.rb16
-rw-r--r--spec/services/quick_actions/interpret_service_spec.rb200
-rw-r--r--spec/services/repositories/changelog_service_spec.rb19
-rw-r--r--spec/services/security/ci_configuration/sast_parser_service_spec.rb4
-rw-r--r--spec/services/spam/spam_action_service_spec.rb57
-rw-r--r--spec/services/submit_usage_ping_service_spec.rb35
-rw-r--r--spec/services/submodules/update_service_spec.rb2
-rw-r--r--spec/services/system_hooks_service_spec.rb28
-rw-r--r--spec/services/system_notes/alert_management_service_spec.rb2
-rw-r--r--spec/services/todo_service_spec.rb51
-rw-r--r--spec/services/todos/destroy/destroyed_issuable_service_spec.rb34
-rw-r--r--spec/services/todos/destroy/entity_leave_service_spec.rb27
-rw-r--r--spec/services/upload_service_spec.rb23
-rw-r--r--spec/services/user_preferences/update_service_spec.rb33
-rw-r--r--spec/services/users/refresh_authorized_projects_service_spec.rb164
-rw-r--r--spec/services/users/update_todo_count_cache_service_spec.rb61
-rw-r--r--spec/spam/concerns/has_spam_action_response_fields_spec.rb8
-rw-r--r--spec/spec_helper.rb55
-rw-r--r--spec/support/helpers/board_helpers.rb9
-rw-r--r--spec/support/helpers/ci_artifact_metadata_generator.rb2
-rw-r--r--spec/support/helpers/cycle_analytics_helpers.rb8
-rw-r--r--spec/support/helpers/graphql_helpers.rb16
-rw-r--r--spec/support/helpers/javascript_fixtures_helpers.rb2
-rw-r--r--spec/support/helpers/jira_service_helper.rb2
-rw-r--r--spec/support/helpers/key_generator_helper.rb3
-rw-r--r--spec/support/helpers/navbar_structure_helper.rb12
-rw-r--r--spec/support/helpers/next_instance_of.rb15
-rw-r--r--spec/support/helpers/query_recorder.rb40
-rw-r--r--spec/support/helpers/reload_helpers.rb12
-rw-r--r--spec/support/helpers/rubygems_helpers.rb11
-rw-r--r--spec/support/helpers/seed_repo.rb60
-rw-r--r--spec/support/helpers/stub_env.rb2
-rw-r--r--spec/support/helpers/stub_requests.rb2
-rw-r--r--spec/support/helpers/test_env.rb11
-rw-r--r--spec/support/helpers/usage_data_helpers.rb24
-rw-r--r--spec/support/import_export/project_tree_expectations.rb4
-rw-r--r--spec/support/matchers/exceed_query_limit.rb5
-rw-r--r--spec/support/matchers/graphql_matchers.rb14
-rw-r--r--spec/support/matchers/track_self_describing_event_matcher.rb12
-rw-r--r--spec/support/shared_contexts/email_shared_context.rb11
-rw-r--r--spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb87
-rw-r--r--spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb92
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb5
-rw-r--r--spec/support/shared_contexts/project_service_jira_context.rb5
-rw-r--r--spec/support/shared_contexts/project_service_shared_context.rb5
-rw-r--r--spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb7
-rw-r--r--spec/support/shared_contexts/requests/api/go_modules_shared_context.rb14
-rw-r--r--spec/support/shared_contexts/services/projects/container_repository/delete_tags_service_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/services_shared_context.rb2
-rw-r--r--spec/support/shared_examples/boards/destroy_service_shared_examples.rb30
-rw-r--r--spec/support/shared_examples/boards/lists/update_service_shared_examples.rb43
-rw-r--r--spec/support/shared_examples/controllers/snippet_blob_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/controllers/snippet_shared_examples.rb28
-rw-r--r--spec/support/shared_examples/controllers/trackable_shared_examples.rb39
-rw-r--r--spec/support/shared_examples/controllers/unique_visits_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/features/cascading_settings_shared_examples.rb41
-rw-r--r--spec/support/shared_examples/features/creatable_merge_request_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/discussion_comments_shared_example.rb4
-rw-r--r--spec/support/shared_examples/features/error_tracking_shared_example.rb2
-rw-r--r--spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/features/project_upload_files_shared_examples.rb73
-rw-r--r--spec/support/shared_examples/features/resolving_discussions_in_issues_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/features/search_settings_shared_examples.rb27
-rw-r--r--spec/support/shared_examples/features/sidebar_shared_examples.rb165
-rw-r--r--spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/wiki/user_git_access_wiki_page_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb6
-rw-r--r--spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb6
-rw-r--r--spec/support/shared_examples/graphql/mutations/set_assignees_shared_examples.rb28
-rw-r--r--spec/support/shared_examples/graphql/sorted_paginated_query_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/graphql/spam_protection_shared_examples.rb85
-rw-r--r--spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/helpers/groups_shared_examples.rb53
-rw-r--r--spec/support/shared_examples/lib/api/ci/runner_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/lib/api/internal_base_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/lib/gitlab/database/cte_materialized_shared_examples.rb44
-rw-r--r--spec/support/shared_examples/lib/gitlab/sidekiq_middleware/metrics_middleware_with_worker_attribution_shared_examples.rb132
-rw-r--r--spec/support/shared_examples/lib/gitlab/sql/set_operator_shared_examples.rb29
-rw-r--r--spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/mailers/notify_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb14
-rw-r--r--spec/support/shared_examples/models/boards/listable_shared_examples.rb25
-rw-r--r--spec/support/shared_examples/models/cluster_application_status_shared_examples.rb7
-rw-r--r--spec/support/shared_examples/models/cluster_application_version_shared_examples.rb11
-rw-r--r--spec/support/shared_examples/models/clusters/prometheus_client_shared.rb86
-rw-r--r--spec/support/shared_examples/models/packages/debian/architecture_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/packages/debian/component_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/models/wiki_shared_examples.rb95
-rw-r--r--spec/support/shared_examples/namespaces/namespace_traversal_examples.rb (renamed from spec/support/shared_examples/namespaces/recursive_traversal_examples.rb)48
-rw-r--r--spec/support/shared_examples/nav_sidebar_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/policies/resource_access_token_shared_examples.rb76
-rw-r--r--spec/support/shared_examples/querying_shared_examples.rb23
-rw-r--r--spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb29
-rw-r--r--spec/support/shared_examples/requests/api/graphql/projects/alert_management/integrations_shared_examples.rb49
-rw-r--r--spec/support/shared_examples/requests/api/logging_application_context_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/requests/api/packages_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/requests/api/rubygems_packages_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/requests/clusters/integrations_controller_shared_examples.rb46
-rw-r--r--spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb29
-rw-r--r--spec/support/shared_examples/services/boards/lists_destroy_service_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb38
-rw-r--r--spec/support/shared_examples/services/clusters/parse_cluster_applications_artifact_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/services/groups_count_service_shared_examples.rb55
-rw-r--r--spec/support/shared_examples/services/issuable/destroy_service_shared_examples.rb31
-rw-r--r--spec/support/shared_examples/services/merge_request_shared_examples.rb90
-rw-r--r--spec/support/shared_examples/services/notification_service_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/snippets_shared_examples.rb41
-rw-r--r--spec/support/shared_examples/workers/in_product_marketing_email_shared_example.rb15
-rw-r--r--spec/support/shared_examples/workers/worker_with_data_consistency_shared_example.rb27
-rw-r--r--spec/support/sidekiq_middleware.rb18
-rw-r--r--spec/support_specs/helpers/active_record/query_recorder_spec.rb70
-rw-r--r--spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb10
-rw-r--r--spec/tasks/gitlab/gitaly_rake_spec.rb11
-rw-r--r--spec/tasks/gitlab/pages_rake_spec.rb116
-rw-r--r--spec/tasks/gitlab/usage_data_rake_spec.rb16
-rw-r--r--spec/tooling/danger/changelog_spec.rb153
-rw-r--r--spec/tooling/danger/project_helper_spec.rb38
-rw-r--r--spec/tooling/lib/tooling/kubernetes_client_spec.rb10
-rw-r--r--spec/tooling/merge_request_spec.rb31
-rw-r--r--spec/tooling/quality/test_level_spec.rb (renamed from spec/lib/quality/test_level_spec.rb)4
-rw-r--r--spec/uploaders/object_storage_spec.rb19
-rw-r--r--spec/validators/json_schema_validator_spec.rb30
-rw-r--r--spec/validators/x509_certificate_credentials_validator_spec.rb4
-rw-r--r--spec/views/admin/dashboard/index.html.haml_spec.rb2
-rw-r--r--spec/views/admin/services/index.html.haml_spec.rb30
-rw-r--r--spec/views/dashboard/projects/index.html.haml_spec.rb28
-rw-r--r--spec/views/groups/settings/_remove.html.haml_spec.rb17
-rw-r--r--spec/views/layouts/_search.html.haml_spec.rb68
-rw-r--r--spec/views/layouts/header/_new_dropdown.haml_spec.rb1
-rw-r--r--spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb1
-rw-r--r--spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb1
-rw-r--r--spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb132
-rw-r--r--spec/views/layouts/profile.html.haml_spec.rb19
-rw-r--r--spec/views/profiles/keys/_form.html.haml_spec.rb50
-rw-r--r--spec/views/profiles/keys/_key.html.haml_spec.rb123
-rw-r--r--spec/views/projects/commit/_commit_box.html.haml_spec.rb34
-rw-r--r--spec/views/projects/empty.html.haml_spec.rb10
-rw-r--r--spec/views/projects/pipelines/_stage.html.haml_spec.rb73
-rw-r--r--spec/views/projects/settings/operations/show.html.haml_spec.rb4
-rw-r--r--spec/views/projects/tags/index.html.haml_spec.rb1
-rw-r--r--spec/views/registrations/welcome/show.html.haml_spec.rb26
-rw-r--r--spec/views/search/_results.html.haml_spec.rb15
-rw-r--r--spec/views/shared/nav/_sidebar.html.haml_spec.rb44
-rw-r--r--spec/views/shared/runners/show.html.haml_spec.rb6
-rw-r--r--spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb63
-rw-r--r--spec/workers/background_migration_worker_spec.rb2
-rw-r--r--spec/workers/build_finished_worker_spec.rb38
-rw-r--r--spec/workers/build_hooks_worker_spec.rb5
-rw-r--r--spec/workers/bulk_import_worker_spec.rb22
-rw-r--r--spec/workers/bulk_imports/entity_worker_spec.rb124
-rw-r--r--spec/workers/bulk_imports/pipeline_worker_spec.rb125
-rw-r--r--spec/workers/ci/drop_pipeline_worker_spec.rb36
-rw-r--r--spec/workers/ci/initial_pipeline_process_worker_spec.rb21
-rw-r--r--spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb53
-rw-r--r--spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb2
-rw-r--r--spec/workers/ci/test_failure_history_worker_spec.rb4
-rw-r--r--spec/workers/concerns/worker_attributes_spec.rb74
-rw-r--r--spec/workers/concerns/worker_context_spec.rb2
-rw-r--r--spec/workers/container_expiration_policy_worker_spec.rb123
-rw-r--r--spec/workers/database/batched_background_migration_worker_spec.rb121
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb2
-rw-r--r--spec/workers/expire_build_artifacts_worker_spec.rb2
-rw-r--r--spec/workers/expire_job_cache_worker_spec.rb21
-rw-r--r--spec/workers/expire_pipeline_cache_worker_spec.rb17
-rw-r--r--spec/workers/merge_requests/assignees_change_worker_spec.rb59
-rw-r--r--spec/workers/merge_requests/create_pipeline_worker_spec.rb61
-rw-r--r--spec/workers/merge_requests/handle_assignees_change_worker_spec.rb62
-rw-r--r--spec/workers/merge_requests/resolve_todos_worker_spec.rb41
-rw-r--r--spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb56
-rw-r--r--spec/workers/new_issue_worker_spec.rb11
-rw-r--r--spec/workers/new_merge_request_worker_spec.rb2
-rw-r--r--spec/workers/packages/go/sync_packages_worker_spec.rb101
-rw-r--r--spec/workers/packages/maven/metadata/sync_worker_spec.rb6
-rw-r--r--spec/workers/packages/rubygems/extraction_worker_spec.rb54
-rw-r--r--spec/workers/pages_update_configuration_worker_spec.rb2
-rw-r--r--spec/workers/post_receive_spec.rb5
-rw-r--r--spec/workers/projects/post_creation_worker_spec.rb86
-rw-r--r--spec/workers/remove_expired_members_worker_spec.rb18
-rw-r--r--spec/workers/repository_check/dispatch_worker_spec.rb7
-rw-r--r--spec/workers/ssh_keys/expired_notification_worker_spec.rb58
-rw-r--r--spec/workers/ssh_keys/expiring_soon_notification_worker_spec.rb66
-rw-r--r--spec/workers/todos_destroyer/destroyed_issuable_worker_spec.rb15
1741 files changed, 49184 insertions, 17175 deletions
diff --git a/spec/benchmarks/banzai_benchmark.rb b/spec/benchmarks/banzai_benchmark.rb
index 4cf079b2130..05c41eed889 100644
--- a/spec/benchmarks/banzai_benchmark.rb
+++ b/spec/benchmarks/banzai_benchmark.rb
@@ -54,9 +54,10 @@ RSpec.describe 'GitLab Markdown Benchmark', :aggregate_failures do
context 'pipelines' do
it 'benchmarks several pipelines' do
- path = 'images/example.jpg'
- gitaly_wiki_file = Gitlab::GitalyClient::WikiFile.new(path: path)
- allow(wiki).to receive(:find_file).with(path, load_content: false).and_return(Gitlab::Git::WikiFile.new(gitaly_wiki_file))
+ name = 'example.jpg'
+ path = "images/#{name}"
+ blob = double(name: name, path: path, mime_type: 'image/jpeg', data: nil)
+ allow(wiki).to receive(:find_file).with(path, load_content: false).and_return(Gitlab::Git::WikiFile.new(blob))
allow(wiki).to receive(:wiki_base_path) { '/namespace1/gitlabhq/wikis' }
puts "\n--> Benchmarking Full, Wiki, and Plain pipelines\n"
diff --git a/spec/config/mail_room_spec.rb b/spec/config/mail_room_spec.rb
index 289e18be0d7..6265b54931a 100644
--- a/spec/config/mail_room_spec.rb
+++ b/spec/config/mail_room_spec.rb
@@ -16,7 +16,9 @@ RSpec.describe 'mail_room.yml' do
}
cmd = "puts ERB.new(File.read(#{absolute_path(mailroom_config_path).inspect})).result"
- output, status = Gitlab::Popen.popen(%W(ruby -rerb -e #{cmd}), absolute_path('config'), vars)
+ result = Gitlab::Popen.popen_with_detail(%W(ruby -rerb -e #{cmd}), absolute_path('config'), vars)
+ output = result.stdout
+ status = result.status
raise "Error interpreting #{mailroom_config_path}: #{output}" unless status == 0
YAML.load(output)
@@ -68,6 +70,39 @@ RSpec.describe 'mail_room.yml' do
end
end
+ context 'when both incoming email and service desk email are enabled for Microsoft Graph' do
+ let(:gitlab_config_path) { 'spec/fixtures/config/mail_room_enabled_ms_graph.yml' }
+ let(:queues_config_path) { 'spec/fixtures/config/redis_queues_new_format_host.yml' }
+ let(:gitlab_redis_queues) { Gitlab::Redis::Queues.new(Rails.env) }
+
+ it 'contains the intended configuration' do
+ expected_mailbox = {
+ email: 'gitlab-incoming@gmail.com',
+ name: 'inbox',
+ idle_timeout: 60,
+ expunge_deleted: true
+ }
+ expected_options = {
+ redis_url: gitlab_redis_queues.url,
+ sentinels: gitlab_redis_queues.sentinels
+ }
+ expected_inbox_options = {
+ tenant_id: '12345',
+ client_id: 'MY-CLIENT-ID',
+ client_secret: 'MY-CLIENT-SECRET',
+ poll_interval: 60
+ }
+
+ expect(configuration[:mailboxes].length).to eq(2)
+ expect(configuration[:mailboxes]).to all(include(expected_mailbox))
+ expect(configuration[:mailboxes].map { |m| m[:inbox_method] }).to all(eq('microsoft_graph'))
+ expect(configuration[:mailboxes].map { |m| m[:inbox_options] }).to all(eq(expected_inbox_options))
+ expect(configuration[:mailboxes].map { |m| m[:delivery_options] }).to all(include(expected_options))
+ expect(configuration[:mailboxes].map { |m| m[:delivery_options] }).to all(include(expected_options))
+ expect(configuration[:mailboxes].map { |m| m[:arbitration_options] }).to all(include(expected_options))
+ end
+ end
+
def clear_queues_raw_config
Gitlab::Redis::Queues.remove_instance_variable(:@_raw_config)
rescue NameError
diff --git a/spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb b/spec/config/metrics/aggregates/aggregated_metrics_spec.rb
index 9aba86cdaf2..9aba86cdaf2 100644
--- a/spec/lib/gitlab/usage_data_counters/aggregated_metrics_spec.rb
+++ b/spec/config/metrics/aggregates/aggregated_metrics_spec.rb
diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb
index 2b562e2dd64..6258dd30438 100644
--- a/spec/controllers/admin/application_settings_controller_spec.rb
+++ b/spec/controllers/admin/application_settings_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Admin::ApplicationSettingsController do
+RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_setting do
include StubENV
include UsageDataHelpers
@@ -164,6 +164,13 @@ RSpec.describe Admin::ApplicationSettingsController do
expect(ApplicationSetting.current.default_branch_name).to eq("example_branch_name")
end
+ it "updates admin_mode setting" do
+ put :update, params: { application_setting: { admin_mode: true } }
+
+ expect(response).to redirect_to(general_admin_application_settings_path)
+ expect(ApplicationSetting.current.admin_mode).to be(true)
+ end
+
context "personal access token prefix settings" do
let(:application_settings) { ApplicationSetting.current }
diff --git a/spec/controllers/admin/clusters_controller_spec.rb b/spec/controllers/admin/clusters_controller_spec.rb
index 85aa77d8473..bd0c2965906 100644
--- a/spec/controllers/admin/clusters_controller_spec.rb
+++ b/spec/controllers/admin/clusters_controller_spec.rb
@@ -546,20 +546,30 @@ RSpec.describe Admin::ClustersController do
describe 'GET #show' do
let(:cluster) { create(:cluster, :provided_by_gcp, :instance) }
- def get_show
+ def get_show(tab: nil)
get :show,
params: {
- id: cluster
+ id: cluster,
+ tab: tab
}
end
describe 'functionality' do
+ render_views
+
it 'responds successfully' do
get_show
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:cluster)).to eq(cluster)
end
+
+ it 'renders integration tab view' do
+ get_show(tab: 'integrations')
+
+ expect(response).to render_template('clusters/clusters/_integrations')
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
describe 'security' do
diff --git a/spec/controllers/admin/dev_ops_report_controller_spec.rb b/spec/controllers/admin/dev_ops_report_controller_spec.rb
index 913921b9630..142db175a15 100644
--- a/spec/controllers/admin/dev_ops_report_controller_spec.rb
+++ b/spec/controllers/admin/dev_ops_report_controller_spec.rb
@@ -5,7 +5,13 @@ require 'spec_helper'
RSpec.describe Admin::DevOpsReportController do
describe 'show_adoption?' do
it 'is always false' do
- expect(controller.show_adoption?).to be false
+ expect(controller.show_adoption?).to be_falsey
+ end
+ end
+
+ describe 'should_track_devops_score?' do
+ it 'is always true' do
+ expect(controller.should_track_devops_score?).to be_truthy
end
end
diff --git a/spec/controllers/admin/groups_controller_spec.rb b/spec/controllers/admin/groups_controller_spec.rb
index 38f4ce54e5c..8441a52b454 100644
--- a/spec/controllers/admin/groups_controller_spec.rb
+++ b/spec/controllers/admin/groups_controller_spec.rb
@@ -37,6 +37,12 @@ RSpec.describe Admin::GroupsController do
post :create, params: { group: { path: 'test', name: 'test' } }
end.to change { NamespaceSetting.count }.by(1)
end
+
+ it 'creates admin_note for group' do
+ expect do
+ post :create, params: { group: { path: 'test', name: 'test', admin_note_attributes: { note: 'test' } } }
+ end.to change { Namespace::AdminNote.count }.by(1)
+ end
end
describe 'PUT #members_update' do
diff --git a/spec/controllers/admin/impersonations_controller_spec.rb b/spec/controllers/admin/impersonations_controller_spec.rb
index 326003acaf8..744c0712d6b 100644
--- a/spec/controllers/admin/impersonations_controller_spec.rb
+++ b/spec/controllers/admin/impersonations_controller_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe Admin::ImpersonationsController do
context "when the impersonator is not admin (anymore)" do
before do
impersonator.admin = false
- impersonator.save
+ impersonator.save!
end
it "responds with status 404" do
diff --git a/spec/controllers/admin/runners_controller_spec.rb b/spec/controllers/admin/runners_controller_spec.rb
index cba25dbff95..45ea8949bf2 100644
--- a/spec/controllers/admin/runners_controller_spec.rb
+++ b/spec/controllers/admin/runners_controller_spec.rb
@@ -125,7 +125,7 @@ RSpec.describe Admin::RunnersController do
describe '#resume' do
it 'marks the runner as active and ticks the queue' do
- runner.update(active: false)
+ runner.update!(active: false)
expect do
post :resume, params: { id: runner.id }
@@ -140,7 +140,7 @@ RSpec.describe Admin::RunnersController do
describe '#pause' do
it 'marks the runner as inactive and ticks the queue' do
- runner.update(active: true)
+ runner.update!(active: true)
expect do
post :pause, params: { id: runner.id }
diff --git a/spec/controllers/admin/services_controller_spec.rb b/spec/controllers/admin/services_controller_spec.rb
index 8e78cc75369..d5ec9907b48 100644
--- a/spec/controllers/admin/services_controller_spec.rb
+++ b/spec/controllers/admin/services_controller_spec.rb
@@ -44,7 +44,7 @@ RSpec.describe Admin::ServicesController do
describe "#update" do
let(:project) { create(:project) }
let!(:service_template) do
- RedmineService.create(
+ RedmineService.create!(
project: nil,
active: false,
template: true,
diff --git a/spec/controllers/application_controller_spec.rb b/spec/controllers/application_controller_spec.rb
index 4a729008e67..3d34db6c2c0 100644
--- a/spec/controllers/application_controller_spec.rb
+++ b/spec/controllers/application_controller_spec.rb
@@ -56,8 +56,6 @@ RSpec.describe ApplicationController do
end
end
- it_behaves_like 'a Trackable Controller'
-
describe '#add_gon_variables' do
before do
Gon.clear
@@ -900,7 +898,7 @@ RSpec.describe ApplicationController do
feature_category :issue_tracking
def index
- Labkit::Context.with_context do |context|
+ Gitlab::ApplicationContext.with_raw_context do |context|
render json: context.to_h
end
end
diff --git a/spec/controllers/boards/issues_controller_spec.rb b/spec/controllers/boards/issues_controller_spec.rb
index a7f3ab0089f..d23f099e382 100644
--- a/spec/controllers/boards/issues_controller_spec.rb
+++ b/spec/controllers/boards/issues_controller_spec.rb
@@ -404,7 +404,7 @@ RSpec.describe Boards::IssuesController do
list_id: list.try(:to_param)
}
- unless board.try(:parent)&.is_a?(Group)
+ unless board.try(:parent).is_a?(Group)
params[:namespace_id] = project.namespace.to_param
params[:project_id] = project
end
diff --git a/spec/controllers/chaos_controller_spec.rb b/spec/controllers/chaos_controller_spec.rb
index cb4f12ff829..26ae4a6b693 100644
--- a/spec/controllers/chaos_controller_spec.rb
+++ b/spec/controllers/chaos_controller_spec.rb
@@ -109,7 +109,7 @@ RSpec.describe ChaosController do
describe '#kill' do
it 'calls synchronously' do
- expect(Gitlab::Chaos).to receive(:kill).with(no_args)
+ expect(Gitlab::Chaos).to receive(:kill).with('KILL')
get :kill
@@ -117,7 +117,7 @@ RSpec.describe ChaosController do
end
it 'calls asynchronously' do
- expect(Chaos::KillWorker).to receive(:perform_async).with(no_args)
+ expect(Chaos::KillWorker).to receive(:perform_async).with('KILL')
get :kill, params: { async: 1 }
@@ -125,6 +125,24 @@ RSpec.describe ChaosController do
end
end
+ describe '#quit' do
+ it 'calls synchronously' do
+ expect(Gitlab::Chaos).to receive(:kill).with('QUIT')
+
+ get :quit
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'calls asynchronously' do
+ expect(Chaos::KillWorker).to receive(:perform_async).with('QUIT')
+
+ get :quit, params: { async: 1 }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
describe '#gc' do
let(:gc_stat) { GC.stat.stringify_keys }
diff --git a/spec/controllers/concerns/enforces_admin_authentication_spec.rb b/spec/controllers/concerns/enforces_admin_authentication_spec.rb
index c6ad1a00484..106b1d53fd2 100644
--- a/spec/controllers/concerns/enforces_admin_authentication_spec.rb
+++ b/spec/controllers/concerns/enforces_admin_authentication_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe EnforcesAdminAuthentication do
end
end
- context 'feature flag :user_mode_in_session is enabled' do
+ context 'application setting :admin_mode is enabled' do
describe 'authenticate_admin!' do
context 'as an admin' do
let(:user) { create(:admin) }
@@ -61,9 +61,9 @@ RSpec.describe EnforcesAdminAuthentication do
end
end
- context 'feature flag :user_mode_in_session is disabled' do
+ context 'application setting :admin_mode is disabled' do
before do
- stub_feature_flags(user_mode_in_session: false)
+ stub_application_setting(admin_mode: false)
end
describe 'authenticate_admin!' do
diff --git a/spec/controllers/concerns/redis_tracking_spec.rb b/spec/controllers/concerns/redis_tracking_spec.rb
index 53b49dd30a6..4077f4f5cce 100644
--- a/spec/controllers/concerns/redis_tracking_spec.rb
+++ b/spec/controllers/concerns/redis_tracking_spec.rb
@@ -9,8 +9,8 @@ RSpec.describe RedisTracking do
include RedisTracking
skip_before_action :authenticate_user!, only: :show
- track_redis_hll_event :index, :show, name: 'g_compliance_approval_rules',
- if: [:custom_condition_one?, :custom_condition_two?]
+ track_redis_hll_event(:index, :show, name: 'g_compliance_approval_rules',
+ if: [:custom_condition_one?, :custom_condition_two?]) { |controller| controller.get_custom_id }
def index
render html: 'index'
@@ -24,6 +24,10 @@ RSpec.describe RedisTracking do
render html: 'show'
end
+ def get_custom_id
+ 'some_custom_id'
+ end
+
private
def custom_condition_one?
@@ -92,19 +96,15 @@ RSpec.describe RedisTracking do
end
end
- context 'when user is not logged in and there is a visitor_id' do
+ context 'when user is not logged in' do
let(:visitor_id) { SecureRandom.uuid }
- before do
- routes.draw { get 'show' => 'anonymous#show' }
- end
-
- it 'tracks the event' do
+ it 'tracks the event when there is a visitor id' do
cookies[:visitor_id] = { value: visitor_id, expires: 24.months }
expect_tracking
- get :show
+ get :show, params: { id: 1 }
end
end
@@ -114,5 +114,19 @@ RSpec.describe RedisTracking do
get :index
end
+
+ it 'tracks the event when there is custom id' do
+ expect_tracking
+
+ get :show, params: { id: 1 }
+ end
+
+ it 'does not track the event when there is no custom id' do
+ expect(controller).to receive(:get_custom_id).and_return(nil)
+
+ expect_no_tracking
+
+ get :show, params: { id: 2 }
+ end
end
end
diff --git a/spec/controllers/concerns/renders_commits_spec.rb b/spec/controllers/concerns/renders_commits_spec.rb
index 7be5f75c19d..7b241fc29af 100644
--- a/spec/controllers/concerns/renders_commits_spec.rb
+++ b/spec/controllers/concerns/renders_commits_spec.rb
@@ -57,4 +57,16 @@ RSpec.describe RendersCommits do
end.not_to exceed_all_query_limit(control_count)
end
end
+
+ describe '.prepare_commits_for_rendering' do
+ it 'avoids N+1' do
+ control = ActiveRecord::QueryRecorder.new do
+ subject.prepare_commits_for_rendering(merge_request.commits.take(1))
+ end
+
+ expect do
+ subject.prepare_commits_for_rendering(merge_request.commits)
+ end.not_to exceed_all_query_limit(control.count)
+ end
+ end
end
diff --git a/spec/controllers/dashboard/snippets_controller_spec.rb b/spec/controllers/dashboard/snippets_controller_spec.rb
index 016a9f53129..180369447b4 100644
--- a/spec/controllers/dashboard/snippets_controller_spec.rb
+++ b/spec/controllers/dashboard/snippets_controller_spec.rb
@@ -29,23 +29,6 @@ RSpec.describe Dashboard::SnippetsController do
it_behaves_like 'snippets sort order'
- context 'when views are rendered' do
- render_views
-
- it 'avoids N+1 database queries' do
- # Warming call to load everything non snippet related
- get(:index)
-
- project = create(:project, namespace: user.namespace)
- create(:project_snippet, project: project, author: user)
-
- control_count = ActiveRecord::QueryRecorder.new { get(:index) }.count
-
- project = create(:project, namespace: user.namespace)
- create(:project_snippet, project: project, author: user)
-
- expect { get(:index) }.not_to exceed_query_limit(control_count)
- end
- end
+ it_behaves_like 'snippets views'
end
end
diff --git a/spec/controllers/explore/snippets_controller_spec.rb b/spec/controllers/explore/snippets_controller_spec.rb
index f7bd2ba917e..e93e8502dfc 100644
--- a/spec/controllers/explore/snippets_controller_spec.rb
+++ b/spec/controllers/explore/snippets_controller_spec.rb
@@ -32,5 +32,9 @@ RSpec.describe Explore::SnippetsController do
expect(assigns(:snippets)).to all(be_a(PersonalSnippet))
expect(response).to have_gitlab_http_status(:ok)
end
+
+ it_behaves_like 'snippets views' do
+ let_it_be(:user) { create(:user) }
+ end
end
end
diff --git a/spec/controllers/graphql_controller_spec.rb b/spec/controllers/graphql_controller_spec.rb
index f10fbf5ef2c..f2d86b1b166 100644
--- a/spec/controllers/graphql_controller_spec.rb
+++ b/spec/controllers/graphql_controller_spec.rb
@@ -175,22 +175,44 @@ RSpec.describe GraphqlController do
end
describe '#append_info_to_payload' do
- let(:graphql_query) { graphql_query_for('project', { 'fullPath' => 'foo' }, %w(id name)) }
- let(:mock_store) { { graphql_logs: { foo: :bar } } }
+ let(:query_1) { { query: graphql_query_for('project', { 'fullPath' => 'foo' }, %w(id name), 'getProject_1') } }
+ let(:query_2) { { query: graphql_query_for('project', { 'fullPath' => 'bar' }, %w(id), 'getProject_2') } }
+ let(:graphql_queries) { [query_1, query_2] }
let(:log_payload) { {} }
+ let(:expected_logs) do
+ [
+ {
+ operation_name: 'getProject_1',
+ complexity: 3,
+ depth: 2,
+ used_deprecated_fields: [],
+ used_fields: ['Project.id', 'Project.name', 'Query.project'],
+ variables: '{}'
+ },
+ {
+ operation_name: 'getProject_2',
+ complexity: 2,
+ depth: 2,
+ used_deprecated_fields: [],
+ used_fields: ['Project.id', 'Query.project'],
+ variables: '{}'
+ }
+ ]
+ end
before do
- allow(RequestStore).to receive(:store).and_return(mock_store)
+ RequestStore.clear!
+
allow(controller).to receive(:append_info_to_payload).and_wrap_original do |method, *|
method.call(log_payload)
end
end
it 'appends metadata for logging' do
- post :execute, params: { query: graphql_query, operationName: 'Foo' }
+ post :execute, params: { _json: graphql_queries }
expect(controller).to have_received(:append_info_to_payload)
- expect(log_payload.dig(:metadata, :graphql)).to eq({ operation_name: 'Foo', foo: :bar })
+ expect(log_payload.dig(:metadata, :graphql)).to match_array(expected_logs)
end
end
end
diff --git a/spec/controllers/groups/boards_controller_spec.rb b/spec/controllers/groups/boards_controller_spec.rb
index 6201cddecb0..ca4931bdc90 100644
--- a/spec/controllers/groups/boards_controller_spec.rb
+++ b/spec/controllers/groups/boards_controller_spec.rb
@@ -16,6 +16,15 @@ RSpec.describe Groups::BoardsController do
expect { list_boards }.to change(group.boards, :count).by(1)
end
+ it 'pushes swimlanes_buffered_rendering feature flag' do
+ allow(controller).to receive(:push_frontend_feature_flag).and_call_original
+
+ expect(controller).to receive(:push_frontend_feature_flag)
+ .with(:swimlanes_buffered_rendering, group, default_enabled: :yaml)
+
+ list_boards
+ end
+
context 'when format is HTML' do
it 'renders template' do
list_boards
@@ -98,6 +107,15 @@ RSpec.describe Groups::BoardsController do
describe 'GET show' do
let!(:board) { create(:board, group: group) }
+ it 'pushes swimlanes_buffered_rendering feature flag' do
+ allow(controller).to receive(:push_frontend_feature_flag).and_call_original
+
+ expect(controller).to receive(:push_frontend_feature_flag)
+ .with(:swimlanes_buffered_rendering, group, default_enabled: :yaml)
+
+ read_board board: board
+ end
+
context 'when format is HTML' do
it 'renders template' do
expect { read_board board: board }.to change(BoardGroupRecentVisit, :count).by(1)
diff --git a/spec/controllers/groups/clusters_controller_spec.rb b/spec/controllers/groups/clusters_controller_spec.rb
index 1334372a1f5..93c560b4753 100644
--- a/spec/controllers/groups/clusters_controller_spec.rb
+++ b/spec/controllers/groups/clusters_controller_spec.rb
@@ -641,21 +641,31 @@ RSpec.describe Groups::ClustersController do
describe 'GET show' do
let(:cluster) { create(:cluster, :provided_by_gcp, cluster_type: :group_type, groups: [group]) }
- def go
+ def go(tab: nil)
get :show,
params: {
group_id: group,
- id: cluster
+ id: cluster,
+ tab: tab
}
end
describe 'functionality' do
+ render_views
+
it 'renders view' do
go
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:cluster)).to eq(cluster)
end
+
+ it 'renders integration tab view', :aggregate_failures do
+ go(tab: 'integrations')
+
+ expect(response).to render_template('clusters/clusters/_integrations')
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
describe 'security' do
diff --git a/spec/controllers/groups/dependency_proxy_auth_controller_spec.rb b/spec/controllers/groups/dependency_proxy_auth_controller_spec.rb
index 857e0570621..f67b2022219 100644
--- a/spec/controllers/groups/dependency_proxy_auth_controller_spec.rb
+++ b/spec/controllers/groups/dependency_proxy_auth_controller_spec.rb
@@ -31,6 +31,7 @@ RSpec.describe Groups::DependencyProxyAuthController do
context 'with valid JWT' do
let_it_be(:user) { create(:user) }
+
let(:jwt) { build_jwt(user) }
let(:token_header) { "Bearer #{jwt.encoded}" }
@@ -65,6 +66,7 @@ RSpec.describe Groups::DependencyProxyAuthController do
context 'expired token' do
let_it_be(:user) { create(:user) }
+
let(:jwt) { build_jwt(user, expire_time: Time.zone.now - 1.hour) }
let(:token_header) { "Bearer #{jwt.encoded}" }
diff --git a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
index 83775dcdbdf..9f30a850ca2 100644
--- a/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
+++ b/spec/controllers/groups/dependency_proxy_for_containers_controller_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Groups::DependencyProxyForContainersController do
include DependencyProxyHelpers
let_it_be(:user) { create(:user) }
+
let(:group) { create(:group) }
let(:token_response) { { status: :success, token: 'abcd1234' } }
let(:jwt) { build_jwt(user) }
@@ -102,6 +103,7 @@ RSpec.describe Groups::DependencyProxyForContainersController do
describe 'GET #manifest' do
let_it_be(:manifest) { create(:dependency_proxy_manifest) }
+
let(:pull_response) { { status: :success, manifest: manifest } }
before do
@@ -182,6 +184,7 @@ RSpec.describe Groups::DependencyProxyForContainersController do
describe 'GET #blob' do
let_it_be(:blob) { create(:dependency_proxy_blob) }
+
let(:blob_sha) { blob.file_name.sub('.gz', '') }
let(:blob_response) { { status: :success, blob: blob } }
diff --git a/spec/controllers/groups/group_links_controller_spec.rb b/spec/controllers/groups/group_links_controller_spec.rb
index a2f7161ca41..94d3c1ffa0f 100644
--- a/spec/controllers/groups/group_links_controller_spec.rb
+++ b/spec/controllers/groups/group_links_controller_spec.rb
@@ -9,16 +9,17 @@ RSpec.describe Groups::GroupLinksController do
let(:group_member) { create(:user) }
let!(:project) { create(:project, group: shared_group) }
- around do |example|
- travel_to DateTime.new(2019, 4, 1) { example.run }
- end
-
before do
+ travel_to DateTime.new(2019, 4, 1)
sign_in(user)
shared_with_group.add_developer(group_member)
end
+ after do
+ travel_back
+ end
+
shared_examples 'placeholder is passed as `id` parameter' do |action|
it 'returns a 404' do
post(
diff --git a/spec/controllers/groups/group_members_controller_spec.rb b/spec/controllers/groups/group_members_controller_spec.rb
index ff7a7f55863..19655687028 100644
--- a/spec/controllers/groups/group_members_controller_spec.rb
+++ b/spec/controllers/groups/group_members_controller_spec.rb
@@ -9,8 +9,12 @@ RSpec.describe Groups::GroupMembersController do
let(:group) { create(:group, :public) }
let(:membership) { create(:group_member, group: group) }
- around do |example|
- travel_to DateTime.new(2019, 4, 1) { example.run }
+ before do
+ travel_to DateTime.new(2019, 4, 1)
+ end
+
+ after do
+ travel_back
end
describe 'GET index' do
@@ -288,7 +292,9 @@ RSpec.describe Groups::GroupMembersController do
end
describe 'DELETE destroy' do
- let(:member) { create(:group_member, :developer, group: group) }
+ let(:sub_group) { create(:group, parent: group) }
+ let!(:member) { create(:group_member, :developer, group: group) }
+ let!(:sub_member) { create(:group_member, :developer, group: sub_group, user: member.user) }
before do
sign_in(user)
@@ -324,9 +330,19 @@ RSpec.describe Groups::GroupMembersController do
it '[HTML] removes user from members' do
delete :destroy, params: { group_id: group, id: member }
- expect(response).to set_flash.to 'User was successfully removed from group and any subresources.'
+ expect(response).to set_flash.to 'User was successfully removed from group.'
+ expect(response).to redirect_to(group_group_members_path(group))
+ expect(group.members).not_to include member
+ expect(sub_group.members).to include sub_member
+ end
+
+ it '[HTML] removes user from members including subgroups and projects' do
+ delete :destroy, params: { group_id: group, id: member, remove_sub_memberships: true }
+
+ expect(response).to set_flash.to 'User was successfully removed from group and any subgroups and projects.'
expect(response).to redirect_to(group_group_members_path(group))
expect(group.members).not_to include member
+ expect(sub_group.members).not_to include sub_member
end
it '[JS] removes user from members' do
diff --git a/spec/controllers/groups/labels_controller_spec.rb b/spec/controllers/groups/labels_controller_spec.rb
index b2320615778..90da40cd5f0 100644
--- a/spec/controllers/groups/labels_controller_spec.rb
+++ b/spec/controllers/groups/labels_controller_spec.rb
@@ -46,6 +46,24 @@ RSpec.describe Groups::LabelsController do
it_behaves_like 'disabled when using an external authorization service'
end
+
+ context 'with views rendered' do
+ render_views
+
+ before do
+ get :index, params: { group_id: group.to_param }
+ end
+
+ it 'avoids N+1 queries' do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) { get :index, params: { group_id: group.to_param } }
+
+ create_list(:group_label, 3, group: group)
+
+ # some n+1 queries still exist
+ expect { get :index, params: { group_id: group.to_param } }.not_to exceed_all_query_limit(control.count).with_threshold(10)
+ expect(assigns(:labels).count).to eq(4)
+ end
+ end
end
describe 'POST #toggle_subscription' do
diff --git a/spec/controllers/groups/milestones_controller_spec.rb b/spec/controllers/groups/milestones_controller_spec.rb
index 05e93da18e7..a3c4c47ab15 100644
--- a/spec/controllers/groups/milestones_controller_spec.rb
+++ b/spec/controllers/groups/milestones_controller_spec.rb
@@ -322,7 +322,7 @@ RSpec.describe Groups::MilestonesController do
end
context 'when requesting a redirected path' do
- let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
+ let(:redirect_route) { group.redirect_routes.create!(path: 'old-path') }
it 'redirects to the canonical path' do
get :merge_requests, params: { group_id: redirect_route.path, id: title }
@@ -350,7 +350,7 @@ RSpec.describe Groups::MilestonesController do
end
context 'when the old group path is a substring of the scheme or host' do
- let(:redirect_route) { group.redirect_routes.create(path: 'http') }
+ let(:redirect_route) { group.redirect_routes.create!(path: 'http') }
it 'does not modify the requested host' do
get :merge_requests, params: { group_id: redirect_route.path, id: title }
@@ -362,7 +362,7 @@ RSpec.describe Groups::MilestonesController do
context 'when the old group path is substring of groups' do
# I.e. /groups/oups should not become /grfoo/oups
- let(:redirect_route) { group.redirect_routes.create(path: 'oups') }
+ let(:redirect_route) { group.redirect_routes.create!(path: 'oups') }
it 'does not modify the /groups part of the path' do
get :merge_requests, params: { group_id: redirect_route.path, id: title }
@@ -374,7 +374,7 @@ RSpec.describe Groups::MilestonesController do
context 'when the old group path is substring of groups plus the new path' do
# I.e. /groups/oups/oup should not become /grfoos
- let(:redirect_route) { group.redirect_routes.create(path: 'oups/oup') }
+ let(:redirect_route) { group.redirect_routes.create!(path: 'oups/oup') }
it 'does not modify the /groups part of the path' do
get :merge_requests, params: { group_id: redirect_route.path, id: title }
@@ -411,7 +411,7 @@ RSpec.describe Groups::MilestonesController do
end
context 'when requesting a redirected path' do
- let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
+ let(:redirect_route) { group.redirect_routes.create!(path: 'old-path') }
it 'returns not found' do
post :create,
diff --git a/spec/controllers/groups/registry/repositories_controller_spec.rb b/spec/controllers/groups/registry/repositories_controller_spec.rb
index 70125087f30..35c9a80266e 100644
--- a/spec/controllers/groups/registry/repositories_controller_spec.rb
+++ b/spec/controllers/groups/registry/repositories_controller_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Groups::Registry::RepositoriesController do
let_it_be(:user) { create(:user) }
let_it_be(:guest) { create(:user) }
let_it_be(:group, reload: true) { create(:group) }
+
let(:additional_parameters) { {} }
subject do
diff --git a/spec/controllers/groups/runners_controller_spec.rb b/spec/controllers/groups/runners_controller_spec.rb
index 91ff0a53ec7..d6da9a4e8d0 100644
--- a/spec/controllers/groups/runners_controller_spec.rb
+++ b/spec/controllers/groups/runners_controller_spec.rb
@@ -220,7 +220,7 @@ RSpec.describe Groups::RunnersController do
end
it 'marks the runner as active, ticks the queue, and redirects' do
- runner.update(active: false)
+ runner.update!(active: false)
expect do
post :resume, params: params
@@ -231,7 +231,7 @@ RSpec.describe Groups::RunnersController do
end
it 'marks the project runner as active, ticks the queue, and redirects' do
- runner_project.update(active: false)
+ runner_project.update!(active: false)
expect do
post :resume, params: params_runner_project
@@ -248,7 +248,7 @@ RSpec.describe Groups::RunnersController do
end
it 'responds 404 and does not activate the runner' do
- runner.update(active: false)
+ runner.update!(active: false)
expect do
post :resume, params: params
@@ -259,7 +259,7 @@ RSpec.describe Groups::RunnersController do
end
it 'responds 404 and does not activate the project runner' do
- runner_project.update(active: false)
+ runner_project.update!(active: false)
expect do
post :resume, params: params_runner_project
@@ -278,7 +278,7 @@ RSpec.describe Groups::RunnersController do
end
it 'marks the runner as inactive, ticks the queue, and redirects' do
- runner.update(active: true)
+ runner.update!(active: true)
expect do
post :pause, params: params
@@ -289,7 +289,7 @@ RSpec.describe Groups::RunnersController do
end
it 'marks the project runner as inactive, ticks the queue, and redirects' do
- runner_project.update(active: true)
+ runner_project.update!(active: true)
expect do
post :pause, params: params_runner_project
@@ -306,7 +306,7 @@ RSpec.describe Groups::RunnersController do
end
it 'responds 404 and does not update the runner or queue' do
- runner.update(active: true)
+ runner.update!(active: true)
expect do
post :pause, params: params
@@ -317,7 +317,7 @@ RSpec.describe Groups::RunnersController do
end
it 'responds 404 and does not update the project runner or queue' do
- runner_project.update(active: true)
+ runner_project.update!(active: true)
expect do
post :pause, params: params
diff --git a/spec/controllers/groups/settings/applications_controller_spec.rb b/spec/controllers/groups/settings/applications_controller_spec.rb
new file mode 100644
index 00000000000..0804a5536e0
--- /dev/null
+++ b/spec/controllers/groups/settings/applications_controller_spec.rb
@@ -0,0 +1,219 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::Settings::ApplicationsController do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:application) { create(:oauth_application, owner_id: group.id, owner_type: 'Namespace') }
+
+ before do
+ sign_in(user)
+ end
+
+ describe 'GET #index' do
+ context 'when user is owner' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'renders the application form' do
+ get :index, params: { group_id: group }
+
+ expect(response).to render_template :index
+ expect(assigns[:scopes]).to be_kind_of(Doorkeeper::OAuth::Scopes)
+ end
+ end
+
+ context 'when user is not owner' do
+ before do
+ group.add_maintainer(user)
+ end
+
+ it 'renders a 404' do
+ get :index, params: { group_id: group }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'GET #edit' do
+ context 'when user is owner' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'renders the application form' do
+ get :edit, params: { group_id: group, id: application.id }
+
+ expect(response).to render_template :edit
+ expect(assigns[:scopes]).to be_kind_of(Doorkeeper::OAuth::Scopes)
+ end
+ end
+
+ context 'when user is not owner' do
+ before do
+ group.add_maintainer(user)
+ end
+
+ it 'renders a 404' do
+ get :edit, params: { group_id: group, id: application.id }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'POST #create' do
+ context 'when user is owner' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'creates the application' do
+ create_params = attributes_for(:application, trusted: false, confidential: false, scopes: ['api'])
+
+ expect do
+ post :create, params: { group_id: group, doorkeeper_application: create_params }
+ end.to change { Doorkeeper::Application.count }.by(1)
+
+ application = Doorkeeper::Application.last
+
+ expect(response).to redirect_to(group_settings_application_path(group, application))
+ expect(application).to have_attributes(create_params.except(:uid, :owner_type))
+ end
+
+ it 'renders the application form on errors' do
+ expect do
+ post :create, params: { group_id: group, doorkeeper_application: attributes_for(:application).merge(redirect_uri: nil) }
+ end.not_to change { Doorkeeper::Application.count }
+
+ expect(response).to render_template :index
+ expect(assigns[:scopes]).to be_kind_of(Doorkeeper::OAuth::Scopes)
+ end
+
+ context 'when the params are for a confidential application' do
+ it 'creates a confidential application' do
+ create_params = attributes_for(:application, confidential: true, scopes: ['read_user'])
+
+ expect do
+ post :create, params: { group_id: group, doorkeeper_application: create_params }
+ end.to change { Doorkeeper::Application.count }.by(1)
+
+ application = Doorkeeper::Application.last
+
+ expect(response).to redirect_to(group_settings_application_path(group, application))
+ expect(application).to have_attributes(create_params.except(:uid, :owner_type))
+ end
+ end
+
+ context 'when scopes are not present' do
+ it 'renders the application form on errors' do
+ create_params = attributes_for(:application, trusted: true, confidential: false)
+
+ expect do
+ post :create, params: { group_id: group, doorkeeper_application: create_params }
+ end.not_to change { Doorkeeper::Application.count }
+
+ expect(response).to render_template :index
+ end
+ end
+ end
+
+ context 'when user is not owner' do
+ before do
+ group.add_maintainer(user)
+ end
+
+ it 'renders a 404' do
+ create_params = attributes_for(:application, trusted: true, confidential: false, scopes: ['api'])
+
+ post :create, params: { group_id: group, doorkeeper_application: create_params }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'PATCH #update' do
+ context 'when user is owner' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'updates the application' do
+ doorkeeper_params = { redirect_uri: 'http://example.com/', trusted: true, confidential: false }
+
+ patch :update, params: { group_id: group, id: application.id, doorkeeper_application: doorkeeper_params }
+
+ application.reload
+
+ expect(response).to redirect_to(group_settings_application_path(group, application))
+ expect(application)
+ .to have_attributes(redirect_uri: 'http://example.com/', trusted: false, confidential: false)
+ end
+
+ it 'renders the application form on errors' do
+ patch :update, params: { group_id: group, id: application.id, doorkeeper_application: { redirect_uri: nil } }
+
+ expect(response).to render_template :edit
+ expect(assigns[:scopes]).to be_kind_of(Doorkeeper::OAuth::Scopes)
+ end
+
+ context 'when updating the application to be confidential' do
+ it 'successfully sets the application to confidential' do
+ doorkeeper_params = { confidential: true }
+
+ patch :update, params: { group_id: group, id: application.id, doorkeeper_application: doorkeeper_params }
+
+ application.reload
+
+ expect(response).to redirect_to(group_settings_application_path(group, application))
+ expect(application).to be_confidential
+ end
+ end
+ end
+
+ context 'when user is not owner' do
+ before do
+ group.add_maintainer(user)
+ end
+
+ it 'renders a 404' do
+ doorkeeper_params = { redirect_uri: 'http://example.com/', trusted: true, confidential: false }
+
+ patch :update, params: { group_id: group, id: application.id, doorkeeper_application: doorkeeper_params }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+
+ describe 'DELETE #destroy' do
+ context 'when user is owner' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'deletes the application' do
+ delete :destroy, params: { group_id: group, id: application.id }
+
+ expect(Doorkeeper::Application.exists?(application.id)).to be_falsy
+ expect(response).to redirect_to(group_settings_applications_url(group))
+ end
+ end
+
+ context 'when user is not owner' do
+ before do
+ group.add_maintainer(user)
+ end
+
+ it 'renders a 404' do
+ delete :destroy, params: { group_id: group, id: application.id }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+end
diff --git a/spec/controllers/groups/uploads_controller_spec.rb b/spec/controllers/groups/uploads_controller_spec.rb
index ea6a5ce8841..7dafb813545 100644
--- a/spec/controllers/groups/uploads_controller_spec.rb
+++ b/spec/controllers/groups/uploads_controller_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe Groups::UploadsController do
let!(:upload) { create(:upload, :issuable_upload, :with_file, model: model) }
let(:group) { model }
let(:old_path) { group.to_param + 'old' }
- let!(:redirect_route) { model.redirect_routes.create(path: old_path) }
+ let!(:redirect_route) { model.redirect_routes.create!(path: old_path) }
let(:upload_path) { File.basename(upload.path) }
it 'redirects to a file with the proper extension' do
diff --git a/spec/controllers/groups/variables_controller_spec.rb b/spec/controllers/groups/variables_controller_spec.rb
index a450a4afb02..8c0aa83b9c4 100644
--- a/spec/controllers/groups/variables_controller_spec.rb
+++ b/spec/controllers/groups/variables_controller_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Groups::VariablesController do
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
let_it_be(:variable) { create(:ci_group_variable, group: group) }
+
let(:access_level) { :owner }
before do
diff --git a/spec/controllers/groups_controller_spec.rb b/spec/controllers/groups_controller_spec.rb
index cce61c4534b..f47eac7ac25 100644
--- a/spec/controllers/groups_controller_spec.rb
+++ b/spec/controllers/groups_controller_spec.rb
@@ -312,6 +312,64 @@ RSpec.describe GroupsController, factory_default: :keep do
end
end
end
+
+ context 'when creating a group with captcha protection' do
+ before do
+ sign_in(user)
+
+ stub_application_setting(recaptcha_enabled: true)
+ end
+
+ after do
+ # Avoid test ordering issue and ensure `verify_recaptcha` returns true
+ unless Recaptcha.configuration.skip_verify_env.include?('test')
+ Recaptcha.configuration.skip_verify_env << 'test'
+ end
+ end
+
+ it 'displays an error when the reCAPTCHA is not solved' do
+ allow(controller).to receive(:verify_recaptcha).and_return(false)
+
+ post :create, params: { group: { name: 'new_group', path: "new_group" } }
+
+ expect(response).to render_template(:new)
+ expect(flash[:alert]).to eq(_('There was an error with the reCAPTCHA. Please solve the reCAPTCHA again.'))
+ end
+
+ it 'allows creating a group when the reCAPTCHA is solved' do
+ expect do
+ post :create, params: { group: { name: 'new_group', path: "new_group" } }
+ end.to change { Group.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:found)
+ end
+
+ it 'allows creating a sub-group without checking the captcha' do
+ expect(controller).not_to receive(:verify_recaptcha)
+
+ expect do
+ post :create, params: { group: { name: 'new_group', path: "new_group", parent_id: group.id } }
+ end.to change { Group.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:found)
+ end
+
+ context 'with feature flag switched off' do
+ before do
+ stub_feature_flags(recaptcha_on_top_level_group_creation: false)
+ end
+
+ it 'allows creating a group without the reCAPTCHA' do
+ expect(controller).not_to receive(:verify_recaptcha)
+
+ expect do
+ post :create, params: { group: { name: 'new_group', path: "new_group" } }
+ end.to change { Group.count }.by(1)
+
+ expect(response).to have_gitlab_http_status(:found)
+ end
+ end
+ end
end
describe 'GET #index' do
@@ -556,6 +614,43 @@ RSpec.describe GroupsController, factory_default: :keep do
end
end
+ context "updating :resource_access_token_creation_allowed" do
+ subject do
+ put :update,
+ params: {
+ id: group.to_param,
+ group: { resource_access_token_creation_allowed: false }
+ }
+ end
+
+ context 'when user is a group owner' do
+ before do
+ group.add_owner(user)
+ sign_in(user)
+ end
+
+ it "updates the attribute" do
+ expect { subject }
+ .to change { group.namespace_settings.reload.resource_access_token_creation_allowed }
+ .from(true)
+ .to(false)
+
+ expect(response).to have_gitlab_http_status(:found)
+ end
+ end
+
+ context 'when not a group owner' do
+ before do
+ group.add_developer(user)
+ sign_in(user)
+ end
+
+ it "does not update the attribute" do
+ expect { subject }.not_to change { group.namespace_settings.reload.resource_access_token_creation_allowed }
+ end
+ end
+ end
+
describe '#ensure_canonical_path' do
before do
sign_in(user)
@@ -578,7 +673,7 @@ RSpec.describe GroupsController, factory_default: :keep do
end
context 'when requesting a redirected path' do
- let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
+ let(:redirect_route) { group.redirect_routes.create!(path: 'old-path') }
let(:group_full_path) { redirect_route.path }
it 'redirects to the canonical path' do
@@ -587,7 +682,7 @@ RSpec.describe GroupsController, factory_default: :keep do
end
context 'when the old group path is a substring of the scheme or host' do
- let(:redirect_route) { group.redirect_routes.create(path: 'http') }
+ let(:redirect_route) { group.redirect_routes.create!(path: 'http') }
it 'does not modify the requested host' do
expect(response).to redirect_to(group)
@@ -597,7 +692,7 @@ RSpec.describe GroupsController, factory_default: :keep do
context 'when the old group path is substring of groups' do
# I.e. /groups/oups should not become /grfoo/oups
- let(:redirect_route) { group.redirect_routes.create(path: 'oups') }
+ let(:redirect_route) { group.redirect_routes.create!(path: 'oups') }
it 'does not modify the /groups part of the path' do
expect(response).to redirect_to(group)
@@ -649,7 +744,7 @@ RSpec.describe GroupsController, factory_default: :keep do
end
context 'when requesting a redirected path' do
- let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
+ let(:redirect_route) { group.redirect_routes.create!(path: 'old-path') }
it 'redirects to the canonical path' do
get :issues, params: { id: redirect_route.path }
@@ -659,7 +754,7 @@ RSpec.describe GroupsController, factory_default: :keep do
end
context 'when the old group path is a substring of the scheme or host' do
- let(:redirect_route) { group.redirect_routes.create(path: 'http') }
+ let(:redirect_route) { group.redirect_routes.create!(path: 'http') }
it 'does not modify the requested host' do
get :issues, params: { id: redirect_route.path }
@@ -671,7 +766,7 @@ RSpec.describe GroupsController, factory_default: :keep do
context 'when the old group path is substring of groups' do
# I.e. /groups/oups should not become /grfoo/oups
- let(:redirect_route) { group.redirect_routes.create(path: 'oups') }
+ let(:redirect_route) { group.redirect_routes.create!(path: 'oups') }
it 'does not modify the /groups part of the path' do
get :issues, params: { id: redirect_route.path }
@@ -683,7 +778,7 @@ RSpec.describe GroupsController, factory_default: :keep do
context 'when the old group path is substring of groups plus the new path' do
# I.e. /groups/oups/oup should not become /grfoos
- let(:redirect_route) { group.redirect_routes.create(path: 'oups/oup') }
+ let(:redirect_route) { group.redirect_routes.create!(path: 'oups/oup') }
it 'does not modify the /groups part of the path' do
get :issues, params: { id: redirect_route.path }
@@ -711,7 +806,7 @@ RSpec.describe GroupsController, factory_default: :keep do
end
context 'when requesting a redirected path' do
- let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
+ let(:redirect_route) { group.redirect_routes.create!(path: 'old-path') }
it 'returns not found' do
post :update, params: { id: redirect_route.path, group: { path: 'new_path' } }
@@ -737,7 +832,7 @@ RSpec.describe GroupsController, factory_default: :keep do
end
context 'when requesting a redirected path' do
- let(:redirect_route) { group.redirect_routes.create(path: 'old-path') }
+ let(:redirect_route) { group.redirect_routes.create!(path: 'old-path') }
it 'returns not found' do
delete :destroy, params: { id: redirect_route.path }
diff --git a/spec/controllers/invites_controller_spec.rb b/spec/controllers/invites_controller_spec.rb
index a8d38d12f23..5195f482084 100644
--- a/spec/controllers/invites_controller_spec.rb
+++ b/spec/controllers/invites_controller_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe InvitesController do
let_it_be(:user) { create(:user) }
- let(:member) { create(:project_member, :invited, invite_email: user.email) }
+ let_it_be(:member, reload: true) { create(:project_member, :invited, invite_email: user.email) }
let(:raw_invite_token) { member.raw_invite_token }
let(:project_members) { member.source.users }
let(:md5_member_global_id) { Digest::MD5.hexdigest(member.to_global_id.to_s) }
@@ -77,10 +77,83 @@ RSpec.describe InvitesController do
context 'when not logged in' do
context 'when inviter is a member' do
- it 'is redirected to a new session with invite email param' do
- request
+ context 'when instance allows sign up' do
+ it 'indicates an account can be created in notice' do
+ request
+
+ expect(flash[:notice]).to include('or create an account')
+ end
+
+ context 'when user exists with the invited email' do
+ it 'is redirected to a new session with invite email param' do
+ request
+
+ expect(response).to redirect_to(new_user_session_path(invite_email: member.invite_email))
+ end
+ end
+
+ context 'when user exists with the invited email as secondary email' do
+ before do
+ secondary_email = create(:email, user: user, email: 'foo@example.com')
+ member.update!(invite_email: secondary_email.email)
+ end
+
+ it 'is redirected to a new session with invite email param' do
+ request
+
+ expect(response).to redirect_to(new_user_session_path(invite_email: member.invite_email))
+ end
+ end
+
+ context 'when user does not exist with the invited email' do
+ before do
+ member.update!(invite_email: 'bogus_email@example.com')
+ end
+
+ it 'indicates an account can be created in notice' do
+ request
+
+ expect(flash[:notice]).to include('create an account or sign in')
+ end
+
+ it 'is redirected to a new registration with invite email param' do
+ request
+
+ expect(response).to redirect_to(new_user_registration_path(invite_email: member.invite_email))
+ end
+ end
+ end
+
+ context 'when instance does not allow sign up' do
+ before do
+ stub_application_setting(allow_signup?: false)
+ end
+
+ it 'does not indicate an account can be created in notice' do
+ request
+
+ expect(flash[:notice]).not_to include('or create an account')
+ end
+
+ context 'when user exists with the invited email' do
+ it 'is redirected to a new session with invite email param' do
+ request
+
+ expect(response).to redirect_to(new_user_session_path(invite_email: member.invite_email))
+ end
+ end
+
+ context 'when user does not exist with the invited email' do
+ before do
+ member.update!(invite_email: 'bogus_email@example.com')
+ end
+
+ it 'is redirected to a new session with invite email param' do
+ request
- expect(response).to redirect_to(new_user_session_path(invite_email: member.invite_email))
+ expect(response).to redirect_to(new_user_session_path(invite_email: member.invite_email))
+ end
+ end
end
end
diff --git a/spec/controllers/oauth/authorizations_controller_spec.rb b/spec/controllers/oauth/authorizations_controller_spec.rb
index 2df94a06b3e..21124299b25 100644
--- a/spec/controllers/oauth/authorizations_controller_spec.rb
+++ b/spec/controllers/oauth/authorizations_controller_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe Oauth::AuthorizationsController do
shared_examples "Implicit grant can't be used in confidential application" do
context 'when application is confidential' do
before do
- application.update(confidential: true)
+ application.update!(confidential: true)
params[:response_type] = 'token'
end
@@ -96,7 +96,7 @@ RSpec.describe Oauth::AuthorizationsController do
end
it 'deletes session.user_return_to and redirects when skip authorization' do
- application.update(trusted: true)
+ application.update!(trusted: true)
request.session['user_return_to'] = 'http://example.com'
subject
diff --git a/spec/controllers/omniauth_callbacks_controller_spec.rb b/spec/controllers/omniauth_callbacks_controller_spec.rb
index edd587389cb..4a47a4a2a53 100644
--- a/spec/controllers/omniauth_callbacks_controller_spec.rb
+++ b/spec/controllers/omniauth_callbacks_controller_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller do
let(:extern_uid) { 'my-uid' }
before do
- user.update(failed_attempts: User.maximum_attempts.pred)
+ user.update!(failed_attempts: User.maximum_attempts.pred)
subject.response = ActionDispatch::Response.new
end
@@ -233,7 +233,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller do
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
settings = Gitlab::CurrentSettings.current_application_settings
- settings.update(disabled_oauth_sign_in_sources: [provider.to_s])
+ settings.update!(disabled_oauth_sign_in_sources: [provider.to_s])
end
it 'prevents login via POST' do
@@ -299,7 +299,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller do
before do
stub_omniauth_setting(enabled: true, auto_link_user: true, allow_single_sign_on: ['atlassian_oauth2'])
- user.destroy
+ user.destroy!
end
it 'denies sign-in if sign-up is enabled, but block_auto_created_users is set' do
@@ -381,7 +381,7 @@ RSpec.describe OmniauthCallbacksController, type: :controller do
context 'sign up' do
before do
- user.destroy
+ user.destroy!
end
it 'denies login if sign up is enabled, but block_auto_created_users is set' do
diff --git a/spec/controllers/profiles/notifications_controller_spec.rb b/spec/controllers/profiles/notifications_controller_spec.rb
index 03749366703..1ebf4363ba6 100644
--- a/spec/controllers/profiles/notifications_controller_spec.rb
+++ b/spec/controllers/profiles/notifications_controller_spec.rb
@@ -5,8 +5,8 @@ require 'spec_helper'
RSpec.describe Profiles::NotificationsController do
let(:user) do
create(:user) do |user|
- user.emails.create(email: 'original@example.com', confirmed_at: Time.current)
- user.emails.create(email: 'new@example.com', confirmed_at: Time.current)
+ user.emails.create!(email: 'original@example.com', confirmed_at: Time.current)
+ user.emails.create!(email: 'new@example.com', confirmed_at: Time.current)
user.notification_email = 'original@example.com'
user.save!
end
@@ -21,6 +21,30 @@ RSpec.describe Profiles::NotificationsController do
expect(response).to render_template :show
end
+ context 'when personal projects are present', :request_store do
+ let!(:personal_project_1) { create(:project, namespace: user.namespace) }
+
+ context 'N+1 query check' do
+ render_views
+
+ it 'does not have an N+1' do
+ sign_in(user)
+
+ get :show
+
+ control = ActiveRecord::QueryRecorder.new do
+ get :show
+ end
+
+ create_list(:project, 2, namespace: user.namespace)
+
+ expect do
+ get :show
+ end.not_to exceed_query_limit(control)
+ end
+ end
+ end
+
context 'with groups that do not have notification preferences' do
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
@@ -37,18 +61,24 @@ RSpec.describe Profiles::NotificationsController do
expect(assigns(:group_notifications).map(&:source_id)).to include(subgroup.id)
end
- it 'does not have an N+1' do
- sign_in(user)
+ context 'N+1 query check' do
+ render_views
+
+ it 'does not have an N+1' do
+ sign_in(user)
- control = ActiveRecord::QueryRecorder.new do
get :show
- end
- create_list(:group, 2, parent: group)
+ control = ActiveRecord::QueryRecorder.new do
+ get :show
+ end
- expect do
- get :show
- end.not_to exceed_query_limit(control)
+ create_list(:group, 2, parent: group)
+
+ expect do
+ get :show
+ end.not_to exceed_query_limit(control)
+ end
end
end
diff --git a/spec/controllers/projects/alerting/notifications_controller_spec.rb b/spec/controllers/projects/alerting/notifications_controller_spec.rb
index 3656cfbcc30..fe0c4ce00bf 100644
--- a/spec/controllers/projects/alerting/notifications_controller_spec.rb
+++ b/spec/controllers/projects/alerting/notifications_controller_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Projects::Alerting::NotificationsController do
let_it_be(:project) { create(:project) }
let_it_be(:environment) { create(:environment, project: project) }
+
let(:params) { project_params }
describe 'POST #create' do
@@ -68,6 +69,7 @@ RSpec.describe Projects::Alerting::NotificationsController do
context 'with a corresponding integration' do
context 'with integration parameters specified' do
let_it_be_with_reload(:integration) { create(:alert_management_http_integration, project: project) }
+
let(:params) { project_params(endpoint_identifier: integration.endpoint_identifier, name: integration.name) }
context 'the integration is active' do
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index 69ab9873b90..754b0ddfb94 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -448,7 +448,7 @@ RSpec.describe Projects::ArtifactsController do
context 'with regular branch' do
before do
- pipeline.update(ref: 'master',
+ pipeline.update!(ref: 'master',
sha: project.commit('master').sha)
get :latest_succeeded, params: params_from_ref('master')
@@ -459,7 +459,7 @@ RSpec.describe Projects::ArtifactsController do
context 'with branch name containing slash' do
before do
- pipeline.update(ref: 'improve/awesome',
+ pipeline.update!(ref: 'improve/awesome',
sha: project.commit('improve/awesome').sha)
get :latest_succeeded, params: params_from_ref('improve/awesome')
@@ -470,7 +470,7 @@ RSpec.describe Projects::ArtifactsController do
context 'with branch name and path containing slashes' do
before do
- pipeline.update(ref: 'improve/awesome',
+ pipeline.update!(ref: 'improve/awesome',
sha: project.commit('improve/awesome').sha)
get :latest_succeeded, params: params_from_ref('improve/awesome', job.name, 'file/README.md')
diff --git a/spec/controllers/projects/boards_controller_spec.rb b/spec/controllers/projects/boards_controller_spec.rb
index cde3a8d4761..48a12a27911 100644
--- a/spec/controllers/projects/boards_controller_spec.rb
+++ b/spec/controllers/projects/boards_controller_spec.rb
@@ -22,6 +22,15 @@ RSpec.describe Projects::BoardsController do
expect(assigns(:boards_endpoint)).to eq project_boards_path(project)
end
+ it 'pushes swimlanes_buffered_rendering feature flag' do
+ allow(controller).to receive(:push_frontend_feature_flag).and_call_original
+
+ expect(controller).to receive(:push_frontend_feature_flag)
+ .with(:swimlanes_buffered_rendering, project, default_enabled: :yaml)
+
+ list_boards
+ end
+
context 'when format is HTML' do
it 'renders template' do
list_boards
@@ -116,6 +125,15 @@ RSpec.describe Projects::BoardsController do
describe 'GET show' do
let!(:board) { create(:board, project: project) }
+ it 'pushes swimlanes_buffered_rendering feature flag' do
+ allow(controller).to receive(:push_frontend_feature_flag).and_call_original
+
+ expect(controller).to receive(:push_frontend_feature_flag)
+ .with(:swimlanes_buffered_rendering, project, default_enabled: :yaml)
+
+ read_board board: board
+ end
+
it 'sets boards_endpoint instance variable to a boards path' do
read_board board: board
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index dd3440f7660..2a8feb09780 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -674,22 +674,32 @@ RSpec.describe Projects::ClustersController do
describe 'GET show' do
let(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
- def go
+ def go(tab: nil)
get :show,
params: {
namespace_id: project.namespace,
project_id: project,
- id: cluster
+ id: cluster,
+ tab: tab
}
end
describe 'functionality' do
+ render_views
+
it "renders view" do
go
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:cluster)).to eq(cluster)
end
+
+ it 'renders integration tab view' do
+ go(tab: 'integrations')
+
+ expect(response).to render_template('clusters/clusters/_integrations')
+ expect(response).to have_gitlab_http_status(:ok)
+ end
end
describe 'security' do
diff --git a/spec/controllers/projects/commit_controller_spec.rb b/spec/controllers/projects/commit_controller_spec.rb
index 2d7f036be21..a231b54419e 100644
--- a/spec/controllers/projects/commit_controller_spec.rb
+++ b/spec/controllers/projects/commit_controller_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Projects::CommitController do
+ include ProjectForksHelper
+
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
@@ -295,6 +297,102 @@ RSpec.describe Projects::CommitController do
expect(flash[:alert]).to match('Sorry, we cannot cherry-pick this commit automatically.')
end
end
+
+ context 'when a project has a fork' do
+ let(:project) { create(:project, :repository) }
+ let(:forked_project) { fork_project(project, user, namespace: user.namespace, repository: true) }
+ let(:target_project) { project }
+ let(:create_merge_request) { nil }
+
+ def send_request
+ post(:cherry_pick,
+ params: {
+ namespace_id: forked_project.namespace,
+ project_id: forked_project,
+ target_project_id: target_project.id,
+ start_branch: 'feature',
+ id: forked_project.commit.id,
+ create_merge_request: create_merge_request
+ })
+ end
+
+ def merge_request_url(source_project, branch)
+ project_new_merge_request_path(
+ source_project,
+ merge_request: {
+ source_project_id: source_project.id,
+ target_project_id: project.id,
+ source_branch: branch,
+ target_branch: 'feature'
+ }
+ )
+ end
+
+ before do
+ forked_project.add_maintainer(user)
+ end
+
+ it 'successfully cherry picks a commit from fork to upstream project' do
+ send_request
+
+ expect(response).to redirect_to project_commits_path(project, 'feature')
+ expect(flash[:notice]).to eq('The commit has been successfully cherry-picked into feature.')
+ expect(project.commit('feature').message).to include(forked_project.commit.id)
+ end
+
+ context 'when the cherry pick is performed via merge request' do
+ let(:create_merge_request) { true }
+
+ it 'successfully cherry picks a commit from fork to a cherry pick branch' do
+ branch = forked_project.commit.cherry_pick_branch_name
+ send_request
+
+ expect(response).to redirect_to merge_request_url(project, branch)
+ expect(flash[:notice]).to start_with("The commit has been successfully cherry-picked into #{branch}")
+ expect(project.commit(branch).message).to include(forked_project.commit.id)
+ end
+ end
+
+ context 'when a user cannot push to upstream project' do
+ let(:create_merge_request) { true }
+
+ before do
+ project.add_reporter(user)
+ end
+
+ it 'cherry picks a commit to the fork' do
+ branch = forked_project.commit.cherry_pick_branch_name
+ send_request
+
+ expect(response).to redirect_to merge_request_url(forked_project, branch)
+ expect(flash[:notice]).to start_with("The commit has been successfully cherry-picked into #{branch}")
+ expect(project.commit('feature').message).not_to include(forked_project.commit.id)
+ expect(forked_project.commit(branch).message).to include(forked_project.commit.id)
+ end
+ end
+
+ context 'when a user does not have access to the target project' do
+ let(:target_project) { create(:project, :private) }
+
+ it 'returns a not found response' do
+ send_request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'when the pick_into_project feature flag is disabled' do
+ before do
+ stub_feature_flags(pick_into_project: false)
+ end
+
+ it 'does not cherry pick a commit from fork to upstream' do
+ send_request
+
+ expect(project.commit('feature').message).not_to include(forked_project.commit.id)
+ end
+ end
+ end
end
describe 'GET diff_for_path' do
diff --git a/spec/controllers/projects/cycle_analytics/events_controller_spec.rb b/spec/controllers/projects/cycle_analytics/events_controller_spec.rb
index f940da7ea35..6bbdda89b14 100644
--- a/spec/controllers/projects/cycle_analytics/events_controller_spec.rb
+++ b/spec/controllers/projects/cycle_analytics/events_controller_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe Projects::CycleAnalytics::EventsController do
let(:issue) { create(:issue, project: project, created_at: 9.days.ago) }
before do
- issue.update(milestone: milestone)
+ issue.update!(milestone: milestone)
end
it 'is not empty' do
diff --git a/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb b/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb
index e0f86876f67..c78b838d0df 100644
--- a/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb
+++ b/spec/controllers/projects/design_management/designs/raw_images_controller_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Projects::DesignManagement::Designs::RawImagesController do
let_it_be(:project) { create(:project, :private) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:viewer) { issue.author }
+
let(:design_id) { design.id }
let(:sha) { design.versions.first.sha }
let(:filename) { design.filename }
@@ -44,17 +45,6 @@ RSpec.describe Projects::DesignManagement::Designs::RawImagesController do
expect(response).to have_gitlab_http_status(:ok)
end
- context 'when the feature flag attachment_with_filename is disabled' do
- it 'serves files with just `attachment` in the disposition header' do
- stub_feature_flags(attachment_with_filename: false)
-
- subject
-
- expect(response.header['Content-Disposition']).to eq('attachment')
- expect(response).to have_gitlab_http_status(:ok)
- end
- end
-
it 'serves files with Workhorse' do
subject
diff --git a/spec/controllers/projects/design_management/designs/resized_image_controller_spec.rb b/spec/controllers/projects/design_management/designs/resized_image_controller_spec.rb
index 96ecbaf55b6..56c0ef592ca 100644
--- a/spec/controllers/projects/design_management/designs/resized_image_controller_spec.rb
+++ b/spec/controllers/projects/design_management/designs/resized_image_controller_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Projects::DesignManagement::Designs::ResizedImageController do
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:viewer) { issue.author }
let_it_be(:size) { :v432x230 }
+
let(:design) { create(:design, :with_smaller_image_versions, issue: issue, versions_count: 2) }
let(:design_id) { design.id }
let(:sha) { design.versions.first.sha }
diff --git a/spec/controllers/projects/discussions_controller_spec.rb b/spec/controllers/projects/discussions_controller_spec.rb
index 8a793e29bfa..0c8677ea4b9 100644
--- a/spec/controllers/projects/discussions_controller_spec.rb
+++ b/spec/controllers/projects/discussions_controller_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe Projects::DiscussionsController do
context "when the discussion is not resolvable" do
before do
- note.update(system: true)
+ note.update!(system: true)
end
it "returns status 404" do
@@ -168,7 +168,7 @@ RSpec.describe Projects::DiscussionsController do
context "when the discussion is not resolvable" do
before do
- note.update(system: true)
+ note.update!(system: true)
end
it "returns status 404" do
diff --git a/spec/controllers/projects/environments_controller_spec.rb b/spec/controllers/projects/environments_controller_spec.rb
index 83ad36b217f..4cb90edb742 100644
--- a/spec/controllers/projects/environments_controller_spec.rb
+++ b/spec/controllers/projects/environments_controller_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Projects::EnvironmentsController do
let_it_be(:project) { create(:project) }
let_it_be(:maintainer) { create(:user, name: 'main-dos').tap { |u| project.add_maintainer(u) } }
let_it_be(:reporter) { create(:user, name: 'repo-dos').tap { |u| project.add_reporter(u) } }
+
let(:user) { maintainer }
let!(:environment) { create(:environment, name: 'production', project: project) }
diff --git a/spec/controllers/projects/feature_flags_controller_spec.rb b/spec/controllers/projects/feature_flags_controller_spec.rb
index f69cc0ddfd8..cd7d1ea0e8a 100644
--- a/spec/controllers/projects/feature_flags_controller_spec.rb
+++ b/spec/controllers/projects/feature_flags_controller_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Projects::FeatureFlagsController do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
+
let(:user) { developer }
before_all do
diff --git a/spec/controllers/projects/forks_controller_spec.rb b/spec/controllers/projects/forks_controller_spec.rb
index 7da3d403b53..8ca3009e0c7 100644
--- a/spec/controllers/projects/forks_controller_spec.rb
+++ b/spec/controllers/projects/forks_controller_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe Projects::ForksController do
context 'when fork is internal' do
before do
- forked_project.update(visibility_level: Project::INTERNAL, group: group)
+ forked_project.update!(visibility_level: Project::INTERNAL, group: group)
end
it 'forks counts are correct' do
@@ -86,7 +86,7 @@ RSpec.describe Projects::ForksController do
context 'when fork is private' do
before do
- forked_project.update(visibility_level: Project::PRIVATE, group: group)
+ forked_project.update!(visibility_level: Project::PRIVATE, group: group)
end
shared_examples 'forks counts' do
@@ -153,8 +153,11 @@ RSpec.describe Projects::ForksController do
end
describe 'GET new' do
- subject do
+ let(:format) { :html }
+
+ subject(:do_request) do
get :new,
+ format: format,
params: {
namespace_id: project.namespace,
project_id: project
@@ -166,24 +169,32 @@ RSpec.describe Projects::ForksController do
sign_in(user)
end
- context 'when JSON requested' do
- it 'responds with available groups' do
- get :new,
- format: :json,
- params: {
- namespace_id: project.namespace,
- project_id: project
- }
+ it 'responds with status 200' do
+ do_request
- expect(json_response['namespaces'].length).to eq(1)
- expect(json_response['namespaces'].first['id']).to eq(group.id)
- end
+ expect(response).to have_gitlab_http_status(:ok)
end
- it 'responds with status 200' do
- subject
+ context 'when JSON is requested' do
+ let(:format) { :json }
- expect(response).to have_gitlab_http_status(:ok)
+ it 'responds with user namespace + groups' do
+ do_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['namespaces'].length).to eq(2)
+ expect(json_response['namespaces'][0]['id']).to eq(user.namespace.id)
+ expect(json_response['namespaces'][1]['id']).to eq(group.id)
+ end
+
+ it 'responds with group only when fork_project_form feature flag is disabled' do
+ stub_feature_flags(fork_project_form: false)
+ do_request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['namespaces'].length).to eq(1)
+ expect(json_response['namespaces'][0]['id']).to eq(group.id)
+ end
end
end
diff --git a/spec/controllers/projects/group_links_controller_spec.rb b/spec/controllers/projects/group_links_controller_spec.rb
index 084a807e162..d514c486f60 100644
--- a/spec/controllers/projects/group_links_controller_spec.rb
+++ b/spec/controllers/projects/group_links_controller_spec.rb
@@ -8,15 +8,16 @@ RSpec.describe Projects::GroupLinksController do
let_it_be(:project) { create(:project, :private, group: group2) }
let_it_be(:user) { create(:user) }
- around do |example|
- travel_to DateTime.new(2019, 4, 1) { example.run }
- end
-
before do
+ travel_to DateTime.new(2019, 4, 1)
project.add_maintainer(user)
sign_in(user)
end
+ after do
+ travel_back
+ end
+
describe '#create' do
shared_context 'link project to group' do
before do
@@ -31,7 +32,7 @@ RSpec.describe Projects::GroupLinksController do
context 'when project is not allowed to be shared with a group' do
before do
- group.update(share_with_group_lock: false)
+ group.update!(share_with_group_lock: false)
end
include_context 'link project to group'
diff --git a/spec/controllers/projects/imports_controller_spec.rb b/spec/controllers/projects/imports_controller_spec.rb
index 5e09a50aa36..65a80b9e8ec 100644
--- a/spec/controllers/projects/imports_controller_spec.rb
+++ b/spec/controllers/projects/imports_controller_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe Projects::ImportsController do
context 'when import is in progress' do
before do
- import_state.update(status: :started)
+ import_state.update!(status: :started)
end
it 'renders template' do
@@ -65,7 +65,7 @@ RSpec.describe Projects::ImportsController do
context 'when import failed' do
before do
- import_state.update(status: :failed)
+ import_state.update!(status: :failed)
end
it 'redirects to new_namespace_project_import_path' do
@@ -77,7 +77,7 @@ RSpec.describe Projects::ImportsController do
context 'when import finished' do
before do
- import_state.update(status: :finished)
+ import_state.update!(status: :finished)
end
context 'when project is a fork' do
@@ -126,7 +126,7 @@ RSpec.describe Projects::ImportsController do
context 'when import never happened' do
before do
- import_state.update(status: :none)
+ import_state.update!(status: :none)
end
it 'redirects to namespace_project_path' do
diff --git a/spec/controllers/projects/incidents_controller_spec.rb b/spec/controllers/projects/incidents_controller_spec.rb
index ddd15b9b1dd..460821634b0 100644
--- a/spec/controllers/projects/incidents_controller_spec.rb
+++ b/spec/controllers/projects/incidents_controller_spec.rb
@@ -69,6 +69,7 @@ RSpec.describe Projects::IncidentsController do
end
let_it_be(:resource) { create(:incident, project: project) }
+
let(:user) { developer }
it 'renders incident page' do
diff --git a/spec/controllers/projects/issues_controller_spec.rb b/spec/controllers/projects/issues_controller_spec.rb
index 74062038248..3e016a5e8d2 100644
--- a/spec/controllers/projects/issues_controller_spec.rb
+++ b/spec/controllers/projects/issues_controller_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Projects::IssuesController do
let_it_be(:project, reload: true) { create(:project) }
let_it_be(:user, reload: true) { create(:user) }
+
let(:issue) { create(:issue, project: project) }
let(:spam_action_response_fields) { { 'stub_spam_action_response_fields' => true } }
@@ -44,7 +45,7 @@ RSpec.describe Projects::IssuesController do
let_it_be(:issue) { create(:issue, project: new_project) }
before do
- project.route.destroy
+ project.route.destroy!
new_project.redirect_routes.create!(path: project.full_path)
new_project.add_developer(user)
end
@@ -63,23 +64,6 @@ RSpec.describe Projects::IssuesController do
expect(response).to have_gitlab_http_status(:moved_permanently)
end
end
-
- describe 'the null hypothesis experiment', :experiment do
- before do
- stub_experiments(null_hypothesis: :candidate)
- end
-
- it 'defines the expected before actions' do
- expect(controller).to use_before_action(:run_null_hypothesis_experiment)
- end
-
- it 'assigns the candidate experience and tracks the event' do
- expect(experiment(:null_hypothesis)).to track('index').on_any_instance.for(:candidate)
- .with_context(project: project)
-
- get :index, params: { namespace_id: project.namespace, project_id: project }
- end
- end
end
context 'internal issue tracker' do
@@ -209,6 +193,32 @@ RSpec.describe Projects::IssuesController do
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['issue_email_participants']).to contain_exactly({ "email" => participants[0].email }, { "email" => participants[1].email })
end
+
+ context 'with the invite_members_in_comment experiment', :experiment do
+ context 'when user can invite' do
+ before do
+ stub_experiments(invite_members_in_comment: :invite_member_link)
+ project.add_maintainer(user)
+ end
+
+ it 'assigns the candidate experience and tracks the event' do
+ expect(experiment(:invite_members_in_comment)).to track(:view, property: project.root_ancestor.id.to_s)
+ .for(:invite_member_link)
+ .with_context(namespace: project.root_ancestor)
+ .on_next_instance
+
+ get :show, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
+ end
+ end
+
+ context 'when user cannot invite' do
+ it 'does not track the event' do
+ expect(experiment(:invite_members_in_comment)).not_to track(:view)
+
+ get :show, params: { namespace_id: project.namespace, project_id: project, id: issue.iid }
+ end
+ end
+ end
end
describe 'GET #new' do
@@ -342,6 +352,7 @@ RSpec.describe Projects::IssuesController do
end
let_it_be(:issue) { create(:issue, project: project) }
+
let(:developer) { user }
let(:params) do
{
@@ -685,7 +696,7 @@ RSpec.describe Projects::IssuesController do
issue.update!(last_edited_by: deleted_user, last_edited_at: Time.current)
- deleted_user.destroy
+ deleted_user.destroy!
sign_in(user)
end
@@ -1038,10 +1049,10 @@ RSpec.describe Projects::IssuesController do
labels = create_list(:label, 10, project: project).map(&:to_reference)
issue = create(:issue, project: project, description: 'Test issue')
- control_count = ActiveRecord::QueryRecorder.new { issue.update(description: [issue.description, label].join(' ')) }.count
+ control_count = ActiveRecord::QueryRecorder.new { issue.update!(description: [issue.description, label].join(' ')) }.count
# Follow-up to get rid of this `2 * label.count` requirement: https://gitlab.com/gitlab-org/gitlab-foss/issues/52230
- expect { issue.update(description: [issue.description, labels].join(' ')) }
+ expect { issue.update!(description: [issue.description, labels].join(' ')) }
.not_to exceed_query_limit(control_count + 2 * labels.count)
end
@@ -1158,6 +1169,7 @@ RSpec.describe Projects::IssuesController do
context 'resolving discussions in MergeRequest' do
let_it_be(:discussion) { create(:diff_note_on_merge_request).to_discussion }
+
let(:merge_request) { discussion.noteable }
let(:project) { merge_request.source_project }
@@ -1420,9 +1432,7 @@ RSpec.describe Projects::IssuesController do
expect_next_instance_of(Spam::AkismetService) do |akismet_service|
expect(akismet_service).to receive_messages(submit_spam: true)
end
- expect_next_instance_of(ApplicationSetting) do |setting|
- expect(setting).to receive_messages(akismet_enabled: true)
- end
+ stub_application_setting(akismet_enabled: true)
end
def post_spam
@@ -1490,12 +1500,6 @@ RSpec.describe Projects::IssuesController do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response).to eq({ 'errors' => 'Destroy confirmation not provided for issue' })
end
-
- it 'delegates the update of the todos count cache to TodoService' do
- expect_any_instance_of(TodoService).to receive(:destroy_target).with(issue).once
-
- delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: issue.iid, destroy_confirm: true }
- end
end
end
@@ -1623,6 +1627,7 @@ RSpec.describe Projects::IssuesController do
describe 'POST #import_csv' do
let_it_be(:project) { create(:project, :public) }
+
let(:file) { fixture_file_upload('spec/fixtures/csv_comma.csv') }
context 'unauthorized' do
@@ -1822,6 +1827,7 @@ RSpec.describe Projects::IssuesController do
context 'with cross-reference system note', :request_store do
let_it_be(:new_issue) { create(:issue) }
+
let(:cross_reference) { "mentioned in #{new_issue.to_reference(issue.project)}" }
before do
@@ -1899,7 +1905,7 @@ RSpec.describe Projects::IssuesController do
before do
sign_in(user)
- project.route.destroy
+ project.route.destroy!
new_project.redirect_routes.create!(path: project.full_path)
new_project.add_developer(user)
end
diff --git a/spec/controllers/projects/jobs_controller_spec.rb b/spec/controllers/projects/jobs_controller_spec.rb
index 80e1268cb01..a7a36d3a074 100644
--- a/spec/controllers/projects/jobs_controller_spec.rb
+++ b/spec/controllers/projects/jobs_controller_spec.rb
@@ -1275,6 +1275,7 @@ RSpec.describe Projects::JobsController, :clean_gitlab_redis_shared_state do
let_it_be(:reporter) { create(:user) }
let_it_be(:guest) { create(:user) }
let_it_be(:project) { create(:project, :private, :repository, namespace: owner.namespace) }
+
let(:user) { maintainer }
let(:pipeline) { create(:ci_pipeline, project: project, source: :webide, config_source: :webide_source, user: user) }
let(:job) { create(:ci_build, :running, :with_runner_session, pipeline: pipeline, user: user) }
diff --git a/spec/controllers/projects/labels_controller_spec.rb b/spec/controllers/projects/labels_controller_spec.rb
index f452c22a5ca..081927ea73c 100644
--- a/spec/controllers/projects/labels_controller_spec.rb
+++ b/spec/controllers/projects/labels_controller_spec.rb
@@ -65,7 +65,7 @@ RSpec.describe Projects::LabelsController do
end
it 'does not include group labels when project does not belong to a group' do
- project.update(namespace: create(:namespace))
+ project.update!(namespace: create(:namespace))
list_labels
@@ -93,6 +93,26 @@ RSpec.describe Projects::LabelsController do
end
end
+ context 'with views rendered' do
+ render_views
+
+ before do
+ list_labels
+ end
+
+ it 'avoids N+1 queries' do
+ control = ActiveRecord::QueryRecorder.new(skip_cached: false) { list_labels }
+
+ create_list(:label, 3, project: project)
+ create_list(:group_label, 3, group: group)
+
+ # Some N+1 queries still exist:
+ # calls to get the max project authorization access level
+ expect { list_labels }.not_to exceed_all_query_limit(control.count).with_threshold(25)
+ expect(assigns(:labels).count).to eq(10)
+ end
+ end
+
def list_labels
get :index, params: { namespace_id: project.namespace.to_param, project_id: project }
end
@@ -221,7 +241,7 @@ RSpec.describe Projects::LabelsController do
end
context 'when requesting a redirected path' do
- let_it_be(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
+ let_it_be(:redirect_route) { project.redirect_routes.create!(path: project.full_path + 'old') }
it 'redirects to the canonical path' do
get :index, params: { namespace_id: project.namespace, project_id: project.to_param + 'old' }
@@ -267,7 +287,7 @@ RSpec.describe Projects::LabelsController do
end
context 'when requesting a redirected path' do
- let_it_be(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
+ let_it_be(:redirect_route) { project.redirect_routes.create!(path: project.full_path + 'old') }
it 'returns not found' do
post :generate, params: { namespace_id: project.namespace, project_id: project.to_param + 'old' }
diff --git a/spec/controllers/projects/merge_requests/content_controller_spec.rb b/spec/controllers/projects/merge_requests/content_controller_spec.rb
index 67d3ef6f4f0..0eaa528a330 100644
--- a/spec/controllers/projects/merge_requests/content_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/content_controller_spec.rb
@@ -11,13 +11,13 @@ RSpec.describe Projects::MergeRequests::ContentController do
sign_in(user)
end
- def do_request(action = :cached_widget)
+ def do_request(action = :cached_widget, params = {})
get action, params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.iid,
format: :json
- }
+ }.merge(params)
end
context 'user has access to the project' do
@@ -42,6 +42,10 @@ RSpec.describe Projects::MergeRequests::ContentController do
end
describe 'GET widget' do
+ before do
+ merge_request.mark_as_unchecked!
+ end
+
it 'checks whether the MR can be merged' do
controller.instance_variable_set(:@merge_request, merge_request)
@@ -53,6 +57,17 @@ RSpec.describe Projects::MergeRequests::ContentController do
expect(response.headers['Poll-Interval']).to eq('10000')
end
+ context 'when async_mergeability_check param is passed' do
+ it 'checks mergeability asynchronously' do
+ expect_next_instance_of(MergeRequests::MergeabilityCheckService) do |service|
+ expect(service).not_to receive(:execute)
+ expect(service).to receive(:async_execute).and_call_original
+ end
+
+ do_request(:widget, { async_mergeability_check: true })
+ end
+ end
+
context 'merged merge request' do
let(:merge_request) do
create(:merged_merge_request, :with_test_reports, target_project: project, source_project: project)
diff --git a/spec/controllers/projects/merge_requests/creations_controller_spec.rb b/spec/controllers/projects/merge_requests/creations_controller_spec.rb
index 091a44130a1..df2023b7356 100644
--- a/spec/controllers/projects/merge_requests/creations_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/creations_controller_spec.rb
@@ -213,6 +213,38 @@ RSpec.describe Projects::MergeRequests::CreationsController do
expect(assigns(:commit)).to be_nil
expect(response).to have_gitlab_http_status(:ok)
end
+
+ context 'when no target_project_id is provided' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'selects itself as a target project' do
+ get :branch_to,
+ params: {
+ namespace_id: project.namespace,
+ project_id: project,
+ ref: 'master'
+ }
+
+ expect(assigns(:target_project)).to eq(project)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ context 'when the project is a fork' do
+ it 'defaults to the upstream project as the target project' do
+ get :branch_to,
+ params: {
+ namespace_id: fork_project.namespace,
+ project_id: fork_project,
+ ref: 'master'
+ }
+
+ expect(assigns(:target_project)).to eq(project)
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
end
describe 'POST create' do
diff --git a/spec/controllers/projects/merge_requests/drafts_controller_spec.rb b/spec/controllers/projects/merge_requests/drafts_controller_spec.rb
index af39d4dec72..580211893dc 100644
--- a/spec/controllers/projects/merge_requests/drafts_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/drafts_controller_spec.rb
@@ -297,7 +297,7 @@ RSpec.describe Projects::MergeRequests::DraftsController do
expect { post :publish, params: params }.to change { Note.count }.by(0).and change { DraftNote.count }.by(0)
end
- it 'publishes a draft note with quick actions and applies them' do
+ it 'publishes a draft note with quick actions and applies them', :sidekiq_inline do
project.add_developer(user2)
create(:draft_note, merge_request: merge_request, author: user,
note: "/assign #{user2.to_reference}")
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index 93d5e7eff6c..337a4a19b2e 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Projects::MergeRequestsController do
let_it_be_with_refind(:project) { create(:project, :repository) }
let_it_be_with_reload(:project_public_with_private_builds) { create(:project, :repository, :public, :builds_private) }
+
let(:user) { project.owner }
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
@@ -40,6 +41,32 @@ RSpec.describe Projects::MergeRequestsController do
get :show, params: params.merge(extra_params)
end
+ context 'with the invite_members_in_comment experiment', :experiment do
+ context 'when user can invite' do
+ before do
+ stub_experiments(invite_members_in_comment: :invite_member_link)
+ project.add_maintainer(user)
+ end
+
+ it 'assigns the candidate experience and tracks the event' do
+ expect(experiment(:invite_members_in_comment)).to track(:view, property: project.root_ancestor.id.to_s)
+ .for(:invite_member_link)
+ .with_context(namespace: project.root_ancestor)
+ .on_next_instance
+
+ go
+ end
+ end
+
+ context 'when user cannot invite' do
+ it 'does not track the event' do
+ expect(experiment(:invite_members_in_comment)).not_to track(:view)
+
+ go
+ end
+ end
+ end
+
context 'with view param' do
before do
go(view: 'parallel')
@@ -55,13 +82,19 @@ RSpec.describe Projects::MergeRequestsController do
merge_request.mark_as_unchecked!
end
- it 'checks mergeability asynchronously' do
- expect_next_instance_of(MergeRequests::MergeabilityCheckService) do |service|
- expect(service).not_to receive(:execute)
- expect(service).to receive(:async_execute)
+ context 'when the check_mergeability_async_in_widget feature flag is disabled' do
+ before do
+ stub_feature_flags(check_mergeability_async_in_widget: false)
end
- go
+ it 'checks mergeability asynchronously' do
+ expect_next_instance_of(MergeRequests::MergeabilityCheckService) do |service|
+ expect(service).not_to receive(:execute)
+ expect(service).to receive(:async_execute)
+ end
+
+ go
+ end
end
end
@@ -695,12 +728,6 @@ RSpec.describe Projects::MergeRequestsController do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response).to eq({ 'errors' => 'Destroy confirmation not provided for merge request' })
end
-
- it 'delegates the update of the todos count cache to TodoService' do
- expect_any_instance_of(TodoService).to receive(:destroy_target).with(merge_request).once
-
- delete :destroy, params: { namespace_id: project.namespace, project_id: project, id: merge_request.iid, destroy_confirm: true }
- end
end
end
diff --git a/spec/controllers/projects/milestones_controller_spec.rb b/spec/controllers/projects/milestones_controller_spec.rb
index b93f1b41a7e..b62353784b3 100644
--- a/spec/controllers/projects/milestones_controller_spec.rb
+++ b/spec/controllers/projects/milestones_controller_spec.rb
@@ -105,7 +105,7 @@ RSpec.describe Projects::MilestonesController do
context 'with a single group ancestor' do
before do
- project.update(namespace: group)
+ project.update!(namespace: group)
get :index, params: { namespace_id: project.namespace.id, project_id: project.id }, format: :json
end
@@ -122,7 +122,7 @@ RSpec.describe Projects::MilestonesController do
let!(:subgroup_milestone) { create(:milestone, group: subgroup) }
before do
- project.update(namespace: subgroup)
+ project.update!(namespace: subgroup)
get :index, params: { namespace_id: project.namespace.id, project_id: project.id }, format: :json
end
@@ -158,7 +158,7 @@ RSpec.describe Projects::MilestonesController do
let(:group) { create(:group) }
before do
- project.update(namespace: group)
+ project.update!(namespace: group)
end
context 'when user does not have permission to promote milestone' do
@@ -234,7 +234,7 @@ RSpec.describe Projects::MilestonesController do
end
it 'renders 404' do
- project.update(namespace: user.namespace)
+ project.update!(namespace: user.namespace)
post :promote, params: { namespace_id: project.namespace.id, project_id: project.id, id: milestone.iid }
@@ -253,7 +253,7 @@ RSpec.describe Projects::MilestonesController do
before do
project.add_guest(guest_user)
sign_in(guest_user)
- issue.update(assignee_ids: issue_assignee.id)
+ issue.update!(assignee_ids: issue_assignee.id)
end
context "when issue is not confidential" do
@@ -269,7 +269,7 @@ RSpec.describe Projects::MilestonesController do
context "when issue is confidential" do
before do
- issue.update(confidential: true)
+ issue.update!(confidential: true)
end
it 'shows no milestone participants' do
diff --git a/spec/controllers/projects/notes_controller_spec.rb b/spec/controllers/projects/notes_controller_spec.rb
index add249e2c74..d92862f0ca3 100644
--- a/spec/controllers/projects/notes_controller_spec.rb
+++ b/spec/controllers/projects/notes_controller_spec.rb
@@ -334,7 +334,7 @@ RSpec.describe Projects::NotesController do
before do
project.update_attribute(:visibility_level, project_visibility)
- project.project_feature.update(merge_requests_access_level: merge_requests_access_level)
+ project.project_feature.update!(merge_requests_access_level: merge_requests_access_level)
sign_in(user)
end
@@ -917,7 +917,7 @@ RSpec.describe Projects::NotesController do
context "when the note is not resolvable" do
before do
- note.update(system: true)
+ note.update!(system: true)
end
it "returns status 404" do
@@ -980,7 +980,7 @@ RSpec.describe Projects::NotesController do
context "when the note is not resolvable" do
before do
- note.update(system: true)
+ note.update!(system: true)
end
it "returns status 404" do
diff --git a/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb b/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb
index 8a344a72120..923581d9367 100644
--- a/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb
+++ b/spec/controllers/projects/performance_monitoring/dashboards_controller_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Projects::PerformanceMonitoring::DashboardsController do
let_it_be(:user) { create(:user) }
let_it_be(:namespace) { create(:namespace) }
+
let!(:project) { create(:project, :repository, name: 'dashboard-project', namespace: namespace) }
let(:repository) { project.repository }
let(:branch) { double(name: branch_name) }
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index e1405660ccb..753223c5a4f 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -7,13 +7,14 @@ RSpec.describe Projects::PipelinesController do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public, :repository) }
+
let(:feature) { ProjectFeature::ENABLED }
before do
allow(Sidekiq.logger).to receive(:info)
stub_not_protect_default_branch
project.add_developer(user)
- project.project_feature.update(builds_access_level: feature)
+ project.project_feature.update!(builds_access_level: feature)
sign_in(user)
end
@@ -272,6 +273,23 @@ RSpec.describe Projects::PipelinesController do
end
end
+ describe 'GET #index' do
+ context 'pipeline_empty_state_templates experiment' do
+ before do
+ stub_application_setting(auto_devops_enabled: false)
+ end
+
+ it 'tracks the view', :experiment do
+ expect(experiment(:pipeline_empty_state_templates))
+ .to track(:view, value: project.namespace_id)
+ .with_context(actor: user)
+ .on_next_instance
+
+ get :index, params: { namespace_id: project.namespace, project_id: project }
+ end
+ end
+ end
+
describe 'GET show.json' do
let(:pipeline) { create(:ci_pipeline, project: project) }
@@ -628,44 +646,6 @@ RSpec.describe Projects::PipelinesController do
end
end
- describe 'GET stages_ajax.json' do
- let(:pipeline) { create(:ci_pipeline, project: project) }
-
- context 'when accessing existing stage' do
- before do
- create(:ci_build, pipeline: pipeline, stage: 'build')
-
- get_stage_ajax('build')
- end
-
- it 'returns html source for stage dropdown' do
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to render_template('projects/pipelines/_stage')
- expect(json_response).to include('html')
- end
- end
-
- context 'when accessing unknown stage' do
- before do
- get_stage_ajax('test')
- end
-
- it 'responds with not found' do
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
- def get_stage_ajax(name)
- get :stage_ajax, params: {
- namespace_id: project.namespace,
- project_id: project,
- id: pipeline.id,
- stage: name
- },
- format: :json
- end
- end
-
describe 'GET status.json' do
let(:pipeline) { create(:ci_pipeline, project: project) }
let(:status) { pipeline.detailed_status(double('user')) }
@@ -702,7 +682,7 @@ RSpec.describe Projects::PipelinesController do
before do
project.add_developer(user)
- project.project_feature.update(builds_access_level: feature)
+ project.project_feature.update!(builds_access_level: feature)
end
context 'with a valid .gitlab-ci.yml file' do
@@ -721,7 +701,7 @@ RSpec.describe Projects::PipelinesController do
pipeline = project.ci_pipelines.last
expected_redirect_path = Gitlab::Routing.url_helpers.project_pipeline_path(project, pipeline)
- expect(pipeline).to be_pending
+ expect(pipeline).to be_created
expect(response).to redirect_to(expected_redirect_path)
end
end
@@ -777,7 +757,7 @@ RSpec.describe Projects::PipelinesController do
before do
project.add_developer(user)
- project.project_feature.update(builds_access_level: feature)
+ project.project_feature.update!(builds_access_level: feature)
end
context 'with a valid .gitlab-ci.yml file' do
diff --git a/spec/controllers/projects/pipelines_settings_controller_spec.rb b/spec/controllers/projects/pipelines_settings_controller_spec.rb
index ad631b7c3da..39fb153e802 100644
--- a/spec/controllers/projects/pipelines_settings_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_settings_controller_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Projects::PipelinesSettingsController do
let_it_be(:user) { create(:user) }
let_it_be(:project_auto_devops) { create(:project_auto_devops) }
+
let(:project) { project_auto_devops.project }
before do
diff --git a/spec/controllers/projects/project_members_controller_spec.rb b/spec/controllers/projects/project_members_controller_spec.rb
index 53a7c2ca069..46a0fc8edb0 100644
--- a/spec/controllers/projects/project_members_controller_spec.rb
+++ b/spec/controllers/projects/project_members_controller_spec.rb
@@ -7,8 +7,12 @@ RSpec.describe Projects::ProjectMembersController do
let(:group) { create(:group, :public) }
let(:project) { create(:project, :public) }
- around do |example|
- travel_to DateTime.new(2019, 4, 1) { example.run }
+ before do
+ travel_to DateTime.new(2019, 4, 1)
+ end
+
+ after do
+ travel_back
end
describe 'GET index' do
diff --git a/spec/controllers/projects/raw_controller_spec.rb b/spec/controllers/projects/raw_controller_spec.rb
index b1c3c1c0276..5dee36ee7c2 100644
--- a/spec/controllers/projects/raw_controller_spec.rb
+++ b/spec/controllers/projects/raw_controller_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Projects::RawController do
include RepoHelpers
let_it_be(:project) { create(:project, :public, :repository) }
+
let(:inline) { nil }
describe 'GET #show' do
diff --git a/spec/controllers/projects/registry/repositories_controller_spec.rb b/spec/controllers/projects/registry/repositories_controller_spec.rb
index 9b803edd463..0685e5a2055 100644
--- a/spec/controllers/projects/registry/repositories_controller_spec.rb
+++ b/spec/controllers/projects/registry/repositories_controller_spec.rb
@@ -16,19 +16,19 @@ RSpec.describe Projects::Registry::RepositoriesController do
project.add_developer(user)
end
- shared_examples 'with name parameter' do
- let_it_be(:repo) { create(:container_repository, project: project, name: 'my_searched_image') }
- let_it_be(:another_repo) { create(:container_repository, project: project, name: 'bar') }
-
- it 'returns the searched repo' do
- go_to_index(format: :json, params: { name: 'my_searched_image' })
+ shared_examples 'renders 200 for html and 404 for json' do
+ it 'successfully renders container repositories', :snowplow do
+ go_to_index
expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.length).to eq 1
- expect(json_response.first).to include(
- 'id' => repo.id,
- 'name' => repo.name
- )
+ # event tracked in GraphQL API: https://gitlab.com/gitlab-org/gitlab/-/merge_requests/44926
+ expect_no_snowplow_event
+ end
+
+ it 'returns 404 for request in json format' do
+ go_to_index(format: :json)
+
+ expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -50,33 +50,12 @@ RSpec.describe Projects::Registry::RepositoriesController do
tags: %w[rc1 latest])
end
- it 'successfully renders container repositories', :snowplow do
- go_to_index
-
- expect_no_snowplow_event
- expect(response).to have_gitlab_http_status(:ok)
- end
-
- it 'tracks the event', :snowplow do
- go_to_index(format: :json)
-
- expect_snowplow_event(category: anything, action: 'list_repositories')
- end
-
it 'creates a root container repository' do
expect { go_to_index }.to change { ContainerRepository.all.count }.by(1)
expect(ContainerRepository.first).to be_root_repository
end
- it 'json has a list of projects' do
- go_to_index(format: :json)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('registry/repositories')
- expect(response).to include_pagination_headers
- end
-
- it_behaves_like 'with name parameter'
+ it_behaves_like 'renders 200 for html and 404 for json'
end
context 'when there are no tags for this repository' do
@@ -84,22 +63,11 @@ RSpec.describe Projects::Registry::RepositoriesController do
stub_container_registry_tags(repository: :any, tags: [])
end
- it 'successfully renders container repositories' do
- go_to_index
-
- expect(response).to have_gitlab_http_status(:ok)
- end
-
it 'does not ensure root container repository' do
expect { go_to_index }.not_to change { ContainerRepository.all.count }
end
- it 'responds with json if asked' do
- go_to_index(format: :json)
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to be_kind_of(Array)
- end
+ it_behaves_like 'renders 200 for html and 404 for json'
end
end
end
diff --git a/spec/controllers/projects/registry/tags_controller_spec.rb b/spec/controllers/projects/registry/tags_controller_spec.rb
index 5bff89b4308..c03a280d2cd 100644
--- a/spec/controllers/projects/registry/tags_controller_spec.rb
+++ b/spec/controllers/projects/registry/tags_controller_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe Projects::Registry::TagsController do
it 'tracks the event', :snowplow do
get_tags
- expect_snowplow_event(category: anything, action: 'list_tags')
+ expect_snowplow_event(category: 'Projects::Registry::TagsController', action: 'list_tags')
end
end
@@ -107,11 +107,12 @@ RSpec.describe Projects::Registry::TagsController do
destroy_tag('test.')
end
- it 'tracks the event' do
+ it 'tracks the event', :snowplow do
expect_delete_tags(%w[test.])
- expect(controller).to receive(:track_event).with(:delete_tag)
destroy_tag('test.')
+
+ expect_snowplow_event(category: 'Projects::Registry::TagsController', action: 'delete_tag')
end
end
end
diff --git a/spec/controllers/projects/releases/evidences_controller_spec.rb b/spec/controllers/projects/releases/evidences_controller_spec.rb
index 0ec4cdf2a31..68433969d69 100644
--- a/spec/controllers/projects/releases/evidences_controller_spec.rb
+++ b/spec/controllers/projects/releases/evidences_controller_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Projects::Releases::EvidencesController do
let_it_be(:private_project) { create(:project, :repository, :private) }
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
+
let(:user) { developer }
before do
@@ -62,7 +63,7 @@ RSpec.describe Projects::Releases::EvidencesController do
context 'when the release was created before evidence existed' do
before do
- evidence.destroy
+ evidence.destroy!
end
it_behaves_like 'not found'
diff --git a/spec/controllers/projects/releases_controller_spec.rb b/spec/controllers/projects/releases_controller_spec.rb
index fc7ab88bbe0..a1e36ec5c4c 100644
--- a/spec/controllers/projects/releases_controller_spec.rb
+++ b/spec/controllers/projects/releases_controller_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe Projects::ReleasesController do
let_it_be(:reporter) { create(:user) }
let_it_be(:guest) { create(:user) }
let_it_be(:user) { developer }
+
let!(:release_1) { create(:release, project: project, released_at: Time.zone.parse('2018-10-18')) }
let!(:release_2) { create(:release, project: project, released_at: Time.zone.parse('2019-10-19')) }
diff --git a/spec/controllers/projects/repositories_controller_spec.rb b/spec/controllers/projects/repositories_controller_spec.rb
index e6327a72a68..cb2579b800a 100644
--- a/spec/controllers/projects/repositories_controller_spec.rb
+++ b/spec/controllers/projects/repositories_controller_spec.rb
@@ -56,28 +56,6 @@ RSpec.describe Projects::RepositoriesController do
expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-archive:")
end
- it 'handles legacy queries with no ref' do
- get :archive, params: { namespace_id: project.namespace, project_id: project }, format: "zip"
-
- expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-archive:")
- end
-
- it 'handles legacy queries with the ref specified as ref in params' do
- get :archive, params: { namespace_id: project.namespace, project_id: project, ref: 'feature' }, format: 'zip'
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(assigns(:ref)).to eq('feature')
- expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-archive:")
- end
-
- it 'handles legacy queries with the ref specified as id in params' do
- get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'feature' }, format: 'zip'
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(assigns(:ref)).to eq('feature')
- expect(response.header[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-archive:")
- end
-
it 'prioritizes the id param over the ref param when both are specified' do
get :archive, params: { namespace_id: project.namespace, project_id: project, id: 'feature', ref: 'feature_conflict' }, format: 'zip'
diff --git a/spec/controllers/projects/runners_controller_spec.rb b/spec/controllers/projects/runners_controller_spec.rb
index d63d88f8283..3021ad42c9f 100644
--- a/spec/controllers/projects/runners_controller_spec.rb
+++ b/spec/controllers/projects/runners_controller_spec.rb
@@ -46,7 +46,7 @@ RSpec.describe Projects::RunnersController do
describe '#resume' do
it 'marks the runner as active and ticks the queue' do
- runner.update(active: false)
+ runner.update!(active: false)
expect do
post :resume, params: params
@@ -61,7 +61,7 @@ RSpec.describe Projects::RunnersController do
describe '#pause' do
it 'marks the runner as inactive and ticks the queue' do
- runner.update(active: true)
+ runner.update!(active: true)
expect do
post :pause, params: params
diff --git a/spec/controllers/projects/services_controller_spec.rb b/spec/controllers/projects/services_controller_spec.rb
index 8f928cf3382..488a34b74df 100644
--- a/spec/controllers/projects/services_controller_spec.rb
+++ b/spec/controllers/projects/services_controller_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Projects::ServicesController do
include JiraServiceHelper
+ include AfterNextHelpers
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
@@ -13,7 +14,6 @@ RSpec.describe Projects::ServicesController do
before do
sign_in(user)
project.add_maintainer(user)
- allow(Gitlab::UrlBlocker).to receive(:validate!).and_return([URI.parse('http://example.com'), nil])
end
describe '#test' do
@@ -114,7 +114,7 @@ RSpec.describe Projects::ServicesController do
end
context 'failure' do
- it 'returns success status code and the error message' do
+ it 'returns an error response when the integration test fails' do
stub_request(:get, 'http://example.com/rest/api/2/serverInfo')
.to_return(status: 404)
@@ -128,6 +128,36 @@ RSpec.describe Projects::ServicesController do
'test_failed' => true
)
end
+
+ context 'with the Slack integration' do
+ let_it_be(:service) { build(:slack_service) }
+
+ it 'returns an error response when the URL is blocked' do
+ put :test, params: project_params(service: { webhook: 'http://127.0.0.1' })
+
+ expect(response).to be_successful
+ expect(json_response).to eq(
+ 'error' => true,
+ 'message' => 'Connection failed. Please check your settings.',
+ 'service_response' => "URL 'http://127.0.0.1' is blocked: Requests to localhost are not allowed",
+ 'test_failed' => true
+ )
+ end
+
+ it 'returns an error response when a network exception is raised' do
+ expect_next(SlackService).to receive(:test).and_raise(Errno::ECONNREFUSED)
+
+ put :test, params: project_params
+
+ expect(response).to be_successful
+ expect(json_response).to eq(
+ 'error' => true,
+ 'message' => 'Connection failed. Please check your settings.',
+ 'service_response' => 'Connection refused',
+ 'test_failed' => true
+ )
+ end
+ end
end
end
diff --git a/spec/controllers/projects/settings/access_tokens_controller_spec.rb b/spec/controllers/projects/settings/access_tokens_controller_spec.rb
index ff52b2a765a..2a7e3d0b322 100644
--- a/spec/controllers/projects/settings/access_tokens_controller_spec.rb
+++ b/spec/controllers/projects/settings/access_tokens_controller_spec.rb
@@ -4,7 +4,8 @@ require('spec_helper')
RSpec.describe Projects::Settings::AccessTokensController do
let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
let_it_be(:bot_user) { create(:user, :project_bot) }
before_all do
@@ -40,6 +41,26 @@ RSpec.describe Projects::Settings::AccessTokensController do
it_behaves_like 'feature unavailable'
it_behaves_like 'project access tokens available #create'
+
+ context 'when project access token creation is disabled' do
+ before do
+ group.namespace_settings.update_column(:resource_access_token_creation_allowed, false)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+
+ it 'does not create the token' do
+ expect { subject }.not_to change { PersonalAccessToken.count }
+ end
+
+ it 'does not add the project bot as a member' do
+ expect { subject }.not_to change { Member.count }
+ end
+
+ it 'does not create the project bot user' do
+ expect { subject }.not_to change { User.count }
+ end
+ end
end
describe '#revoke', :sidekiq_inline do
diff --git a/spec/controllers/projects/settings/ci_cd_controller_spec.rb b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
index 7a6e11d53d4..d953249c139 100644
--- a/spec/controllers/projects/settings/ci_cd_controller_spec.rb
+++ b/spec/controllers/projects/settings/ci_cd_controller_spec.rb
@@ -5,6 +5,7 @@ require('spec_helper')
RSpec.describe Projects::Settings::CiCdController do
let_it_be(:user) { create(:user) }
let_it_be(:project_auto_devops) { create(:project_auto_devops) }
+
let(:project) { project_auto_devops.project }
before do
diff --git a/spec/controllers/projects/settings/operations_controller_spec.rb b/spec/controllers/projects/settings/operations_controller_spec.rb
index 46f69eaf96a..d2934ec4e97 100644
--- a/spec/controllers/projects/settings/operations_controller_spec.rb
+++ b/spec/controllers/projects/settings/operations_controller_spec.rb
@@ -493,6 +493,7 @@ RSpec.describe Projects::Settings::OperationsController do
describe 'PATCH #update' do
let_it_be(:external_url) { 'https://gitlab.com' }
+
let(:params) do
{
tracing_setting_attributes: {
diff --git a/spec/controllers/projects/snippets_controller_spec.rb b/spec/controllers/projects/snippets_controller_spec.rb
index 793ffbbfad9..1a6c0974f08 100644
--- a/spec/controllers/projects/snippets_controller_spec.rb
+++ b/spec/controllers/projects/snippets_controller_spec.rb
@@ -46,6 +46,10 @@ RSpec.describe Projects::SnippetsController do
let(:params) { base_params }
end
+ it_behaves_like 'snippets views' do
+ let(:params) { base_params }
+ end
+
context 'when the project snippet is private' do
let_it_be(:project_snippet) { create(:project_snippet, :private, project: project, author: user) }
diff --git a/spec/controllers/projects/starrers_controller_spec.rb b/spec/controllers/projects/starrers_controller_spec.rb
index 66888fa3024..8d03600cd58 100644
--- a/spec/controllers/projects/starrers_controller_spec.rb
+++ b/spec/controllers/projects/starrers_controller_spec.rb
@@ -170,7 +170,7 @@ RSpec.describe Projects::StarrersController do
context 'when project is private' do
before do
- project.update(visibility_level: Project::PRIVATE)
+ project.update!(visibility_level: Project::PRIVATE)
end
it 'starrers are not visible for non logged in users' do
diff --git a/spec/controllers/projects/static_site_editor_controller_spec.rb b/spec/controllers/projects/static_site_editor_controller_spec.rb
index b563f3b667f..73b0e3bba69 100644
--- a/spec/controllers/projects/static_site_editor_controller_spec.rb
+++ b/spec/controllers/projects/static_site_editor_controller_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Projects::StaticSiteEditorController do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:user) { create(:user) }
+
let(:data) { { key: 'value' } }
describe 'GET index' do
diff --git a/spec/controllers/projects/todos_controller_spec.rb b/spec/controllers/projects/todos_controller_spec.rb
index 0e35f401bc8..9a73417ffdb 100644
--- a/spec/controllers/projects/todos_controller_spec.rb
+++ b/spec/controllers/projects/todos_controller_spec.rb
@@ -5,6 +5,7 @@ require('spec_helper')
RSpec.describe Projects::TodosController do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:issue) { create(:issue, project: project) }
let(:merge_request) { create(:merge_request, source_project: project) }
let(:design) { create(:design, project: project, issue: issue) }
diff --git a/spec/controllers/projects/uploads_controller_spec.rb b/spec/controllers/projects/uploads_controller_spec.rb
index dda58f06a37..c008c7253d8 100644
--- a/spec/controllers/projects/uploads_controller_spec.rb
+++ b/spec/controllers/projects/uploads_controller_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Projects::UploadsController do
let!(:upload) { create(:upload, :issuable_upload, :with_file, model: model) }
let(:project) { model }
let(:upload_path) { File.basename(upload.path) }
- let!(:redirect_route) { project.redirect_routes.create(path: project.full_path + 'old') }
+ let!(:redirect_route) { project.redirect_routes.create!(path: project.full_path + 'old') }
it 'redirects to a file with the proper extension' do
get :show, params: { namespace_id: project.namespace, project_id: project.to_param + 'old', filename: File.basename(upload.path), secret: upload.secret }
diff --git a/spec/controllers/projects_controller_spec.rb b/spec/controllers/projects_controller_spec.rb
index 554487db8f2..ffe2d393b1e 100644
--- a/spec/controllers/projects_controller_spec.rb
+++ b/spec/controllers/projects_controller_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe ProjectsController do
let_it_be(:project, reload: true) { create(:project, service_desk_enabled: false) }
let_it_be(:public_project) { create(:project, :public) }
let_it_be(:user) { create(:user) }
+
let(:jpg) { fixture_file_upload('spec/fixtures/rails_sample.jpg', 'image/jpg') }
let(:txt) { fixture_file_upload('spec/fixtures/doc_sample.txt', 'text/plain') }
@@ -159,7 +160,7 @@ RSpec.describe ProjectsController do
before do
setting = user.notification_settings_for(public_project)
setting.level = :watch
- setting.save
+ setting.save!
end
it "shows current notification setting" do
@@ -221,24 +222,23 @@ RSpec.describe ProjectsController do
allow(controller).to receive(:record_experiment_user)
end
- context 'when user can push to default branch' do
+ context 'when user can push to default branch', :experiment do
let(:user) { empty_project.owner }
- it 'creates an "view_project_show" experiment tracking event', :snowplow do
- allow_next_instance_of(ApplicationExperiment) do |e|
- allow(e).to receive(:should_track?).and_return(true)
- end
+ it 'creates an "view_project_show" experiment tracking event' do
+ expect(experiment(:empty_repo_upload)).to track(
+ :view_project_show,
+ property: 'empty'
+ ).on_next_instance
get :show, params: { namespace_id: empty_project.namespace, id: empty_project }
-
- expect_snowplow_event(category: 'empty_repo_upload', action: 'view_project_show', context: [{ schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/0-3-0', data: anything }], property: 'empty')
end
end
User.project_views.keys.each do |project_view|
context "with #{project_view} view set" do
before do
- user.update(project_view: project_view)
+ user.update!(project_view: project_view)
get :show, params: { namespace_id: empty_project.namespace, id: empty_project }
end
@@ -261,7 +261,7 @@ RSpec.describe ProjectsController do
User.project_views.keys.each do |project_view|
context "with #{project_view} view set" do
before do
- user.update(project_view: project_view)
+ user.update!(project_view: project_view)
get :show, params: { namespace_id: empty_project.namespace, id: empty_project }
end
@@ -444,7 +444,13 @@ RSpec.describe ProjectsController do
:created,
property: 'blank',
value: 1
- ).on_any_instance.with_context(actor: user)
+ ).with_context(actor: user).on_next_instance
+
+ post :create, params: { project: project_params }
+ end
+
+ it 'tracks a created event for the new_repo experiment', :experiment do
+ expect(experiment(:new_repo, :candidate)).to track(:project_created).on_next_instance
post :create, params: { project: project_params }
end
@@ -549,6 +555,7 @@ RSpec.describe ProjectsController do
describe '#housekeeping' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
+
let(:housekeeping) { Repositories::HousekeepingService.new(project) }
context 'when authenticated as owner' do
@@ -1098,6 +1105,7 @@ RSpec.describe ProjectsController do
context 'state filter on references' do
let_it_be(:issue) { create(:issue, :closed, project: public_project) }
+
let(:merge_request) { create(:merge_request, :closed, target_project: public_project) }
it 'renders JSON body with state filter for issues' do
diff --git a/spec/controllers/registrations/welcome_controller_spec.rb b/spec/controllers/registrations/welcome_controller_spec.rb
index d32c936b8c9..008259a8bfa 100644
--- a/spec/controllers/registrations/welcome_controller_spec.rb
+++ b/spec/controllers/registrations/welcome_controller_spec.rb
@@ -60,8 +60,10 @@ RSpec.describe Registrations::WelcomeController do
end
describe '#update' do
+ let(:email_opted_in) { '0' }
+
subject(:update) do
- patch :update, params: { user: { role: 'software_developer', setup_for_company: 'false' } }
+ patch :update, params: { user: { role: 'software_developer', setup_for_company: 'false', email_opted_in: email_opted_in } }
end
context 'without a signed in user' do
@@ -74,6 +76,24 @@ RSpec.describe Registrations::WelcomeController do
end
it { is_expected.to redirect_to(dashboard_projects_path)}
+
+ context 'when the user opted in' do
+ let(:email_opted_in) { '1' }
+
+ it 'sets the email_opted_in field' do
+ subject
+
+ expect(controller.current_user.email_opted_in).to eq(true)
+ end
+ end
+
+ context 'when the user opted out' do
+ it 'sets the email_opted_in field' do
+ subject
+
+ expect(controller.current_user.email_opted_in).to eq(false)
+ end
+ end
end
end
end
diff --git a/spec/controllers/root_controller_spec.rb b/spec/controllers/root_controller_spec.rb
index 49841aa61d7..01ff646274a 100644
--- a/spec/controllers/root_controller_spec.rb
+++ b/spec/controllers/root_controller_spec.rb
@@ -134,26 +134,6 @@ RSpec.describe RootController do
expect(response).to render_template 'dashboard/projects/index'
end
-
- context 'when customize_homepage is enabled' do
- it 'renders the default dashboard' do
- get :index
-
- expect(assigns[:customize_homepage]).to be true
- end
- end
-
- context 'when customize_homepage is not enabled' do
- before do
- stub_feature_flags(customize_homepage: false)
- end
-
- it 'renders the default dashboard' do
- get :index
-
- expect(assigns[:customize_homepage]).to be false
- end
- end
end
end
end
diff --git a/spec/controllers/sessions_controller_spec.rb b/spec/controllers/sessions_controller_spec.rb
index c31ba6fe156..abdafa2880a 100644
--- a/spec/controllers/sessions_controller_spec.rb
+++ b/spec/controllers/sessions_controller_spec.rb
@@ -86,7 +86,7 @@ RSpec.describe SessionsController do
post(:create, params: { user: { login: 'invalid', password: 'invalid' } })
expect(response)
- .to set_flash.now[:alert].to(/Invalid Login or password/)
+ .to set_flash.now[:alert].to(/Invalid login or password/)
end
end
@@ -348,7 +348,7 @@ RSpec.describe SessionsController do
otp_user_id: user.id
)
- expect(response).to set_flash.now[:alert].to(/Invalid Login or password/)
+ expect(response).to set_flash.now[:alert].to(/Invalid login or password/)
end
end
@@ -524,7 +524,7 @@ RSpec.describe SessionsController do
it 'sets the username and caller_id in the context' do
expect(controller).to receive(:destroy).and_wrap_original do |m, *args|
- expect(Labkit::Context.current.to_h)
+ expect(Gitlab::ApplicationContext.current)
.to include('meta.user' => user.username,
'meta.caller_id' => 'SessionsController#destroy')
@@ -538,9 +538,9 @@ RSpec.describe SessionsController do
context 'when not signed in' do
it 'sets the caller_id in the context' do
expect(controller).to receive(:new).and_wrap_original do |m, *args|
- expect(Labkit::Context.current.to_h)
+ expect(Gitlab::ApplicationContext.current)
.to include('meta.caller_id' => 'SessionsController#new')
- expect(Labkit::Context.current.to_h)
+ expect(Gitlab::ApplicationContext.current)
.not_to include('meta.user')
m.call(*args)
@@ -557,9 +557,9 @@ RSpec.describe SessionsController do
it 'sets the caller_id in the context' do
allow_any_instance_of(User).to receive(:lock_access!).and_wrap_original do |m, *args|
- expect(Labkit::Context.current.to_h)
+ expect(Gitlab::ApplicationContext.current)
.to include('meta.caller_id' => 'SessionsController#create')
- expect(Labkit::Context.current.to_h)
+ expect(Gitlab::ApplicationContext.current)
.not_to include('meta.user')
m.call(*args)
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index d292ba60a12..908d5741709 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -19,7 +19,6 @@ RSpec.describe 'Database schema' do
approver_groups: %w[target_id],
approvers: %w[target_id user_id],
audit_events: %w[author_id entity_id target_id],
- audit_events_archived: %w[author_id entity_id target_id],
award_emoji: %w[awardable_id user_id],
aws_roles: %w[role_external_id],
boards: %w[milestone_id iteration_id],
@@ -86,9 +85,7 @@ RSpec.describe 'Database schema' do
users: %w[color_scheme_id created_by_id theme_id email_opted_in_source_id],
users_star_projects: %w[user_id],
vulnerability_identifiers: %w[external_id],
- vulnerability_scanners: %w[external_id],
- web_hooks: %w[group_id],
- web_hook_logs_part_0c5294f417: %w[web_hook_id]
+ vulnerability_scanners: %w[external_id]
}.with_indifferent_access.freeze
context 'for table' do
@@ -115,7 +112,7 @@ RSpec.describe 'Database schema' do
# postgres and mysql both automatically create an index on the primary
# key. Also, the rails connection.indexes() method does not return
# automatically generated indexes (like the primary key index).
- first_indexed_column = first_indexed_column.push(primary_key_column)
+ first_indexed_column.push(primary_key_column)
expect(first_indexed_column.uniq).to include(*foreign_keys_columns)
end
diff --git a/spec/deprecation_toolkit_env.rb b/spec/deprecation_toolkit_env.rb
index f4ead6d5f01..fb70a8844a1 100644
--- a/spec/deprecation_toolkit_env.rb
+++ b/spec/deprecation_toolkit_env.rb
@@ -55,12 +55,10 @@ module DeprecationToolkitEnv
# one by one
def self.allowed_kwarg_warning_paths
%w[
- activerecord-6.0.3.4/lib/active_record/migration.rb
- devise-4.7.3/lib/devise/test/controller_helpers.rb
- activesupport-6.0.3.4/lib/active_support/cache.rb
- batch-loader-1.4.0/lib/batch_loader/graphql.rb
+ activerecord-6.0.3.6/lib/active_record/migration.rb
+ activesupport-6.0.3.6/lib/active_support/cache.rb
carrierwave-1.3.1/lib/carrierwave/sanitized_file.rb
- activerecord-6.0.3.4/lib/active_record/relation.rb
+ activerecord-6.0.3.6/lib/active_record/relation.rb
selenium-webdriver-3.142.7/lib/selenium/webdriver/firefox/driver.rb
asciidoctor-2.0.12/lib/asciidoctor/extensions.rb
]
diff --git a/spec/experiments/application_experiment_spec.rb b/spec/experiments/application_experiment_spec.rb
index 2481ee5a806..424a3af20a3 100644
--- a/spec/experiments/application_experiment_spec.rb
+++ b/spec/experiments/application_experiment_spec.rb
@@ -114,7 +114,7 @@ RSpec.describe ApplicationExperiment, :experiment do
data: { data: '_data_' }
},
{
- schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/0-3-0',
+ schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/1-0-0',
data: { experiment: 'namespaced/stub', key: '86208ac54ca798e11f127e8b23ec396a', variant: 'control' }
}
]
diff --git a/spec/experiments/members/invite_email_experiment_spec.rb b/spec/experiments/members/invite_email_experiment_spec.rb
index 539230e39b9..a9a269347e0 100644
--- a/spec/experiments/members/invite_email_experiment_spec.rb
+++ b/spec/experiments/members/invite_email_experiment_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::InviteEmailExperiment do
+RSpec.describe Members::InviteEmailExperiment, :clean_gitlab_redis_shared_state do
subject(:invite_email) { experiment('members/invite_email', **context) }
let(:context) { { actor: double('Member', created_by: double('User', avatar_url: '_avatar_url_')) } }
@@ -23,7 +23,7 @@ RSpec.describe Members::InviteEmailExperiment do
end
end
- describe "variant resolution", :clean_gitlab_redis_shared_state do
+ describe "variant resolution" do
it "proves out round robin in variant selection", :aggregate_failures do
instance_1 = described_class.new('members/invite_email', **context)
allow(instance_1).to receive(:enabled?).and_return(true)
@@ -45,4 +45,69 @@ RSpec.describe Members::InviteEmailExperiment do
expect(instance_3.variant.name).to eq('avatar')
end
end
+
+ describe Members::RoundRobin do
+ subject(:round_robin) { Members::RoundRobin.new('_key_', %i[variant1 variant2]) }
+
+ describe "execute" do
+ context "when there are 2 variants" do
+ it "proves out round robin in selection", :aggregate_failures do
+ expect(round_robin.execute).to eq :variant2
+ expect(round_robin.execute).to eq :variant1
+ expect(round_robin.execute).to eq :variant2
+ end
+ end
+
+ context "when there are more than 2 variants" do
+ subject(:round_robin) { Members::RoundRobin.new('_key_', %i[variant1 variant2 variant3]) }
+
+ it "proves out round robin in selection", :aggregate_failures do
+ expect(round_robin.execute).to eq :variant2
+ expect(round_robin.execute).to eq :variant3
+ expect(round_robin.execute).to eq :variant1
+
+ expect(round_robin.execute).to eq :variant2
+ expect(round_robin.execute).to eq :variant3
+ expect(round_robin.execute).to eq :variant1
+ end
+ end
+
+ context "when writing to cache fails" do
+ subject(:round_robin) { Members::RoundRobin.new('_key_', []) }
+
+ it "raises an error and logs" do
+ allow(Gitlab::Redis::SharedState).to receive(:with).and_raise(Members::RoundRobin::CacheError)
+ expect(Gitlab::AppLogger).to receive(:warn)
+
+ expect { round_robin.execute }.to raise_error(Members::RoundRobin::CacheError)
+ end
+ end
+ end
+
+ describe "#counter_expires_in" do
+ it 'displays the expiration time in seconds' do
+ round_robin.execute
+
+ expect(round_robin.counter_expires_in).to be_between(0, described_class::COUNTER_EXPIRE_TIME)
+ end
+ end
+
+ describe '#value' do
+ it 'get the count' do
+ expect(round_robin.counter_value).to eq(0)
+
+ round_robin.execute
+
+ expect(round_robin.counter_value).to eq(1)
+ end
+ end
+
+ describe '#reset!' do
+ it 'resets the count down to zero' do
+ 3.times { round_robin.execute }
+
+ expect { round_robin.reset! }.to change { round_robin.counter_value }.from(3).to(0)
+ end
+ end
+ end
end
diff --git a/spec/experiments/new_project_readme_experiment_spec.rb b/spec/experiments/new_project_readme_experiment_spec.rb
index 17e28cf6e7f..87446394bff 100644
--- a/spec/experiments/new_project_readme_experiment_spec.rb
+++ b/spec/experiments/new_project_readme_experiment_spec.rb
@@ -7,10 +7,6 @@ RSpec.describe NewProjectReadmeExperiment, :experiment do
let(:actor) { User.new(id: 42, created_at: Time.current) }
- before do
- stub_experiments(new_project_readme: :control)
- end
-
describe "exclusions" do
let(:threshold) { described_class::MAX_ACCOUNT_AGE }
diff --git a/spec/experiments/strategy/round_robin_spec.rb b/spec/experiments/strategy/round_robin_spec.rb
deleted file mode 100644
index f837a4701b2..00000000000
--- a/spec/experiments/strategy/round_robin_spec.rb
+++ /dev/null
@@ -1,68 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Strategy::RoundRobin, :clean_gitlab_redis_shared_state do
- subject(:round_robin) { described_class.new('_key_', %i[variant1 variant2]) }
-
- describe "execute" do
- context "when there are 2 variants" do
- it "proves out round robin in selection", :aggregate_failures do
- expect(round_robin.execute).to eq :variant2
- expect(round_robin.execute).to eq :variant1
- expect(round_robin.execute).to eq :variant2
- end
- end
-
- context "when there are more than 2 variants" do
- subject(:round_robin) { described_class.new('_key_', %i[variant1 variant2 variant3]) }
-
- it "proves out round robin in selection", :aggregate_failures do
- expect(round_robin.execute).to eq :variant2
- expect(round_robin.execute).to eq :variant3
- expect(round_robin.execute).to eq :variant1
-
- expect(round_robin.execute).to eq :variant2
- expect(round_robin.execute).to eq :variant3
- expect(round_robin.execute).to eq :variant1
- end
- end
-
- context "when writing to cache fails" do
- subject(:round_robin) { described_class.new('_key_', []) }
-
- it "raises an error and logs" do
- allow(Gitlab::Redis::SharedState).to receive(:with).and_raise(Strategy::RoundRobin::CacheError)
- expect(Gitlab::AppLogger).to receive(:warn)
-
- expect { round_robin.execute }.to raise_error(Strategy::RoundRobin::CacheError)
- end
- end
- end
-
- describe "#counter_expires_in" do
- it 'displays the expiration time in seconds' do
- round_robin.execute
-
- expect(round_robin.counter_expires_in).to be_between(0, described_class::COUNTER_EXPIRE_TIME)
- end
- end
-
- describe '#value' do
- it 'get the count' do
- expect(round_robin.counter_value).to eq(0)
-
- round_robin.execute
-
- expect(round_robin.counter_value).to eq(1)
- end
- end
-
- describe '#reset!' do
- it 'resets the count down to zero' do
- 3.times { round_robin.execute }
-
- expect { round_robin.reset! }.to change { round_robin.counter_value }.from(3).to(0)
- end
- end
-end
diff --git a/spec/factories/atlassian_identities.rb b/spec/factories/atlassian_identities.rb
index 698cf4ae7ad..80420e335a9 100644
--- a/spec/factories/atlassian_identities.rb
+++ b/spec/factories/atlassian_identities.rb
@@ -3,7 +3,7 @@
FactoryBot.define do
factory :atlassian_identity, class: 'Atlassian::Identity' do
extern_uid { generate(:username) }
- user { create(:user) }
+ user { association(:user) }
expires_at { 2.weeks.from_now }
token { SecureRandom.alphanumeric(1254) }
refresh_token { SecureRandom.alphanumeric(45) }
diff --git a/spec/factories/bulk_import/trackers.rb b/spec/factories/bulk_import/trackers.rb
index 03af5b41e0f..94340b0f389 100644
--- a/spec/factories/bulk_import/trackers.rb
+++ b/spec/factories/bulk_import/trackers.rb
@@ -5,7 +5,19 @@ FactoryBot.define do
association :entity, factory: :bulk_import_entity
stage { 0 }
- relation { :relation }
has_next_page { false }
+ sequence(:pipeline_name) { |n| "pipeline_name_#{n}" }
+
+ trait :started do
+ status { 1 }
+
+ sequence(:jid) { |n| "bulk_import_entity_#{n}" }
+ end
+
+ trait :finished do
+ status { 2 }
+
+ sequence(:jid) { |n| "bulk_import_entity_#{n}" }
+ end
end
end
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 886be520668..b06d581d2c0 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -30,6 +30,21 @@ FactoryBot.define do
yaml_variables { nil }
end
+ trait :unique_name do
+ name { generate(:job_name) }
+ end
+
+ trait :dependent do
+ transient do
+ sequence(:needed_name) { |n| "dependency #{n}" }
+ needed { association(:ci_build, name: needed_name, pipeline: pipeline) }
+ end
+
+ after(:create) do |build, evaluator|
+ build.needs << create(:ci_build_need, build: build, name: evaluator.needed.name)
+ end
+ end
+
trait :started do
started_at { 'Di 29. Okt 09:51:28 CET 2013' }
end
diff --git a/spec/factories/ci/pipeline_artifacts.rb b/spec/factories/ci/pipeline_artifacts.rb
index 05ff7afed7c..3250c7abb4b 100644
--- a/spec/factories/ci/pipeline_artifacts.rb
+++ b/spec/factories/ci/pipeline_artifacts.rb
@@ -13,6 +13,22 @@ FactoryBot.define do
Rails.root.join('spec/fixtures/pipeline_artifacts/code_coverage.json'), 'application/json')
end
+ trait :checksummed do
+ verification_checksum { 'abc' }
+ end
+
+ trait :checksum_failure do
+ verification_failure { 'Could not calculate the checksum' }
+ end
+
+ trait :expired do
+ expire_at { Date.yesterday }
+ end
+
+ trait :remote_store do
+ file_store { ::ObjectStorage::Store::REMOTE}
+ end
+
trait :with_coverage_report do
file_type { :code_coverage }
diff --git a/spec/factories/ci/reports/codequality_degradations.rb b/spec/factories/ci/reports/codequality_degradations.rb
index d82157b457a..8b53f2bf46e 100644
--- a/spec/factories/ci/reports/codequality_degradations.rb
+++ b/spec/factories/ci/reports/codequality_degradations.rb
@@ -95,4 +95,47 @@ FactoryBot.define do
}.with_indifferent_access
end
end
+
+ # TODO: Use this in all other specs and remove the previous numbered factories
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/325886
+ factory :codequality_degradation, class: Hash do
+ skip_create
+
+ # Feel free to add in more configurable properties here
+ # as the need arises
+ fingerprint { SecureRandom.hex }
+ severity { "major" }
+
+ Gitlab::Ci::Reports::CodequalityReports::SEVERITY_PRIORITIES.keys.each do |s|
+ trait s.to_sym do
+ severity { s }
+ end
+ end
+
+ initialize_with do
+ {
+ "categories": [
+ "Complexity"
+ ],
+ "check_name": "argument_count",
+ "content": {
+ "body": ""
+ },
+ "description": "Avoid parameter lists longer than 5 parameters. [12/5]",
+ "fingerprint": fingerprint,
+ "location": {
+ "path": "file_a.rb",
+ "lines": {
+ "begin": 10,
+ "end": 10
+ }
+ },
+ "other_locations": [],
+ "remediation_points": 900000,
+ "severity": severity,
+ "type": "issue",
+ "engine_name": "structure"
+ }.with_indifferent_access
+ end
+ end
end
diff --git a/spec/factories/ci/test_case.rb b/spec/factories/ci/unit_test.rb
index 601a3fae970..480724f260a 100644
--- a/spec/factories/ci/test_case.rb
+++ b/spec/factories/ci/unit_test.rb
@@ -1,8 +1,10 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_test_case, class: 'Ci::TestCase' do
+ factory :ci_unit_test, class: 'Ci::UnitTest' do
project
+ suite_name { 'rspec' }
+ name { 'Math#add returns sum' }
key_hash { Digest::SHA256.hexdigest(SecureRandom.hex) }
end
end
diff --git a/spec/factories/ci/test_case_failure.rb b/spec/factories/ci/unit_test_failure.rb
index 11fb002804b..07cd3419754 100644
--- a/spec/factories/ci/test_case_failure.rb
+++ b/spec/factories/ci/unit_test_failure.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
FactoryBot.define do
- factory :ci_test_case_failure, class: 'Ci::TestCaseFailure' do
+ factory :ci_unit_test_failure, class: 'Ci::UnitTestFailure' do
build factory: :ci_build
- test_case factory: :ci_test_case
+ unit_test factory: :ci_unit_test
failed_at { Time.current }
end
end
diff --git a/spec/factories/clusters/applications/helm.rb b/spec/factories/clusters/applications/helm.rb
index 01df5cc677d..1ff1292c36e 100644
--- a/spec/factories/clusters/applications/helm.rb
+++ b/spec/factories/clusters/applications/helm.rb
@@ -4,18 +4,26 @@ FactoryBot.define do
factory :clusters_applications_helm, class: 'Clusters::Applications::Helm' do
cluster factory: %i(cluster provided_by_gcp)
- before(:create) do
- allow(Gitlab::Kubernetes::Helm::V2::Certificate).to receive(:generate_root)
- .and_return(
- double(
- key_string: File.read(Rails.root.join('spec/fixtures/clusters/sample_key.key')),
- cert_string: File.read(Rails.root.join('spec/fixtures/clusters/sample_cert.pem'))
+ transient do
+ helm_installed { true }
+ end
+
+ before(:create) do |_record, evaluator|
+ if evaluator.helm_installed
+ allow(Gitlab::Kubernetes::Helm::V2::Certificate).to receive(:generate_root)
+ .and_return(
+ double(
+ key_string: File.read(Rails.root.join('spec/fixtures/clusters/sample_key.key')),
+ cert_string: File.read(Rails.root.join('spec/fixtures/clusters/sample_cert.pem'))
+ )
)
- )
+ end
end
- after(:create) do
- allow(Gitlab::Kubernetes::Helm::V2::Certificate).to receive(:generate_root).and_call_original
+ after(:create) do |_record, evaluator|
+ if evaluator.helm_installed
+ allow(Gitlab::Kubernetes::Helm::V2::Certificate).to receive(:generate_root).and_call_original
+ end
end
trait :not_installable do
@@ -69,19 +77,28 @@ FactoryBot.define do
status { 10 }
end
+ trait :externally_installed do
+ status { 11 }
+ end
+
trait :timed_out do
installing
updated_at { ClusterWaitForAppInstallationWorker::TIMEOUT.ago }
end
+ # Common trait used by the apps below
+ trait :no_helm_installed do
+ cluster factory: %i(cluster provided_by_gcp)
+
+ transient do
+ helm_installed { false }
+ end
+ end
+
factory :clusters_applications_ingress, class: 'Clusters::Applications::Ingress' do
modsecurity_enabled { false }
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
- trait :no_helm_installed do
- cluster factory: %i(cluster provided_by_gcp)
- end
-
trait :modsecurity_blocking do
modsecurity_enabled { true }
modsecurity_mode { :blocking }
@@ -104,62 +121,34 @@ FactoryBot.define do
factory :clusters_applications_cert_manager, class: 'Clusters::Applications::CertManager' do
email { 'admin@example.com' }
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
-
- trait :no_helm_installed do
- cluster factory: %i(cluster provided_by_gcp)
- end
end
factory :clusters_applications_elastic_stack, class: 'Clusters::Applications::ElasticStack' do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
-
- trait :no_helm_installed do
- cluster factory: %i(cluster provided_by_gcp)
- end
end
factory :clusters_applications_crossplane, class: 'Clusters::Applications::Crossplane' do
stack { 'gcp' }
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
-
- trait :no_helm_installed do
- cluster factory: %i(cluster provided_by_gcp)
- end
end
factory :clusters_applications_prometheus, class: 'Clusters::Applications::Prometheus' do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
-
- trait :no_helm_installed do
- cluster factory: %i(cluster provided_by_gcp)
- end
end
factory :clusters_applications_runner, class: 'Clusters::Applications::Runner' do
runner factory: %i(ci_runner)
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
-
- trait :no_helm_installed do
- cluster factory: %i(cluster provided_by_gcp)
- end
end
factory :clusters_applications_knative, class: 'Clusters::Applications::Knative' do
hostname { 'example.com' }
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
-
- trait :no_helm_installed do
- cluster factory: %i(cluster provided_by_gcp)
- end
end
factory :clusters_applications_jupyter, class: 'Clusters::Applications::Jupyter' do
oauth_application factory: :oauth_application
cluster factory: %i(cluster with_installed_helm provided_by_gcp project)
-
- trait :no_helm_installed do
- cluster factory: %i(cluster provided_by_gcp)
- end
end
factory :clusters_applications_fluentd, class: 'Clusters::Applications::Fluentd' do
@@ -167,18 +156,10 @@ FactoryBot.define do
waf_log_enabled { true }
cilium_log_enabled { true }
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
-
- trait :no_helm_installed do
- cluster factory: %i(cluster provided_by_gcp)
- end
end
factory :clusters_applications_cilium, class: 'Clusters::Applications::Cilium' do
cluster factory: %i(cluster with_installed_helm provided_by_gcp)
-
- trait :no_helm_installed do
- cluster factory: %i(cluster provided_by_gcp)
- end
end
end
end
diff --git a/spec/factories/clusters/integrations/prometheus.rb b/spec/factories/clusters/integrations/prometheus.rb
new file mode 100644
index 00000000000..1f0bb1ed512
--- /dev/null
+++ b/spec/factories/clusters/integrations/prometheus.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :clusters_integrations_prometheus, class: 'Clusters::Integrations::Prometheus' do
+ cluster factory: %i(cluster provided_by_gcp)
+ enabled { true }
+
+ trait :disabled do
+ enabled { false }
+ end
+ end
+end
diff --git a/spec/factories/draft_note.rb b/spec/factories/draft_note.rb
index 67a3377a39f..cde8831f169 100644
--- a/spec/factories/draft_note.rb
+++ b/spec/factories/draft_note.rb
@@ -9,17 +9,30 @@ FactoryBot.define do
transient do
line_number { 14 }
diff_refs { merge_request.try(:diff_refs) }
+ path { "files/ruby/popen.rb" }
end
position do
Gitlab::Diff::Position.new(
- old_path: "files/ruby/popen.rb",
- new_path: "files/ruby/popen.rb",
+ old_path: path,
+ new_path: path,
old_line: nil,
new_line: line_number,
diff_refs: diff_refs
)
end
+
+ factory :draft_note_on_image_diff do
+ transient do
+ path { "files/images/any_image.png" }
+ end
+
+ position do
+ association(:image_diff_position,
+ file: path,
+ diff_refs: diff_refs)
+ end
+ end
end
factory :draft_note_on_discussion, traits: [:on_discussion]
diff --git a/spec/factories/events.rb b/spec/factories/events.rb
index 6c9f1ba0137..c9e4ada3ffa 100644
--- a/spec/factories/events.rb
+++ b/spec/factories/events.rb
@@ -27,17 +27,20 @@ FactoryBot.define do
factory :wiki_page_event do
action { :created }
+ # rubocop: disable FactoryBot/InlineAssociation
+ # A persistent project is needed to have a wiki page being created properly.
project { @overrides[:wiki_page]&.container || create(:project, :wiki_repo) }
- target { create(:wiki_page_meta, :for_wiki_page, wiki_page: wiki_page) }
+ # rubocop: enable FactoryBot/InlineAssociation
+ target { association(:wiki_page_meta, :for_wiki_page, wiki_page: wiki_page) }
transient do
- wiki_page { create(:wiki_page, container: project) }
+ wiki_page { association(:wiki_page, container: project) }
end
end
trait :has_design do
transient do
- design { create(:design, issue: create(:issue, project: project)) }
+ design { association(:design, issue: association(:issue, project: project)) }
end
end
@@ -45,7 +48,7 @@ FactoryBot.define do
has_design
transient do
- note { create(:note, author: author, project: project, noteable: design) }
+ note { association(:note, author: author, project: project, noteable: design) }
end
action { :commented }
diff --git a/spec/factories/git_wiki_commit_details.rb b/spec/factories/git_wiki_commit_details.rb
index b35f102fd4d..fb3f2954b12 100644
--- a/spec/factories/git_wiki_commit_details.rb
+++ b/spec/factories/git_wiki_commit_details.rb
@@ -5,7 +5,7 @@ FactoryBot.define do
skip_create
transient do
- author { create(:user) }
+ author { association(:user) }
end
sequence(:message) { |n| "Commit message #{n}" }
diff --git a/spec/factories/gitaly/commit.rb b/spec/factories/gitaly/commit.rb
index 2ed201e9aac..4e8220e449a 100644
--- a/spec/factories/gitaly/commit.rb
+++ b/spec/factories/gitaly/commit.rb
@@ -14,7 +14,7 @@ FactoryBot.define do
subject { "My commit" }
body { subject + "\nMy body" }
- author { build(:gitaly_commit_author) }
- committer { build(:gitaly_commit_author) }
+ author { association(:gitaly_commit_author) }
+ committer { association(:gitaly_commit_author) }
end
end
diff --git a/spec/factories/gitlab/database/background_migration/batched_migrations.rb b/spec/factories/gitlab/database/background_migration/batched_migrations.rb
index b45f6ff037b..49cbdc5a8fb 100644
--- a/spec/factories/gitlab/database/background_migration/batched_migrations.rb
+++ b/spec/factories/gitlab/database/background_migration/batched_migrations.rb
@@ -9,5 +9,6 @@ FactoryBot.define do
job_class_name { 'CopyColumnUsingBackgroundMigrationJob' }
table_name { :events }
column_name { :id }
+ total_tuple_count { 10_000 }
end
end
diff --git a/spec/factories/group_group_links.rb b/spec/factories/group_group_links.rb
index 6f98886faff..2a582d8525b 100644
--- a/spec/factories/group_group_links.rb
+++ b/spec/factories/group_group_links.rb
@@ -2,8 +2,8 @@
FactoryBot.define do
factory :group_group_link do
- shared_group { create(:group) }
- shared_with_group { create(:group) }
+ shared_group { association(:group) }
+ shared_with_group { association(:group) }
group_access { Gitlab::Access::DEVELOPER }
trait(:guest) { group_access { Gitlab::Access::GUEST } }
diff --git a/spec/factories/import_export_uploads.rb b/spec/factories/import_export_uploads.rb
index 8521411e0e8..e1dd0c10ff2 100644
--- a/spec/factories/import_export_uploads.rb
+++ b/spec/factories/import_export_uploads.rb
@@ -2,6 +2,6 @@
FactoryBot.define do
factory :import_export_upload do
- project { create(:project) }
+ project { association(:project) }
end
end
diff --git a/spec/factories/packages.rb b/spec/factories/packages.rb
index 882bac1daa9..9edee735af9 100644
--- a/spec/factories/packages.rb
+++ b/spec/factories/packages.rb
@@ -16,6 +16,10 @@ FactoryBot.define do
status { :processing }
end
+ trait :error do
+ status { :error }
+ end
+
factory :maven_package do
maven_metadatum
@@ -36,8 +40,8 @@ FactoryBot.define do
package_type { :rubygems }
after :create do |package|
- create :package_file, :gem, package: package
- create :package_file, :gemspec, package: package
+ create :package_file, package.processing? ? :unprocessed_gem : :gem, package: package
+ create :package_file, :gemspec, package: package unless package.processing?
end
trait(:with_metadatum) do
diff --git a/spec/factories/packages/package_file.rb b/spec/factories/packages/package_file.rb
index 6d8b119040e..74400975670 100644
--- a/spec/factories/packages/package_file.rb
+++ b/spec/factories/packages/package_file.rb
@@ -125,6 +125,9 @@ FactoryBot.define do
trait(:source) do
file_name { 'sample_1.2.3~alpha2.tar.xz' }
+ file_md5 { 'd79b34f58f61ff4ad696d9bd0b8daa68' }
+ file_sha1 { '5f8bba5574eb01ac3b1f5e2988e8c29307788236' }
+ file_sha256 { 'b5a599e88e7cbdda3bde808160a21ba1dd1ec76b2ec8d4912aae769648d68362' }
transient do
file_metadatum_trait { :source }
@@ -133,6 +136,9 @@ FactoryBot.define do
trait(:dsc) do
file_name { 'sample_1.2.3~alpha2.dsc' }
+ file_md5 { '3b0817804f669e16cdefac583ad88f0e' }
+ file_sha1 { '32ecbd674f0bfd310df68484d87752490685a8d6' }
+ file_sha256 { '844f79825b7e8aaa191e514b58a81f9ac1e58e2180134b0c9512fa66d896d7ba' }
transient do
file_metadatum_trait { :dsc }
@@ -141,6 +147,9 @@ FactoryBot.define do
trait(:deb) do
file_name { 'libsample0_1.2.3~alpha2_amd64.deb' }
+ file_md5 { 'fb0842b21adc44207996296fe14439dd' }
+ file_sha1 { '5248b95600e85bfe7f63c0dfce330a75f5777366' }
+ file_sha256 { '1c383a525bfcba619c7305ccd106d61db501a6bbaf0003bf8d0c429fbdb7fcc1' }
transient do
file_metadatum_trait { :deb }
@@ -149,6 +158,9 @@ FactoryBot.define do
trait(:deb2) do
file_name { 'sample-dev_1.2.3~binary_amd64.deb' }
+ file_md5 { 'd2afbd28e4d74430d22f9504e18bfdf5' }
+ file_sha1 { 'f81e4f66c8c6bb899653a3340c157965ee69634f' }
+ file_sha256 { '9fbeee2191ce4dab5288fad5ecac1bd369f58fef9a992a880eadf0caf25f086d' }
transient do
file_metadatum_trait { :deb }
@@ -157,6 +169,9 @@ FactoryBot.define do
trait(:udeb) do
file_name { 'sample-udeb_1.2.3~alpha2_amd64.udeb' }
+ file_md5 { '72b1dd7d98229e2fb0355feda1d3a165' }
+ file_sha1 { 'e42e8f2fe04ed1bb73b44a187674480d0e49dcba' }
+ file_sha256 { '2b0c152b3ab4cc07663350424de972c2b7621d69fe6df2e0b94308a191e4632f' }
transient do
file_metadatum_trait { :udeb }
@@ -165,6 +180,9 @@ FactoryBot.define do
trait(:buildinfo) do
file_name { 'sample_1.2.3~alpha2_amd64.buildinfo' }
+ file_md5 { '4e085dd67c120ca967ec314f65770a42' }
+ file_sha1 { '0d47e899f3cc67a2253a4629456ff927e0db5c60' }
+ file_sha256 { 'f9900d3c94e94b329232668dcbef3dba2d96c07147b15b6dc0533452e4dd8a43' }
transient do
file_metadatum_trait { :buildinfo }
@@ -229,6 +247,14 @@ FactoryBot.define do
size { 4.kilobytes }
end
+ trait(:unprocessed_gem) do
+ package
+ file_fixture { 'spec/fixtures/packages/rubygems/package.gem' }
+ file_name { 'package.gem' }
+ file_sha1 { '5fe852b2a6abd96c22c11fa1ff2fb19d9ce58b57' }
+ size { 4.kilobytes }
+ end
+
trait(:gemspec) do
package
file_fixture { 'spec/fixtures/packages/rubygems/package.gemspec' }
diff --git a/spec/factories/sequences.rb b/spec/factories/sequences.rb
index f9952cd9966..b276e6f8cfc 100644
--- a/spec/factories/sequences.rb
+++ b/spec/factories/sequences.rb
@@ -19,4 +19,5 @@ FactoryBot.define do
sequence(:wip_title) { |n| "WIP: #{n}" }
sequence(:jira_title) { |n| "[PROJ-#{n}]: fix bug" }
sequence(:jira_branch) { |n| "feature/PROJ-#{n}" }
+ sequence(:job_name) { |n| "job #{n}" }
end
diff --git a/spec/factories/services.rb b/spec/factories/services.rb
index 18d3b2d99b7..25ef75880bb 100644
--- a/spec/factories/services.rb
+++ b/spec/factories/services.rb
@@ -56,20 +56,24 @@ FactoryBot.define do
api_url { nil }
username { 'jira_username' }
password { 'jira_password' }
+ jira_issue_transition_automatic { false }
jira_issue_transition_id { '56-1' }
issues_enabled { false }
project_key { nil }
vulnerabilities_enabled { false }
vulnerabilities_issuetype { nil }
+ deployment_type { 'cloud' }
end
before(:create) do |service, evaluator|
if evaluator.create_data
create(:jira_tracker_data, service: service,
- url: evaluator.url, api_url: evaluator.api_url, jira_issue_transition_id: evaluator.jira_issue_transition_id,
+ url: evaluator.url, api_url: evaluator.api_url,
+ jira_issue_transition_automatic: evaluator.jira_issue_transition_automatic,
+ jira_issue_transition_id: evaluator.jira_issue_transition_id,
username: evaluator.username, password: evaluator.password, issues_enabled: evaluator.issues_enabled,
project_key: evaluator.project_key, vulnerabilities_enabled: evaluator.vulnerabilities_enabled,
- vulnerabilities_issuetype: evaluator.vulnerabilities_issuetype
+ vulnerabilities_issuetype: evaluator.vulnerabilities_issuetype, deployment_type: evaluator.deployment_type
)
end
end
diff --git a/spec/factories/subscriptions.rb b/spec/factories/subscriptions.rb
index 2b652cd57bf..8ba96d9fa29 100644
--- a/spec/factories/subscriptions.rb
+++ b/spec/factories/subscriptions.rb
@@ -5,5 +5,11 @@ FactoryBot.define do
project
user { project.creator }
subscribable factory: :issue
+
+ trait :group_label do
+ project { nil }
+ user { association(:user) }
+ subscribable factory: :group_label
+ end
end
end
diff --git a/spec/factories/timelogs.rb b/spec/factories/timelogs.rb
index 5d34acc635d..204cb808c8e 100644
--- a/spec/factories/timelogs.rb
+++ b/spec/factories/timelogs.rb
@@ -1,11 +1,22 @@
# frozen_string_literal: true
-# Read about factories at https://github.com/thoughtbot/factory_bot
-
FactoryBot.define do
factory :timelog do
time_spent { 3600 }
- issue
- user { issue.project.creator }
+ for_issue
+
+ factory :issue_timelog, traits: [:for_issue]
+ factory :merge_request_timelog, traits: [:for_merge_request]
+
+ trait :for_issue do
+ issue
+ user { issue.author }
+ end
+
+ trait :for_merge_request do
+ merge_request
+ issue { nil }
+ user { merge_request.author }
+ end
end
end
diff --git a/spec/factories/users/in_product_marketing_email.rb b/spec/factories/users/in_product_marketing_email.rb
new file mode 100644
index 00000000000..c86c469ff31
--- /dev/null
+++ b/spec/factories/users/in_product_marketing_email.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :in_product_marketing_email, class: 'Users::InProductMarketingEmail' do
+ user
+
+ track { 'create' }
+ series { 0 }
+ end
+end
diff --git a/spec/factories_spec.rb b/spec/factories_spec.rb
index 56d643d0cc9..787e0540fda 100644
--- a/spec/factories_spec.rb
+++ b/spec/factories_spec.rb
@@ -32,7 +32,8 @@ RSpec.describe 'factories' do
[:project_member, :blocked],
[:project, :remote_mirror],
[:remote_mirror, :ssh],
- [:user_preference, :only_comments]
+ [:user_preference, :only_comments],
+ [:ci_pipeline_artifact, :remote_store]
]
end
@@ -43,7 +44,7 @@ RSpec.describe 'factories' do
end
it 'does not raise error when created' do
- expect { create(factory.name) }.not_to raise_error
+ expect { create(factory.name) }.not_to raise_error # rubocop:disable Rails/SaveBang
end
factory.definition.defined_traits.map(&:name).each do |trait_name|
@@ -65,14 +66,23 @@ RSpec.describe 'factories' do
# associations must be unique and cannot be reused, or the factory default
# is being mutated.
skip_factory_defaults = %i[
+ evidence
+ exported_protected_branch
fork_network_member
group_member
import_state
+ milestone_release
namespace
project_broken_repo
prometheus_alert
prometheus_alert_event
prometheus_metric
+ protected_branch
+ protected_branch_merge_access_level
+ protected_branch_push_access_level
+ protected_tag
+ release
+ release_link
self_managed_prometheus_alert_event
users_star_project
wiki_page
diff --git a/spec/features/admin/admin_abuse_reports_spec.rb b/spec/features/admin/admin_abuse_reports_spec.rb
index 192182adddc..3a02ce89aa9 100644
--- a/spec/features/admin/admin_abuse_reports_spec.rb
+++ b/spec/features/admin/admin_abuse_reports_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe "Admin::AbuseReports", :js do
describe 'filtering by user' do
let!(:user2) { create(:user) }
- let!(:abuse_report) { create(:abuse_report, user: user) }
+ let!(:abuse_report) { create(:abuse_report, user: user) }
let!(:abuse_report_2) { create(:abuse_report, user: user2) }
it 'shows only single user report' do
diff --git a/spec/features/admin/admin_appearance_spec.rb b/spec/features/admin/admin_appearance_spec.rb
index cd136af8d69..61e7efbc56c 100644
--- a/spec/features/admin/admin_appearance_spec.rb
+++ b/spec/features/admin/admin_appearance_spec.rb
@@ -66,7 +66,7 @@ RSpec.describe 'Admin Appearance' do
context 'when system header and footer messages are not empty' do
before do
- appearance.update(header_message: 'Foo', footer_message: 'Bar')
+ appearance.update!(header_message: 'Foo', footer_message: 'Bar')
end
it 'shows custom system header and footer fields' do
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index bbdf2f7f4a9..e7634f4e020 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -35,6 +35,7 @@ RSpec.describe 'Admin Groups' do
expect(page).to have_field('group_path')
expect(page).to have_field('group_visibility_level_0')
expect(page).to have_field('description')
+ expect(page).to have_field('group_admin_note_attributes_note')
end
end
@@ -47,10 +48,12 @@ RSpec.describe 'Admin Groups' do
path_component = 'gitlab'
group_name = 'GitLab group name'
group_description = 'Description of group for GitLab'
+ group_admin_note = 'A note about this group by an admin'
fill_in 'group_path', with: path_component
fill_in 'group_name', with: group_name
fill_in 'group_description', with: group_description
+ fill_in 'group_admin_note_attributes_note', with: group_admin_note
click_button "Create group"
expect(current_path).to eq admin_group_path(Group.find_by(path: path_component))
@@ -61,6 +64,8 @@ RSpec.describe 'Admin Groups' do
expect(li_texts).to match group_name
expect(li_texts).to match path_component
expect(li_texts).to match group_description
+ p_texts = content.all('p').collect(&:text).join('/n')
+ expect(p_texts).to match group_admin_note
end
it 'shows the visibility level radio populated with the default value' do
@@ -116,6 +121,16 @@ RSpec.describe 'Admin Groups' do
expect(page).to have_link(group.name, href: group_path(group))
end
+
+ it 'has a note if one is available' do
+ group = create(:group, :private)
+ note_text = 'A group administrator note'
+ group.update!(admin_note_attributes: { note: note_text })
+
+ visit admin_group_path(group)
+
+ expect(page).to have_text(note_text)
+ end
end
describe 'group edit' do
@@ -145,6 +160,36 @@ RSpec.describe 'Admin Groups' do
expect(name_field.value).to eq original_name
end
+
+ it 'adding an admin note to group without one' do
+ group = create(:group, :private)
+ expect(group.admin_note).to be_nil
+
+ visit admin_group_edit_path(group)
+ admin_note_text = 'A note by an administrator'
+
+ fill_in 'group_admin_note_attributes_note', with: admin_note_text
+ click_button 'Save changes'
+
+ expect(page).to have_content(admin_note_text)
+ end
+
+ it 'editing an existing group admin note' do
+ admin_note_text = 'A note by an administrator'
+ new_admin_note_text = 'A new note by an administrator'
+ group = create(:group, :private)
+ group.create_admin_note(note: admin_note_text)
+
+ visit admin_group_edit_path(group)
+
+ admin_note_field = find('#group_admin_note_attributes_note')
+ expect(admin_note_field.value).to eq(admin_note_text)
+
+ fill_in 'group_admin_note_attributes_note', with: new_admin_note_text
+ click_button 'Save changes'
+
+ expect(page).to have_content(new_admin_note_text)
+ end
end
describe 'add user into a group', :js do
diff --git a/spec/features/admin/admin_labels_spec.rb b/spec/features/admin/admin_labels_spec.rb
index 815a73b1450..43fb1f31a0f 100644
--- a/spec/features/admin/admin_labels_spec.rb
+++ b/spec/features/admin/admin_labels_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe 'admin issues labels' do
- let!(:bug_label) { Label.create(title: 'bug', template: true) }
- let!(:feature_label) { Label.create(title: 'feature', template: true) }
+ let!(:bug_label) { Label.create!(title: 'bug', template: true) }
+ let!(:feature_label) { Label.create!(title: 'feature', template: true) }
before do
admin = create(:admin)
@@ -36,7 +36,7 @@ RSpec.describe 'admin issues labels' do
it 'deletes all labels', :js do
page.within '.labels' do
- page.all('.remove-row').each do |remove|
+ page.all('.js-remove-row').each do |remove|
accept_confirm { remove.click }
wait_for_requests
end
diff --git a/spec/features/admin/admin_mode/login_spec.rb b/spec/features/admin/admin_mode/login_spec.rb
index f1dee075925..5b2dfdb2941 100644
--- a/spec/features/admin/admin_mode/login_spec.rb
+++ b/spec/features/admin/admin_mode/login_spec.rb
@@ -86,7 +86,7 @@ RSpec.describe 'Admin Mode Login' do
expect(codes.size).to eq 10
# Ensure the generated codes get saved
- user.save
+ user.save!
end
context 'with valid code' do
diff --git a/spec/features/admin/admin_mode/logout_spec.rb b/spec/features/admin/admin_mode/logout_spec.rb
index b7fa59bbfb7..8cfac5d8b99 100644
--- a/spec/features/admin/admin_mode/logout_spec.rb
+++ b/spec/features/admin/admin_mode/logout_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe 'Admin Mode Logout', :js do
let(:user) { create(:admin) }
before do
+ stub_feature_flags(combined_menu: false)
+
gitlab_sign_in(user)
gitlab_enable_admin_mode_sign_in(user)
visit admin_root_path
diff --git a/spec/features/admin/admin_mode_spec.rb b/spec/features/admin/admin_mode_spec.rb
index 8169b3a20db..633de20c82d 100644
--- a/spec/features/admin/admin_mode_spec.rb
+++ b/spec/features/admin/admin_mode_spec.rb
@@ -9,10 +9,12 @@ RSpec.describe 'Admin mode' do
let(:admin) { create(:admin) }
before do
+ stub_feature_flags(combined_menu: false)
+
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end
- context 'feature flag :user_mode_in_session is enabled', :request_store do
+ context 'application setting :admin_mode is enabled', :request_store do
before do
sign_in(admin)
end
@@ -155,9 +157,9 @@ RSpec.describe 'Admin mode' do
end
end
- context 'feature flag :user_mode_in_session is disabled' do
+ context 'application setting :admin_mode is disabled' do
before do
- stub_feature_flags(user_mode_in_session: false)
+ stub_application_setting(admin_mode: false)
sign_in(admin)
end
diff --git a/spec/features/admin/admin_runners_spec.rb b/spec/features/admin/admin_runners_spec.rb
index 4f135b81bdf..4e0dcbdf075 100644
--- a/spec/features/admin/admin_runners_spec.rb
+++ b/spec/features/admin/admin_runners_spec.rb
@@ -201,21 +201,21 @@ RSpec.describe "Admin Runners" do
visit admin_runners_path
- within '.runners-content .gl-responsive-table-row:nth-child(2)' do
+ within '[data-testid="runners-table"] .gl-responsive-table-row:nth-child(2)' do
expect(page).to have_content 'runner-2'
end
- within '.runners-content .gl-responsive-table-row:nth-child(3)' do
+ within '[data-testid="runners-table"] .gl-responsive-table-row:nth-child(3)' do
expect(page).to have_content 'runner-1'
end
sorting_by 'Last Contact'
- within '.runners-content .gl-responsive-table-row:nth-child(2)' do
+ within '[data-testid="runners-table"] .gl-responsive-table-row:nth-child(2)' do
expect(page).to have_content 'runner-1'
end
- within '.runners-content .gl-responsive-table-row:nth-child(3)' do
+ within '[data-testid="runners-table"] .gl-responsive-table-row:nth-child(3)' do
expect(page).to have_content 'runner-2'
end
end
@@ -285,8 +285,16 @@ RSpec.describe "Admin Runners" do
end
describe 'runner page breadcrumbs' do
- it 'contains the current runner’s short sha' do
- expect(page.find('h2')).to have_content(runner.short_sha)
+ it 'contains the current runner token' do
+ page.within '[data-testid="breadcrumb-links"]' do
+ expect(page.find('h2')).to have_content(runner.short_sha)
+ end
+ end
+ end
+
+ describe 'runner page title', :js do
+ it 'contains the runner id' do
+ expect(find('.page-title')).to have_content("Runner ##{runner.id}")
end
end
@@ -313,11 +321,11 @@ RSpec.describe "Admin Runners" do
describe 'enable/create' do
shared_examples 'assignable runner' do
it 'enables a runner for a project' do
- within '.unassigned-projects' do
+ within '[data-testid="unassigned-projects"]' do
click_on 'Enable'
end
- assigned_project = page.find('.assigned-projects')
+ assigned_project = page.find('[data-testid="assigned-projects"]')
expect(assigned_project).to have_content(@project2.path)
end
@@ -347,7 +355,7 @@ RSpec.describe "Admin Runners" do
let(:runner) { create(:ci_runner, :instance) }
before do
- @project1.destroy
+ @project1.destroy!
visit admin_runner_path(runner)
end
@@ -363,11 +371,11 @@ RSpec.describe "Admin Runners" do
end
it 'enables specific runner for project' do
- within '.assigned-projects' do
+ within '[data-testid="assigned-projects"]' do
click_on 'Disable'
end
- new_runner_project = page.find('.unassigned-projects')
+ new_runner_project = page.find('[data-testid="unassigned-projects"]')
expect(new_runner_project).to have_content(@project1.path)
end
diff --git a/spec/features/admin/admin_search_settings_spec.rb b/spec/features/admin/admin_search_settings_spec.rb
index a78d17a6651..cd61a1db6f3 100644
--- a/spec/features/admin/admin_search_settings_spec.rb
+++ b/spec/features/admin/admin_search_settings_spec.rb
@@ -20,8 +20,10 @@ RSpec.describe 'Admin searches application settings', :js do
end
context 'in ci/cd settings page' do
- let(:visit_path) { ci_cd_admin_application_settings_path }
+ before do
+ visit(ci_cd_admin_application_settings_path)
+ end
- it_behaves_like 'can search settings with feature flag check', 'Variables', 'Package Registry'
+ it_behaves_like 'can search settings', 'Variables', 'Package Registry'
end
end
diff --git a/spec/features/admin/admin_sees_project_statistics_spec.rb b/spec/features/admin/admin_sees_project_statistics_spec.rb
index be781730924..3433cc01b8e 100644
--- a/spec/features/admin/admin_sees_project_statistics_spec.rb
+++ b/spec/features/admin/admin_sees_project_statistics_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe "Admin > Admin sees project statistics" do
end
context 'when project has no statistics' do
- let(:project) { create(:project, :repository) { |project| project.statistics.destroy } }
+ let(:project) { create(:project, :repository) { |project| project.statistics.destroy! } }
it "shows 'Storage: Unknown'" do
expect(page).to have_content("Storage: Unknown")
diff --git a/spec/features/admin/admin_sees_projects_statistics_spec.rb b/spec/features/admin/admin_sees_projects_statistics_spec.rb
index 2e96814d1e9..d340eb47f34 100644
--- a/spec/features/admin/admin_sees_projects_statistics_spec.rb
+++ b/spec/features/admin/admin_sees_projects_statistics_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe "Admin > Admin sees projects statistics" do
before do
create(:project, :repository)
- create(:project, :repository) { |project| project.statistics.destroy }
+ create(:project, :repository) { |project| project.statistics.destroy! }
sign_in(current_user)
gitlab_enable_admin_mode_sign_in(current_user)
diff --git a/spec/features/admin/admin_settings_spec.rb b/spec/features/admin/admin_settings_spec.rb
index 249621f5835..9a2e2eb2f6f 100644
--- a/spec/features/admin/admin_settings_spec.rb
+++ b/spec/features/admin/admin_settings_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe 'Admin updates settings' do
let(:admin) { create(:admin) }
- context 'feature flag :user_mode_in_session is enabled', :request_store do
+ context 'application setting :admin_mode is enabled', :request_store do
before do
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
sign_in(admin)
@@ -129,7 +129,7 @@ RSpec.describe 'Admin updates settings' do
context 'Change Sign-up restrictions' do
context 'Require Admin approval for new signup setting' do
- it 'changes the setting' do
+ it 'changes the setting', :js do
page.within('.as-signup') do
check 'Require admin approval for new sign-ups'
click_button 'Save changes'
@@ -249,6 +249,14 @@ RSpec.describe 'Admin updates settings' do
expect(page).to have_content "Application settings saved successfully"
expect(current_settings.hide_third_party_offers).to be true
end
+ end
+
+ context 'when the Slack Notifications Service template is active' do
+ before do
+ create(:service, :template, type: 'SlackService', active: true)
+
+ visit general_admin_application_settings_path
+ end
it 'change Slack Notifications Service template settings', :js do
first(:link, 'Service Templates').click
@@ -588,7 +596,7 @@ RSpec.describe 'Admin updates settings' do
context 'Nav bar' do
it 'shows default help links in nav' do
- default_support_url = 'https://about.gitlab.com/getting-help/'
+ default_support_url = "https://#{ApplicationHelper.promo_host}/getting-help/"
visit root_dashboard_path
@@ -615,9 +623,9 @@ RSpec.describe 'Admin updates settings' do
end
end
- context 'feature flag :user_mode_in_session is disabled' do
+ context 'application setting :admin_mode is disabled' do
before do
- stub_feature_flags(user_mode_in_session: false)
+ stub_application_setting(admin_mode: false)
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
diff --git a/spec/features/admin/admin_users_impersonation_tokens_spec.rb b/spec/features/admin/admin_users_impersonation_tokens_spec.rb
index cae190e76b0..dc528dd92d4 100644
--- a/spec/features/admin/admin_users_impersonation_tokens_spec.rb
+++ b/spec/features/admin/admin_users_impersonation_tokens_spec.rb
@@ -75,7 +75,7 @@ RSpec.describe 'Admin > Users > Impersonation Tokens', :js do
end
it "removes expired tokens from 'active' section" do
- impersonation_token.update(expires_at: 5.days.ago)
+ impersonation_token.update!(expires_at: 5.days.ago)
visit admin_user_impersonation_tokens_path(user_id: user.username)
diff --git a/spec/features/admin/services/admin_activates_prometheus_spec.rb b/spec/features/admin/services/admin_activates_prometheus_spec.rb
deleted file mode 100644
index a225de365c8..00000000000
--- a/spec/features/admin/services/admin_activates_prometheus_spec.rb
+++ /dev/null
@@ -1,24 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Admin activates Prometheus', :js do
- let(:admin) { create(:user, :admin) }
-
- before do
- sign_in(admin)
- gitlab_enable_admin_mode_sign_in(admin)
-
- visit(admin_application_settings_services_path)
-
- click_link('Prometheus')
- end
-
- it 'activates service' do
- check('Active')
- fill_in('API URL', with: 'http://prometheus.example.com')
- click_button('Save changes')
-
- expect(page).to have_content('Application settings saved successfully')
- end
-end
diff --git a/spec/features/admin/services/admin_visits_service_templates_spec.rb b/spec/features/admin/services/admin_visits_service_templates_spec.rb
index 563bca8b32f..1fd8c8316e3 100644
--- a/spec/features/admin/services/admin_visits_service_templates_spec.rb
+++ b/spec/features/admin/services/admin_visits_service_templates_spec.rb
@@ -9,23 +9,45 @@ RSpec.describe 'Admin visits service templates' do
before do
sign_in(admin)
gitlab_enable_admin_mode_sign_in(admin)
-
- visit(admin_application_settings_services_path)
end
- context 'without instance-level integration' do
- it 'shows a link to service template' do
- expect(page).to have_link('Slack', href: edit_admin_application_settings_service_path(slack_service.id))
- expect(page).not_to have_link('Slack', href: edit_admin_application_settings_integration_path(slack_service))
+ context 'without an active service template' do
+ before do
+ visit(admin_application_settings_services_path)
+ end
+
+ it 'does not show service template content' do
+ expect(page).not_to have_content('Service template allows you to set default values for integrations')
end
end
- context 'with instance-level integration' do
- let_it_be(:slack_instance_integration) { create(:slack_service, instance: true, project: nil) }
+ context 'with an active service template' do
+ before do
+ create(:slack_service, :template, active: true)
+ visit(admin_application_settings_services_path)
+ end
+
+ it 'shows service template content' do
+ expect(page).to have_content('Service template allows you to set default values for integrations')
+ end
+
+ context 'without instance-level integration' do
+ it 'shows a link to service template' do
+ expect(page).to have_link('Slack', href: edit_admin_application_settings_service_path(slack_service.id))
+ expect(page).not_to have_link('Slack', href: edit_admin_application_settings_integration_path(slack_service))
+ end
+ end
+
+ context 'with instance-level integration' do
+ before do
+ create(:slack_service, instance: true, project: nil)
+ visit(admin_application_settings_services_path)
+ end
- it 'shows a link to instance-level integration' do
- expect(page).not_to have_link('Slack', href: edit_admin_application_settings_service_path(slack_service.id))
- expect(page).to have_link('Slack', href: edit_admin_application_settings_integration_path(slack_service))
+ it 'shows a link to instance-level integration' do
+ expect(page).not_to have_link('Slack', href: edit_admin_application_settings_service_path(slack_service.id))
+ expect(page).to have_link('Slack', href: edit_admin_application_settings_integration_path(slack_service))
+ end
end
end
end
diff --git a/spec/features/alerts_settings/user_views_alerts_settings_spec.rb b/spec/features/alerts_settings/user_views_alerts_settings_spec.rb
index 60f2f776595..6675abd6b42 100644
--- a/spec/features/alerts_settings/user_views_alerts_settings_spec.rb
+++ b/spec/features/alerts_settings/user_views_alerts_settings_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'Alert integrations settings form', :js do
it 'shows the alerts setting form title' do
page.within('#js-alert-management-settings') do
- expect(find('h4')).to have_content('Alerts')
+ expect(find('h4')).to have_content('Alert integrations')
end
end
diff --git a/spec/features/boards/add_issues_modal_spec.rb b/spec/features/boards/add_issues_modal_spec.rb
deleted file mode 100644
index 8d0fa3e023b..00000000000
--- a/spec/features/boards/add_issues_modal_spec.rb
+++ /dev/null
@@ -1,270 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Issue Boards add issue modal', :js do
- let(:project) { create(:project, :public) }
- let(:board) { create(:board, project: project) }
- let(:user) { create(:user) }
- let!(:planning) { create(:label, project: project, name: 'Planning') }
- let!(:label) { create(:label, project: project) }
- let!(:list1) { create(:list, board: board, label: planning, position: 0) }
- let!(:list2) { create(:list, board: board, label: label, position: 1) }
- let!(:issue) { create(:issue, project: project, title: 'abc', description: 'def') }
- let!(:issue2) { create(:issue, project: project, title: 'hij', description: 'klm') }
-
- before do
- project.add_maintainer(user)
-
- sign_in(user)
-
- visit project_board_path(project, board)
- wait_for_requests
- end
-
- it 'resets filtered search state' do
- visit project_board_path(project, board, search: 'testing')
-
- wait_for_requests
-
- click_button('Add issues')
-
- page.within('.add-issues-modal') do
- expect(find('.form-control').value).to eq('')
- expect(page).to have_selector('.clear-search', visible: false)
- expect(find('.form-control')[:placeholder]).to eq('Search or filter results...')
- end
- end
-
- context 'modal interaction' do
- before do
- stub_feature_flags(add_issues_button: true)
- end
-
- it 'opens modal' do
- click_button('Add issues')
-
- expect(page).to have_selector('.add-issues-modal')
- end
-
- it 'closes modal' do
- click_button('Add issues')
-
- page.within('.add-issues-modal') do
- find('.close').click
- end
-
- expect(page).not_to have_selector('.add-issues-modal')
- end
-
- it 'closes modal if cancel button clicked' do
- click_button('Add issues')
-
- page.within('.add-issues-modal') do
- click_button 'Cancel'
- end
-
- expect(page).not_to have_selector('.add-issues-modal')
- end
-
- it 'does not show tooltip on add issues button' do
- button = page.find('.filter-dropdown-container button', text: 'Add issues')
-
- expect(button[:title]).not_to eq("Please add a list to your board first")
- end
- end
-
- context 'issues list' do
- before do
- stub_feature_flags(add_issues_button: true)
- click_button('Add issues')
-
- wait_for_requests
- end
-
- it 'loads issues' do
- page.within('.add-issues-modal') do
- page.within('.gl-tabs') do
- expect(page).to have_content('2')
- end
-
- expect(page).to have_selector('.board-card', count: 2)
- end
- end
-
- it 'shows selected issues tab and empty state message' do
- page.within('.add-issues-modal') do
- click_link 'Selected issues'
-
- expect(page).not_to have_selector('.board-card')
- expect(page).to have_content("Go back to Open issues and select some issues to add to your board.")
- end
- end
-
- context 'list dropdown' do
- it 'resets after deleting list' do
- page.within('.add-issues-modal') do
- expect(find('.add-issues-footer')).to have_button(planning.title)
-
- click_button 'Cancel'
- end
-
- page.within(find('.board:nth-child(2)')) do
- find('button[title="List settings"]').click
- end
-
- page.within(find('.js-board-settings-sidebar')) do
- accept_confirm { find('[data-testid="remove-list"]').click }
- end
-
- click_button('Add issues')
-
- wait_for_requests
-
- page.within('.add-issues-modal') do
- expect(find('.add-issues-footer')).not_to have_button(planning.title)
- expect(find('.add-issues-footer')).to have_button(label.title)
- end
- end
- end
-
- context 'search' do
- it 'returns issues' do
- page.within('.add-issues-modal') do
- find('.form-control').native.send_keys(issue.title)
- find('.form-control').native.send_keys(:enter)
-
- wait_for_requests
-
- expect(page).to have_selector('.board-card', count: 1)
- end
- end
-
- it 'returns no issues' do
- page.within('.add-issues-modal') do
- find('.form-control').native.send_keys('testing search')
- find('.form-control').native.send_keys(:enter)
-
- wait_for_requests
-
- expect(page).not_to have_selector('.board-card')
- expect(page).not_to have_content("You haven't added any issues to your project yet")
- end
- end
- end
-
- context 'selecting issues' do
- it 'selects single issue' do
- page.within('.add-issues-modal') do
- first('.board-card .board-card-number').click
-
- page.within('.gl-tabs') do
- expect(page).to have_content('Selected issues 1')
- end
- end
- end
-
- it 'changes button text' do
- page.within('.add-issues-modal') do
- first('.board-card .board-card-number').click
-
- expect(first('.add-issues-footer .btn')).to have_content('Add 1 issue')
- end
- end
-
- it 'changes button text with plural' do
- page.within('.add-issues-modal') do
- all('.board-card .js-board-card-number-container').each do |el|
- el.click
- end
-
- expect(first('.add-issues-footer .btn')).to have_content('Add 2 issues')
- end
- end
-
- it 'shows only selected issues on selected tab' do
- page.within('.add-issues-modal') do
- first('.board-card .board-card-number').click
-
- click_link 'Selected issues'
-
- expect(page).to have_selector('.board-card', count: 1)
- end
- end
-
- it 'selects all issues' do
- page.within('.add-issues-modal') do
- click_button 'Select all'
-
- expect(page).to have_selector('.is-active', count: 2)
- end
- end
-
- it 'deselects all issues' do
- page.within('.add-issues-modal') do
- click_button 'Select all'
-
- expect(page).to have_selector('.is-active', count: 2)
-
- click_button 'Deselect all'
-
- expect(page).not_to have_selector('.is-active')
- end
- end
-
- it "selects all that aren't already selected" do
- page.within('.add-issues-modal') do
- first('.board-card .board-card-number').click
-
- expect(page).to have_selector('.is-active', count: 1)
-
- click_button 'Select all'
-
- expect(page).to have_selector('.is-active', count: 2)
- end
- end
-
- it 'unselects from selected tab' do
- page.within('.add-issues-modal') do
- first('.board-card .board-card-number').click
-
- click_link 'Selected issues'
-
- first('.board-card .board-card-number').click
-
- expect(page).not_to have_selector('.is-active')
- end
- end
- end
-
- context 'adding issues' do
- it 'adds to board' do
- page.within('.add-issues-modal') do
- first('.board-card .board-card-number').click
-
- click_button 'Add 1 issue'
- end
-
- page.within(find('.board:nth-child(2)')) do
- expect(page).to have_selector('.board-card')
- end
- end
-
- it 'adds to second list' do
- page.within('.add-issues-modal') do
- first('.board-card .board-card-number').click
-
- click_button planning.title
-
- click_link label.title
-
- click_button 'Add 1 issue'
- end
-
- page.within(find('.board:nth-child(3)')) do
- expect(page).to have_selector('.board-card')
- end
- end
- end
- end
-end
diff --git a/spec/features/boards/boards_spec.rb b/spec/features/boards/boards_spec.rb
index 2392f9d2f8a..ab544022bff 100644
--- a/spec/features/boards/boards_spec.rb
+++ b/spec/features/boards/boards_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Issue Boards', :js do
+RSpec.describe 'Project issue boards', :js do
include DragTo
include MobileHelpers
@@ -23,7 +23,7 @@ RSpec.describe 'Issue Boards', :js do
context 'no lists' do
before do
- visit project_board_path(project, board)
+ visit_project_board_path_without_query_limit(project, board)
end
it 'creates default lists' do
@@ -52,6 +52,7 @@ RSpec.describe 'Issue Boards', :js do
let_it_be(:a_plus) { create(:label, project: project, name: 'A+') }
let_it_be(:list1) { create(:list, board: board, label: planning, position: 0) }
let_it_be(:list2) { create(:list, board: board, label: development, position: 1) }
+ let_it_be(:backlog_list) { create(:backlog_list, board: board) }
let_it_be(:confidential_issue) { create(:labeled_issue, :confidential, project: project, author: user, labels: [planning], relative_position: 9) }
let_it_be(:issue1) { create(:labeled_issue, project: project, title: 'aaa', description: '111', assignees: [user], labels: [planning], relative_position: 8) }
@@ -68,7 +69,7 @@ RSpec.describe 'Issue Boards', :js do
before do
stub_feature_flags(board_new_list: false)
- visit project_board_path(project, board)
+ visit_project_board_path_without_query_limit(project, board)
wait_for_requests
@@ -121,7 +122,8 @@ RSpec.describe 'Issue Boards', :js do
context 'with the NOT queries feature flag disabled' do
before do
stub_feature_flags(not_issuable_queries: false)
- visit project_board_path(project, board)
+
+ visit_project_board_path_without_query_limit(project, board)
end
it 'does not have the != option' do
@@ -141,7 +143,8 @@ RSpec.describe 'Issue Boards', :js do
context 'with the NOT queries feature flag enabled' do
before do
stub_feature_flags(not_issuable_queries: true)
- visit project_board_path(project, board)
+
+ visit_project_board_path_without_query_limit(project, board)
end
it 'does not have the != option' do
@@ -171,8 +174,7 @@ RSpec.describe 'Issue Boards', :js do
it 'infinite scrolls list' do
create_list(:labeled_issue, 50, project: project, labels: [planning])
- visit project_board_path(project, board)
- wait_for_requests
+ visit_project_board_path_without_query_limit(project, board)
page.within(find('.board:nth-child(2)')) do
expect(page.find('.board-header')).to have_content('58')
@@ -180,15 +182,19 @@ RSpec.describe 'Issue Boards', :js do
expect(page).to have_content('Showing 20 of 58 issues')
find('.board .board-list')
- evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
- wait_for_requests
+
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
+ end
expect(page).to have_selector('.board-card', count: 40)
expect(page).to have_content('Showing 40 of 58 issues')
find('.board .board-list')
- evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
- wait_for_requests
+
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
+ end
expect(page).to have_selector('.board-card', count: 58)
expect(page).to have_content('Showing all issues')
@@ -236,13 +242,13 @@ RSpec.describe 'Issue Boards', :js do
wait_for_board_cards(4, 1)
expect(find('.board:nth-child(2)')).to have_content(development.title)
- expect(find('.board:nth-child(2)')).to have_content(planning.title)
+ expect(find('.board:nth-child(3)')).to have_content(planning.title)
# Make sure list positions are preserved after a reload
- visit project_board_path(project, board)
+ visit_project_board_path_without_query_limit(project, board)
expect(find('.board:nth-child(2)')).to have_content(development.title)
- expect(find('.board:nth-child(2)')).to have_content(planning.title)
+ expect(find('.board:nth-child(3)')).to have_content(planning.title)
end
it 'dragging does not duplicate list' do
@@ -254,7 +260,8 @@ RSpec.describe 'Issue Boards', :js do
expect(page).to have_selector(selector, text: development.title, count: 1)
end
- it 'issue moves between lists and does not show the "Development" label since the card is in the "Development" list label' do
+ # TODO https://gitlab.com/gitlab-org/gitlab/-/issues/323551
+ xit 'issue moves between lists and does not show the "Development" label since the card is in the "Development" list label' do
drag(list_from_index: 1, from_index: 1, list_to_index: 2)
wait_for_board_cards(2, 7)
@@ -467,14 +474,16 @@ RSpec.describe 'Issue Boards', :js do
end
it 'removes filtered labels' do
- set_filter("label", testing.title)
- click_filter_link(testing.title)
- submit_filter
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ set_filter("label", testing.title)
+ click_filter_link(testing.title)
+ submit_filter
- wait_for_board_cards(2, 1)
+ wait_for_board_cards(2, 1)
- find('.clear-search').click
- submit_filter
+ find('.clear-search').click
+ submit_filter
+ end
wait_for_board_cards(2, 8)
end
@@ -484,7 +493,9 @@ RSpec.describe 'Issue Boards', :js do
set_filter("label", testing.title)
click_filter_link(testing.title)
- submit_filter
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ submit_filter
+ end
wait_for_requests
@@ -494,13 +505,18 @@ RSpec.describe 'Issue Boards', :js do
expect(page).to have_content('Showing 20 of 51 issues')
find('.board .board-list')
- evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
+
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
+ end
expect(page).to have_selector('.board-card', count: 40)
expect(page).to have_content('Showing 40 of 51 issues')
find('.board .board-list')
- evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ evaluate_script("document.querySelectorAll('.board .board-list')[1].scrollTop = document.querySelectorAll('.board .board-list')[1].scrollHeight")
+ end
expect(page).to have_selector('.board-card', count: 51)
expect(page).to have_content('Showing all issues')
@@ -569,7 +585,7 @@ RSpec.describe 'Issue Boards', :js do
context 'keyboard shortcuts' do
before do
- visit project_board_path(project, board)
+ visit_project_board_path_without_query_limit(project, board)
wait_for_requests
end
@@ -617,15 +633,19 @@ RSpec.describe 'Issue Boards', :js do
def drag(selector: '.board-list', list_from_index: 0, from_index: 0, to_index: 0, list_to_index: 0, perform_drop: true)
# ensure there is enough horizontal space for four boards
- resize_window(2000, 800)
-
- drag_to(selector: selector,
- scrollable: '#board-app',
- list_from_index: list_from_index,
- from_index: from_index,
- to_index: to_index,
- list_to_index: list_to_index,
- perform_drop: perform_drop)
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ resize_window(2000, 800)
+
+ drag_to(selector: selector,
+ scrollable: '#board-app',
+ list_from_index: list_from_index,
+ from_index: from_index,
+ to_index: to_index,
+ list_to_index: list_to_index,
+ perform_drop: perform_drop)
+ end
+
+ wait_for_requests
end
def wait_for_board_cards(board_number, expected_cards)
@@ -666,4 +686,10 @@ RSpec.describe 'Issue Boards', :js do
accept_confirm { find('[data-testid="remove-list"]').click }
end
end
+
+ def visit_project_board_path_without_query_limit(project, board)
+ inspect_requests(inject_headers: { 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/323426' }) do
+ visit project_board_path(project, board)
+ end
+ end
end
diff --git a/spec/features/boards/focus_mode_spec.rb b/spec/features/boards/focus_mode_spec.rb
index b1684ad69a6..2bd1e625236 100644
--- a/spec/features/boards/focus_mode_spec.rb
+++ b/spec/features/boards/focus_mode_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'Issue Boards focus mode', :js do
wait_for_requests
end
- it 'shows focus mode button to guest users' do
- expect(page).to have_selector('.board-extra-actions .js-focus-mode-btn')
+ it 'shows focus mode button to anonymous users' do
+ expect(page).to have_selector('.js-focus-mode-btn')
end
end
diff --git a/spec/features/boards/modal_filter_spec.rb b/spec/features/boards/modal_filter_spec.rb
deleted file mode 100644
index 5aeb9eb5e50..00000000000
--- a/spec/features/boards/modal_filter_spec.rb
+++ /dev/null
@@ -1,228 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Issue Boards add issue modal filtering', :js do
- let(:project) { create(:project, :public) }
- let(:board) { create(:board, project: project) }
- let(:planning) { create(:label, project: project, name: 'Planning') }
- let!(:list1) { create(:list, board: board, label: planning, position: 0) }
- let(:user) { create(:user) }
- let(:user2) { create(:user) }
- let!(:issue1) { create(:issue, project: project) }
-
- before do
- project.add_maintainer(user)
-
- sign_in(user)
- end
-
- it 'shows empty state when no results found' do
- visit_board
-
- page.within('.add-issues-modal') do
- find('.form-control').native.send_keys('testing empty state')
- find('.form-control').native.send_keys(:enter)
-
- wait_for_requests
-
- expect(page).to have_content('There are no issues to show.')
- end
- end
-
- it 'restores filters when closing' do
- visit_board
-
- set_filter('milestone')
- click_filter_link('Upcoming')
- submit_filter
-
- page.within('.add-issues-modal') do
- wait_for_requests
-
- expect(page).to have_selector('.board-card', count: 0)
-
- click_button 'Cancel'
- end
-
- click_button('Add issues')
-
- page.within('.add-issues-modal') do
- wait_for_requests
-
- expect(page).to have_selector('.board-card', count: 1)
- end
- end
-
- it 'restores filters after clicking clear button' do
- visit_board
-
- set_filter('milestone')
- click_filter_link('Upcoming')
- submit_filter
-
- page.within('.add-issues-modal') do
- wait_for_requests
-
- expect(page).to have_selector('.board-card', count: 0)
-
- find('.clear-search').click
-
- wait_for_requests
-
- expect(page).to have_selector('.board-card', count: 1)
- end
- end
-
- context 'author' do
- let!(:issue) { create(:issue, project: project, author: user2) }
-
- before do
- project.add_developer(user2)
-
- visit_board
- end
-
- it 'filters by selected user' do
- set_filter('author')
- click_filter_link(user2.name)
- submit_filter
-
- page.within('.add-issues-modal') do
- wait_for_requests
-
- expect(page).to have_selector('.js-visual-token', text: user2.name)
- expect(page).to have_selector('.board-card', count: 1)
- end
- end
- end
-
- context 'assignee' do
- let!(:issue) { create(:issue, project: project, assignees: [user2]) }
-
- before do
- project.add_developer(user2)
-
- visit_board
- end
-
- it 'filters by unassigned' do
- set_filter('assignee')
- click_filter_link('None')
- submit_filter
-
- page.within('.add-issues-modal') do
- wait_for_requests
-
- expect(page).to have_selector('.js-visual-token', text: 'None')
- expect(page).to have_selector('.board-card', count: 1)
- end
- end
-
- it 'filters by selected user' do
- set_filter('assignee')
- click_filter_link(user2.name)
- submit_filter
-
- page.within('.add-issues-modal') do
- wait_for_requests
-
- expect(page).to have_selector('.js-visual-token', text: user2.name)
- expect(page).to have_selector('.board-card', count: 1)
- end
- end
- end
-
- context 'milestone' do
- let(:milestone) { create(:milestone, project: project) }
- let!(:issue) { create(:issue, project: project, milestone: milestone) }
-
- before do
- visit_board
- end
-
- it 'filters by upcoming milestone' do
- set_filter('milestone')
- click_filter_link('Upcoming')
- submit_filter
-
- page.within('.add-issues-modal') do
- wait_for_requests
-
- expect(page).to have_selector('.js-visual-token', text: 'Upcoming')
- expect(page).to have_selector('.board-card', count: 0)
- end
- end
-
- it 'filters by selected milestone' do
- set_filter('milestone')
- click_filter_link(milestone.name)
- submit_filter
-
- page.within('.add-issues-modal') do
- wait_for_requests
-
- expect(page).to have_selector('.js-visual-token', text: milestone.name)
- expect(page).to have_selector('.board-card', count: 1)
- end
- end
- end
-
- context 'label' do
- let(:label) { create(:label, project: project) }
- let!(:issue) { create(:labeled_issue, project: project, labels: [label]) }
-
- before do
- visit_board
- end
-
- it 'filters by no label' do
- set_filter('label')
- click_filter_link('None')
- submit_filter
-
- page.within('.add-issues-modal') do
- wait_for_requests
-
- expect(page).to have_selector('.js-visual-token', text: 'None')
- expect(page).to have_selector('.board-card', count: 1)
- end
- end
-
- it 'filters by label' do
- set_filter('label')
- click_filter_link(label.title)
- submit_filter
-
- page.within('.add-issues-modal') do
- wait_for_requests
-
- expect(page).to have_selector('.js-visual-token', text: label.title)
- expect(page).to have_selector('.board-card', count: 1)
- end
- end
- end
-
- def visit_board
- visit project_board_path(project, board)
- wait_for_requests
-
- click_button('Add issues')
- end
-
- def set_filter(type, text = '')
- find('.add-issues-modal .filtered-search').native.send_keys("#{type}:=#{text}")
- end
-
- def submit_filter
- find('.add-issues-modal .filtered-search').native.send_keys(:enter)
- end
-
- def click_filter_link(link_text)
- page.within('.add-issues-modal .filtered-search-box') do
- expect(page).to have_button(link_text)
-
- click_button(link_text)
- end
- end
-end
diff --git a/spec/features/boards/multi_select_spec.rb b/spec/features/boards/multi_select_spec.rb
index 162455f75e6..ca322355b8f 100644
--- a/spec/features/boards/multi_select_spec.rb
+++ b/spec/features/boards/multi_select_spec.rb
@@ -41,6 +41,10 @@ RSpec.describe 'Multi Select Issue', :js do
before do
project.add_maintainer(user)
+ # multi-drag disabled with feature flag for now
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/289797
+ stub_feature_flags(graphql_board_lists: false)
+
sign_in(user)
end
diff --git a/spec/features/boards/multiple_boards_spec.rb b/spec/features/boards/multiple_boards_spec.rb
index 2894d5c7666..219f24f60d7 100644
--- a/spec/features/boards/multiple_boards_spec.rb
+++ b/spec/features/boards/multiple_boards_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Multiple Issue Boards', :js do
let_it_be(:planning) { create(:label, project: project, name: 'Planning') }
let_it_be(:board) { create(:board, name: 'board1', project: project) }
let_it_be(:board2) { create(:board, name: 'board2', project: project) }
+
let(:parent) { project }
let(:boards_path) { project_boards_path(project) }
diff --git a/spec/features/boards/new_issue_spec.rb b/spec/features/boards/new_issue_spec.rb
index f434ea0c66f..20ae569322c 100644
--- a/spec/features/boards/new_issue_spec.rb
+++ b/spec/features/boards/new_issue_spec.rb
@@ -3,10 +3,12 @@
require 'spec_helper'
RSpec.describe 'Issue Boards new issue', :js do
- let(:project) { create(:project, :public) }
- let(:board) { create(:board, project: project) }
- let!(:list) { create(:list, board: board, position: 0) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:board) { create(:board, project: project) }
+ let_it_be(:backlog_list) { create(:backlog_list, board: board) }
+ let_it_be(:label) { create(:label, project: project, name: 'Label 1') }
+ let_it_be(:list) { create(:list, board: board, label: label, position: 0) }
+ let_it_be(:user) { create(:user) }
context 'authorized user' do
before do
@@ -15,6 +17,7 @@ RSpec.describe 'Issue Boards new issue', :js do
sign_in(user)
visit project_board_path(project, board)
+
wait_for_requests
expect(page).to have_selector('.board', count: 3)
@@ -57,7 +60,7 @@ RSpec.describe 'Issue Boards new issue', :js do
page.within(first('.board-new-issue-form')) do
find('.form-control').set('bug')
- click_button 'Submit issue'
+ click_button 'Create issue'
end
wait_for_requests
@@ -70,23 +73,24 @@ RSpec.describe 'Issue Boards new issue', :js do
issue = project.issues.find_by_title('bug')
expect(page).to have_content(issue.to_reference)
- expect(page).to have_link(issue.title, href: issue_path(issue))
+ expect(page).to have_link(issue.title, href: /#{issue_path(issue)}/)
end
end
- it 'shows sidebar when creating new issue' do
+ # TODO https://gitlab.com/gitlab-org/gitlab/-/issues/323446
+ xit 'shows sidebar when creating new issue' do
page.within(first('.board')) do
find('.issue-count-badge-add-button').click
end
page.within(first('.board-new-issue-form')) do
find('.form-control').set('bug')
- click_button 'Submit issue'
+ click_button 'Create issue'
end
wait_for_requests
- expect(page).to have_selector('.issue-boards-sidebar')
+ expect(page).to have_selector('[data-testid="issue-boards-sidebar"]')
end
it 'successfully loads labels to be added to newly created issue' do
@@ -96,17 +100,21 @@ RSpec.describe 'Issue Boards new issue', :js do
page.within(first('.board-new-issue-form')) do
find('.form-control').set('new issue')
- click_button 'Submit issue'
+ click_button 'Create issue'
end
wait_for_requests
- page.within(first('.issue-boards-sidebar')) do
- find('.labels .edit-link').click
+ page.within(first('.board')) do
+ find('.board-card').click
+ end
+
+ page.within(first('[data-testid="issue-boards-sidebar"]')) do
+ find('.labels [data-testid="edit-button"]').click
wait_for_requests
- expect(page).to have_selector('.labels .dropdown-content li a')
+ expect(page).to have_selector('.labels-select-contents-list .dropdown-content li a')
end
end
end
diff --git a/spec/features/boards/reload_boards_on_browser_back_spec.rb b/spec/features/boards/reload_boards_on_browser_back_spec.rb
index 181cbcc9811..36682036d48 100644
--- a/spec/features/boards/reload_boards_on_browser_back_spec.rb
+++ b/spec/features/boards/reload_boards_on_browser_back_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'Ensure Boards do not show stale data on browser back', :js do
fill_in 'issue_title', with: 'issue should be shown'
- click_button 'Submit issue'
+ click_button 'Create issue'
page.go_back
wait_for_requests
@@ -43,7 +43,7 @@ RSpec.describe 'Ensure Boards do not show stale data on browser back', :js do
issue = project.issues.find_by_title('issue should be shown')
expect(page).to have_content(issue.to_reference)
- expect(page).to have_link(issue.title, href: issue_path(issue))
+ expect(page).to have_link(issue.title, href: /#{issue_path(issue)}/)
end
end
end
diff --git a/spec/features/boards/sidebar_assignee_spec.rb b/spec/features/boards/sidebar_assignee_spec.rb
new file mode 100644
index 00000000000..e938612163f
--- /dev/null
+++ b/spec/features/boards/sidebar_assignee_spec.rb
@@ -0,0 +1,122 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Project issue boards sidebar assignee', :js do
+ include BoardHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:development) { create(:label, project: project, name: 'Development') }
+ let_it_be(:regression) { create(:label, project: project, name: 'Regression') }
+ let_it_be(:stretch) { create(:label, project: project, name: 'Stretch') }
+
+ let!(:issue1) { create(:labeled_issue, project: project, assignees: [user], labels: [development], relative_position: 2) }
+ let!(:issue2) { create(:labeled_issue, project: project, labels: [development, stretch], relative_position: 1) }
+ let(:board) { create(:board, project: project) }
+ let!(:list) { create(:list, board: board, label: development, position: 0) }
+ let(:card) { find('.board:nth-child(2)').first('.board-card') }
+
+ before do
+ project.add_maintainer(user)
+
+ sign_in(user)
+
+ visit project_board_path(project, board)
+ wait_for_requests
+ end
+
+ context 'assignee' do
+ it 'updates the issue assignee' do
+ click_card(card)
+
+ page.within('.assignee') do
+ click_button('Edit')
+
+ wait_for_requests
+
+ assignee = first('.gl-avatar-labeled').find('.gl-avatar-labeled-label').text
+
+ page.within('.dropdown-menu-user') do
+ first('.gl-avatar-labeled').click
+ end
+
+ click_button('Apply')
+ wait_for_requests
+
+ expect(page).to have_content(assignee)
+ end
+
+ expect(card).to have_selector('.avatar')
+ end
+
+ it 'removes the assignee' do
+ card_two = find('.board:nth-child(2)').find('.board-card:nth-child(2)')
+ click_card(card_two)
+
+ page.within('.assignee') do
+ click_button('Edit')
+
+ wait_for_requests
+
+ page.within('.dropdown-menu-user') do
+ find('[data-testid="unassign"]').click
+ end
+
+ click_button('Apply')
+ wait_for_requests
+
+ expect(page).to have_content('None')
+ end
+
+ expect(card_two).not_to have_selector('.avatar')
+ end
+
+ it 'assigns to current user' do
+ click_card(card)
+
+ page.within(find('.assignee')) do
+ expect(page).to have_content('None')
+
+ click_button 'assign yourself'
+
+ wait_for_requests
+
+ expect(page).to have_content(user.name)
+ end
+
+ expect(card).to have_selector('.avatar')
+ end
+
+ it 'updates assignee dropdown' do
+ click_card(card)
+
+ page.within('.assignee') do
+ click_button('Edit')
+
+ wait_for_requests
+
+ assignee = first('.gl-avatar-labeled').find('.gl-avatar-labeled-label').text
+
+ page.within('.dropdown-menu-user') do
+ first('.gl-avatar-labeled').click
+ end
+
+ click_button('Apply')
+ wait_for_requests
+
+ expect(page).to have_content(assignee)
+ end
+
+ page.within(find('.board:nth-child(2)')) do
+ find('.board-card:nth-child(2)').click
+ end
+
+ page.within('.assignee') do
+ click_button('Edit')
+
+ expect(find('.dropdown-menu')).to have_selector('.gl-new-dropdown-item-check-icon')
+ end
+ end
+ end
+end
diff --git a/spec/features/boards/sidebar_due_date_spec.rb b/spec/features/boards/sidebar_due_date_spec.rb
new file mode 100644
index 00000000000..141c574ffec
--- /dev/null
+++ b/spec/features/boards/sidebar_due_date_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Project issue boards sidebar due date', :js do
+ include BoardHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:issue) { create(:issue, project: project, relative_position: 1) }
+ let_it_be(:board) { create(:board, project: project) }
+ let_it_be(:list) { create(:list, board: board, position: 0) }
+
+ let(:card) { find('.board:nth-child(1)').first('.board-card') }
+
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ before do
+ project.add_maintainer(user)
+
+ sign_in(user)
+
+ visit project_board_path(project, board)
+ wait_for_requests
+ end
+
+ context 'due date' do
+ it 'updates due date' do
+ click_card(card)
+
+ page.within('[data-testid="sidebar-due-date"]') do
+ today = Date.today.day
+
+ click_button 'Edit'
+
+ click_button today.to_s
+
+ wait_for_requests
+
+ expect(page).to have_content(today.to_s(:medium))
+ end
+ end
+ end
+end
diff --git a/spec/features/boards/sidebar_labels_spec.rb b/spec/features/boards/sidebar_labels_spec.rb
new file mode 100644
index 00000000000..2f0230c61d8
--- /dev/null
+++ b/spec/features/boards/sidebar_labels_spec.rb
@@ -0,0 +1,166 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Project issue boards sidebar labels', :js do
+ include BoardHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:development) { create(:label, project: project, name: 'Development') }
+ let_it_be(:bug) { create(:label, project: project, name: 'Bug') }
+ let_it_be(:regression) { create(:label, project: project, name: 'Regression') }
+ let_it_be(:stretch) { create(:label, project: project, name: 'Stretch') }
+ let_it_be(:issue1) { create(:labeled_issue, project: project, labels: [development], relative_position: 2) }
+ let_it_be(:issue2) { create(:labeled_issue, project: project, labels: [development, stretch], relative_position: 1) }
+ let_it_be(:board) { create(:board, project: project) }
+ let_it_be(:list) { create(:list, board: board, label: development, position: 0) }
+
+ let(:card) { find('.board:nth-child(2)').first('.board-card') }
+
+ before do
+ project.add_maintainer(user)
+
+ sign_in(user)
+
+ visit project_board_path(project, board)
+ wait_for_requests
+ end
+
+ context 'labels' do
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/322725
+ xit 'shows current labels when editing' do
+ click_card(card)
+
+ page.within('.labels') do
+ click_link 'Edit'
+
+ wait_for_requests
+
+ page.within('.value') do
+ expect(page).to have_selector('.gl-label-text', count: 2)
+ expect(page).to have_content(development.title)
+ expect(page).to have_content(stretch.title)
+ end
+ end
+ end
+
+ it 'adds a single label' do
+ click_card(card)
+
+ page.within('.labels') do
+ click_button 'Edit'
+
+ wait_for_requests
+
+ click_link bug.title
+
+ find('[data-testid="close-icon"]').click
+
+ wait_for_requests
+
+ page.within('.value') do
+ expect(page).to have_selector('.gl-label-text', count: 3)
+ expect(page).to have_content(bug.title)
+ end
+ end
+
+ # 'Development' label does not show since the card is in a 'Development' list label
+ expect(card).to have_selector('.gl-label', count: 2)
+ expect(card).to have_content(bug.title)
+ end
+
+ it 'adds multiple labels' do
+ click_card(card)
+
+ page.within('.labels') do
+ click_button 'Edit'
+
+ wait_for_requests
+
+ click_link bug.title
+
+ click_link regression.title
+
+ find('[data-testid="close-icon"]').click
+
+ wait_for_requests
+
+ page.within('.value') do
+ expect(page).to have_selector('.gl-label-text', count: 4)
+ expect(page).to have_content(bug.title)
+ expect(page).to have_content(regression.title)
+ end
+ end
+
+ # 'Development' label does not show since the card is in a 'Development' list label
+ expect(card).to have_selector('.gl-label', count: 3)
+ expect(card).to have_content(bug.title)
+ expect(card).to have_content(regression.title)
+ end
+
+ it 'removes a label' do
+ click_card(card)
+
+ page.within('.labels') do
+ click_button 'Edit'
+
+ wait_for_requests
+
+ click_link stretch.title
+
+ find('[data-testid="close-icon"]').click
+
+ wait_for_requests
+
+ page.within('.value') do
+ expect(page).to have_selector('.gl-label-text', count: 1)
+ expect(page).not_to have_content(stretch.title)
+ end
+ end
+
+ # 'Development' label does not show since the card is in a 'Development' list label
+ expect(card).to have_selector('.gl-label-text', count: 0)
+ expect(card).not_to have_content(stretch.title)
+ end
+
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/324290
+ xit 'creates project label' do
+ click_card(card)
+
+ page.within('.labels') do
+ click_link 'Edit'
+ wait_for_requests
+
+ click_link 'Create project label'
+ fill_in 'new_label_name', with: 'test label'
+ first('.suggest-colors-dropdown a').click
+ click_button 'Create'
+ wait_for_requests
+
+ expect(page).to have_link 'test label'
+ end
+ expect(page).to have_selector('.board', count: 3)
+ end
+
+ # https://gitlab.com/gitlab-org/gitlab/-/issues/324290
+ xit 'creates project label and list' do
+ click_card(card)
+
+ page.within('.labels') do
+ click_link 'Edit'
+ wait_for_requests
+
+ click_link 'Create project label'
+ fill_in 'new_label_name', with: 'test label'
+ first('.suggest-colors-dropdown a').click
+ first('.js-add-list').click
+ click_button 'Create'
+ wait_for_requests
+
+ expect(page).to have_link 'test label'
+ end
+ expect(page).to have_selector('.board', count: 4)
+ end
+ end
+end
diff --git a/spec/features/boards/sidebar_milestones_spec.rb b/spec/features/boards/sidebar_milestones_spec.rb
new file mode 100644
index 00000000000..54182781a30
--- /dev/null
+++ b/spec/features/boards/sidebar_milestones_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Project issue boards sidebar milestones', :js do
+ include BoardHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
+ let_it_be(:issue1) { create(:issue, project: project, relative_position: 1) }
+ let_it_be(:issue2) { create(:issue, project: project, milestone: milestone, relative_position: 2) }
+ let_it_be(:board) { create(:board, project: project) }
+ let_it_be(:list) { create(:list, board: board, position: 0) }
+
+ let(:card1) { find('.board:nth-child(1) .board-card:nth-of-type(1)') }
+ let(:card2) { find('.board:nth-child(1) .board-card:nth-of-type(2)') }
+
+ before do
+ project.add_maintainer(user)
+
+ sign_in(user)
+
+ visit project_board_path(project, board)
+ wait_for_requests
+ end
+
+ context 'milestone' do
+ it 'adds a milestone' do
+ click_card(card1)
+
+ page.within('[data-testid="sidebar-milestones"]') do
+ click_button 'Edit'
+
+ wait_for_requests
+
+ click_button milestone.title
+
+ wait_for_requests
+
+ page.within('.value') do
+ expect(page).to have_content(milestone.title)
+ end
+ end
+ end
+
+ it 'removes a milestone' do
+ click_card(card2)
+
+ page.within('[data-testid="sidebar-milestones"]') do
+ click_button 'Edit'
+
+ wait_for_requests
+
+ click_button "No milestone"
+
+ wait_for_requests
+
+ page.within('.value') do
+ expect(page).not_to have_content(milestone.title)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/boards/sidebar_spec.rb b/spec/features/boards/sidebar_spec.rb
index c79bf2abff1..977147c3c6b 100644
--- a/spec/features/boards/sidebar_spec.rb
+++ b/spec/features/boards/sidebar_spec.rb
@@ -2,412 +2,33 @@
require 'spec_helper'
-RSpec.describe 'Issue Boards', :js do
+RSpec.describe 'Project issue boards sidebar', :js do
include BoardHelpers
- include FilteredSearchHelpers
- let(:user) { create(:user) }
- let(:user2) { create(:user) }
- let(:project) { create(:project, :public) }
- let!(:milestone) { create(:milestone, project: project) }
- let!(:development) { create(:label, project: project, name: 'Development') }
- let!(:bug) { create(:label, project: project, name: 'Bug') }
- let!(:regression) { create(:label, project: project, name: 'Regression') }
- let!(:stretch) { create(:label, project: project, name: 'Stretch') }
- let!(:issue1) { create(:labeled_issue, project: project, assignees: [user], milestone: milestone, labels: [development], relative_position: 2) }
- let!(:issue2) { create(:labeled_issue, project: project, labels: [development, stretch], relative_position: 1) }
- let(:board) { create(:board, project: project) }
- let!(:list) { create(:list, board: board, label: development, position: 0) }
- let(:card) { find('.board:nth-child(2)').first('.board-card') }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:board) { create(:board, project: project) }
+ let_it_be(:list) { create(:list, board: board, position: 0) }
- let(:application_settings) { {} }
-
- around do |example|
- freeze_time { example.run }
- end
+ let_it_be(:issue, reload: true) { create(:issue, project: project, relative_position: 1) }
before do
project.add_maintainer(user)
sign_in(user)
- stub_application_setting(application_settings)
-
visit project_board_path(project, board)
- wait_for_requests
- end
-
- it 'shows sidebar when clicking issue' do
- click_card(card)
-
- expect(page).to have_selector('.issue-boards-sidebar')
- end
-
- it 'closes sidebar when clicking issue' do
- click_card(card)
-
- expect(page).to have_selector('.issue-boards-sidebar')
-
- click_card(card)
-
- expect(page).not_to have_selector('.issue-boards-sidebar')
- end
-
- it 'closes sidebar when clicking close button' do
- click_card(card)
-
- expect(page).to have_selector('.issue-boards-sidebar')
-
- find('.gutter-toggle').click
-
- expect(page).not_to have_selector('.issue-boards-sidebar')
- end
-
- it 'shows issue details when sidebar is open' do
- click_card(card)
-
- page.within('.issue-boards-sidebar') do
- expect(page).to have_content(issue2.title)
- expect(page).to have_content(issue2.to_reference)
- end
- end
-
- context 'assignee' do
- it 'updates the issue assignee' do
- click_card(card)
-
- page.within('.assignee') do
- click_button('Edit')
-
- wait_for_requests
-
- assignee = first('.gl-avatar-labeled').find('.gl-avatar-labeled-label').text
-
- page.within('.dropdown-menu-user') do
- first('.gl-avatar-labeled').click
- end
-
- click_button('Edit')
- wait_for_requests
-
- expect(page).to have_content(assignee)
- end
-
- expect(card).to have_selector('.avatar')
- end
-
- it 'removes the assignee' do
- card_two = find('.board:nth-child(2)').find('.board-card:nth-child(2)')
- click_card(card_two)
-
- page.within('.assignee') do
- click_button('Edit')
-
- wait_for_requests
-
- page.within('.dropdown-menu-user') do
- find('[data-testid="unassign"]').click
- end
-
- click_button('Edit')
- wait_for_requests
-
- expect(page).to have_content('None')
- end
-
- expect(card_two).not_to have_selector('.avatar')
- end
-
- it 'assigns to current user' do
- click_card(card)
-
- page.within(find('.assignee')) do
- expect(page).to have_content('None')
-
- click_button 'assign yourself'
-
- wait_for_requests
-
- expect(page).to have_content(user.name)
- end
-
- expect(card).to have_selector('.avatar')
- end
-
- it 'updates assignee dropdown' do
- click_card(card)
-
- page.within('.assignee') do
- click_button('Edit')
-
- wait_for_requests
-
- assignee = first('.gl-avatar-labeled').find('.gl-avatar-labeled-label').text
-
- page.within('.dropdown-menu-user') do
- first('.gl-avatar-labeled').click
- end
-
- click_button('Edit')
- wait_for_requests
-
- expect(page).to have_content(assignee)
- end
-
- page.within(find('.board:nth-child(2)')) do
- find('.board-card:nth-child(2)').click
- end
-
- page.within('.assignee') do
- click_button('Edit')
-
- expect(find('.dropdown-menu')).to have_selector('.gl-new-dropdown-item-check-icon')
- end
- end
- end
-
- context 'milestone' do
- it 'adds a milestone' do
- click_card(card)
-
- page.within('.milestone') do
- click_link 'Edit'
-
- wait_for_requests
-
- click_link milestone.title
-
- wait_for_requests
-
- page.within('.value') do
- expect(page).to have_content(milestone.title)
- end
- end
- end
-
- it 'removes a milestone' do
- click_card(card)
- page.within('.milestone') do
- click_link 'Edit'
-
- wait_for_requests
-
- click_link "No milestone"
-
- wait_for_requests
-
- page.within('.value') do
- expect(page).not_to have_content(milestone.title)
- end
- end
- end
- end
-
- context 'time tracking' do
- let(:compare_meter_tooltip) { find('.time-tracking .time-tracking-content .compare-meter')['title'] }
-
- before do
- issue2.timelogs.create(time_spent: 14400, user: user)
- issue2.update!(time_estimate: 128800)
-
- click_card(card)
- end
-
- it 'shows time tracking progress bar' do
- expect(compare_meter_tooltip).to eq('Time remaining: 3d 7h 46m')
- end
-
- context 'when time_tracking_limit_to_hours is true' do
- let(:application_settings) { { time_tracking_limit_to_hours: true } }
-
- it 'shows time tracking progress bar' do
- expect(compare_meter_tooltip).to eq('Time remaining: 31h 46m')
- end
- end
- end
-
- context 'due date' do
- it 'updates due date' do
- click_card(card)
-
- page.within('.due_date') do
- click_link 'Edit'
-
- click_button Date.today.day
-
- wait_for_requests
-
- expect(page).to have_content(Date.today.to_s(:medium))
- end
- end
+ wait_for_requests
end
- context 'labels' do
- it 'shows current labels when editing' do
- click_card(card)
-
- page.within('.labels') do
- click_link 'Edit'
-
- wait_for_requests
-
- page.within('.value') do
- expect(page).to have_selector('.gl-label-text', count: 2)
- expect(page).to have_content(development.title)
- expect(page).to have_content(stretch.title)
- end
- end
- end
-
- it 'adds a single label' do
- click_card(card)
-
- page.within('.labels') do
- click_link 'Edit'
-
- wait_for_requests
-
- click_link bug.title
-
- wait_for_requests
-
- find('.dropdown-menu-close-icon').click
-
- page.within('.value') do
- expect(page).to have_selector('.gl-label-text', count: 3)
- expect(page).to have_content(bug.title)
- end
- end
-
- # 'Development' label does not show since the card is in a 'Development' list label
- expect(card).to have_selector('.gl-label', count: 2)
- expect(card).to have_content(bug.title)
- end
-
- it 'adds multiple labels' do
- click_card(card)
+ it_behaves_like 'issue boards sidebar'
- page.within('.labels') do
- click_link 'Edit'
-
- wait_for_requests
-
- click_link bug.title
-
- wait_for_requests
-
- click_link regression.title
-
- wait_for_requests
-
- find('.dropdown-menu-close-icon').click
-
- page.within('.value') do
- expect(page).to have_selector('.gl-label-text', count: 4)
- expect(page).to have_content(bug.title)
- expect(page).to have_content(regression.title)
- end
- end
-
- # 'Development' label does not show since the card is in a 'Development' list label
- expect(card).to have_selector('.gl-label', count: 3)
- expect(card).to have_content(bug.title)
- expect(card).to have_content(regression.title)
- end
-
- it 'removes a label' do
- click_card(card)
-
- page.within('.labels') do
- click_link 'Edit'
-
- wait_for_requests
-
- within('.dropdown-menu-labels') do
- click_link stretch.title
- end
-
- wait_for_requests
-
- find('.dropdown-menu-close-icon').click
-
- page.within('.value') do
- expect(page).to have_selector('.gl-label-text', count: 1)
- expect(page).not_to have_content(stretch.title)
- end
- end
-
- # 'Development' label does not show since the card is in a 'Development' list label
- expect(card).to have_selector('.gl-label-text', count: 0)
- expect(card).not_to have_content(stretch.title)
- end
-
- it 'creates project label' do
- click_card(card)
-
- page.within('.labels') do
- click_link 'Edit'
- wait_for_requests
-
- click_link 'Create project label'
- fill_in 'new_label_name', with: 'test label'
- first('.suggest-colors-dropdown a').click
- click_button 'Create'
- wait_for_requests
-
- expect(page).to have_link 'test label'
- end
- expect(page).to have_selector('.board', count: 3)
- end
-
- it 'creates project label and list' do
- click_card(card)
-
- page.within('.labels') do
- click_link 'Edit'
- wait_for_requests
-
- click_link 'Create project label'
- fill_in 'new_label_name', with: 'test label'
- first('.suggest-colors-dropdown a').click
- first('.js-add-list').click
- click_button 'Create'
- wait_for_requests
-
- expect(page).to have_link 'test label'
- end
- expect(page).to have_selector('.board', count: 4)
- end
+ def first_card
+ find('.board:nth-child(1)').first("[data-testid='board_card']")
end
- context 'subscription' do
- it 'changes issue subscription' do
- click_card(card)
- wait_for_requests
-
- page.within('.subscriptions') do
- find('[data-testid="subscription-toggle"] button:not(.is-checked)').click
- wait_for_requests
-
- expect(page).to have_css('[data-testid="subscription-toggle"] button.is-checked')
- end
- end
-
- it 'has checked subscription toggle when already subscribed' do
- create(:subscription, user: user, project: project, subscribable: issue2, subscribed: true)
- visit project_board_path(project, board)
- wait_for_requests
-
- click_card(card)
- wait_for_requests
-
- page.within('.subscriptions') do
- find('[data-testid="subscription-toggle"] button.is-checked').click
- wait_for_requests
-
- expect(page).to have_css('[data-testid="subscription-toggle"] button:not(.is-checked)')
- end
- end
+ def click_first_issue_card
+ click_card(first_card)
end
end
diff --git a/spec/features/boards/sub_group_project_spec.rb b/spec/features/boards/sub_group_project_spec.rb
index cd3d61726f6..bde5f061a67 100644
--- a/spec/features/boards/sub_group_project_spec.rb
+++ b/spec/features/boards/sub_group_project_spec.rb
@@ -21,7 +21,8 @@ RSpec.describe 'Sub-group project issue boards', :js do
wait_for_requests
end
- it 'creates new label from sidebar' do
+ # TODO https://gitlab.com/gitlab-org/gitlab/-/issues/324290
+ xit 'creates new label from sidebar' do
find('.board-card').click
page.within '.labels' do
diff --git a/spec/features/boards/user_adds_lists_to_board_spec.rb b/spec/features/boards/user_adds_lists_to_board_spec.rb
index b9945207bb2..5128fc4004e 100644
--- a/spec/features/boards/user_adds_lists_to_board_spec.rb
+++ b/spec/features/boards/user_adds_lists_to_board_spec.rb
@@ -71,10 +71,13 @@ RSpec.describe 'User adds lists', :js do
def select_label(board_new_list_enabled, label)
if board_new_list_enabled
- page.within('.board-add-new-list') do
- find('label', text: label.title).click
- click_button 'Add'
- end
+ click_button 'Select a label'
+
+ find('label', text: label.title).click
+
+ click_button 'Add to board'
+
+ wait_for_all_requests
else
page.within('.dropdown-menu-issues-board-new') do
click_link label.title
diff --git a/spec/features/calendar_spec.rb b/spec/features/calendar_spec.rb
index ee156bdcab4..0b73226268d 100644
--- a/spec/features/calendar_spec.rb
+++ b/spec/features/calendar_spec.rb
@@ -42,7 +42,7 @@ RSpec.describe 'Contributions Calendar', :js do
"#{contributions} #{'contribution'.pluralize(contributions)}"
end
- "#{get_cell_color_selector(contributions)}[title='#{contribution_text}<br />#{date}']"
+ "#{get_cell_color_selector(contributions)}[title='#{contribution_text}<br /><span class=\"gl-text-gray-300\">#{date}</span>']"
end
def push_code_contribution
@@ -64,7 +64,7 @@ RSpec.describe 'Contributions Calendar', :js do
author_id: user.id
}
- Event.create(note_comment_params)
+ Event.create!(note_comment_params)
end
def selected_day_activities(visible: true)
diff --git a/spec/features/callouts/service_templates_deprecation_spec.rb b/spec/features/callouts/service_templates_deprecation_spec.rb
new file mode 100644
index 00000000000..b6403b54e29
--- /dev/null
+++ b/spec/features/callouts/service_templates_deprecation_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Service templates deprecation callout' do
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:non_admin) { create(:user) }
+ let_it_be(:callout_content) { 'Service templates are deprecated and will be removed in GitLab 14.0.' }
+
+ context 'when a non-admin is logged in' do
+ before do
+ sign_in(non_admin)
+ visit root_dashboard_path
+ end
+
+ it 'does not display callout' do
+ expect(page).not_to have_content callout_content
+ end
+ end
+
+ context 'when an admin is logged in' do
+ before do
+ sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
+
+ visit root_dashboard_path
+ end
+
+ context 'with no active service templates' do
+ it 'does not display callout' do
+ expect(page).not_to have_content callout_content
+ end
+ end
+
+ context 'with active service template' do
+ before do
+ create(:service, :template, type: 'MattermostService', active: true)
+ visit root_dashboard_path
+ end
+
+ it 'displays callout' do
+ expect(page).to have_content callout_content
+ expect(page).to have_link 'See affected service templates', href: admin_application_settings_services_path
+ end
+
+ context 'when callout is dismissed', :js do
+ before do
+ find('[data-testid="close-service-templates-deprecated-callout"]').click
+
+ visit root_dashboard_path
+ end
+
+ it 'does not display callout' do
+ expect(page).not_to have_content callout_content
+ end
+ end
+ end
+ end
+end
diff --git a/spec/features/clusters/cluster_detail_page_spec.rb b/spec/features/clusters/cluster_detail_page_spec.rb
index 6fe6c099d80..84a18a45d35 100644
--- a/spec/features/clusters/cluster_detail_page_spec.rb
+++ b/spec/features/clusters/cluster_detail_page_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe 'Clusterable > Show page' do
it 'allows the user to set domain', :js do
visit cluster_path
- within '.js-cluster-integration-form' do
+ within '.js-cluster-details-form' do
fill_in('cluster_base_domain', with: 'test.com')
click_on 'Save changes'
end
@@ -39,7 +39,7 @@ RSpec.describe 'Clusterable > Show page' do
end
it 'shows help text with the domain as an alternative to custom domain', :js do
- within '.js-cluster-integration-form' do
+ within '.js-cluster-details-form' do
expect(find(cluster_ingress_help_text_selector).text).to include('192.168.1.100')
end
end
@@ -49,7 +49,7 @@ RSpec.describe 'Clusterable > Show page' do
it 'alternative to custom domain is not shown' do
visit cluster_path
- within '.js-cluster-integration-form' do
+ within '.js-cluster-details-form' do
expect(page).not_to have_selector(cluster_ingress_help_text_selector)
end
end
diff --git a/spec/features/clusters/cluster_health_dashboard_spec.rb b/spec/features/clusters/cluster_health_dashboard_spec.rb
index e9e3b48e9c0..862f34768c4 100644
--- a/spec/features/clusters/cluster_health_dashboard_spec.rb
+++ b/spec/features/clusters/cluster_health_dashboard_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe 'Cluster Health board', :js, :kubeclient, :use_clean_rails_memory
click_link 'Health'
- expect(page).to have_text('you must first install Prometheus in the Applications tab')
+ expect(page).to have_text('you must first enable Prometheus in the Integrations tab')
end
end
diff --git a/spec/features/commits_spec.rb b/spec/features/commits_spec.rb
index 1622979812d..2dafaedd262 100644
--- a/spec/features/commits_spec.rb
+++ b/spec/features/commits_spec.rb
@@ -140,7 +140,7 @@ RSpec.describe 'Commits' do
context 'when accessing internal project with disallowed access', :js, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/299575' do
before do
- project.update(
+ project.update!(
visibility_level: Gitlab::VisibilityLevel::INTERNAL,
public_builds: false)
create(:ci_job_artifact, :archive, file: artifacts_file, job: build)
diff --git a/spec/features/dashboard/active_tab_spec.rb b/spec/features/dashboard/active_tab_spec.rb
index ee85c136190..3a532cb4161 100644
--- a/spec/features/dashboard/active_tab_spec.rb
+++ b/spec/features/dashboard/active_tab_spec.rb
@@ -4,6 +4,8 @@ require 'spec_helper'
RSpec.describe 'Dashboard Active Tab', :js do
before do
+ stub_feature_flags(combined_menu: false)
+
sign_in(create(:user))
end
diff --git a/spec/features/dashboard/datetime_on_tooltips_spec.rb b/spec/features/dashboard/datetime_on_tooltips_spec.rb
index c14a6001a3e..442b8904974 100644
--- a/spec/features/dashboard/datetime_on_tooltips_spec.rb
+++ b/spec/features/dashboard/datetime_on_tooltips_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'Tooltips on .timeago dates', :js do
context 'on the activity tab' do
before do
- Event.create( project: project, author_id: user.id, action: :joined,
+ Event.create!( project: project, author_id: user.id, action: :joined,
updated_at: created_date, created_at: created_date)
sign_in user
diff --git a/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb b/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb
index c2a3b90b6f4..179d9d09905 100644
--- a/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb
+++ b/spec/features/dashboard/group_dashboard_with_external_authorization_service_spec.rb
@@ -8,6 +8,8 @@ RSpec.describe 'The group dashboard' do
let(:user) { create(:user) }
before do
+ stub_feature_flags(combined_menu: false)
+
sign_in user
end
diff --git a/spec/features/dashboard/issuables_counter_spec.rb b/spec/features/dashboard/issuables_counter_spec.rb
index 3cb7140d253..d4c6b6faa79 100644
--- a/spec/features/dashboard/issuables_counter_spec.rb
+++ b/spec/features/dashboard/issuables_counter_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'Navigation bar counter', :use_clean_rails_memory_store_caching d
before do
issue.assignees = [user]
- merge_request.update(assignees: [user])
+ merge_request.update!(assignees: [user])
sign_in(user)
end
@@ -35,7 +35,7 @@ RSpec.describe 'Navigation bar counter', :use_clean_rails_memory_store_caching d
expect_counters('merge_requests', '1')
- merge_request.update(assignees: [])
+ merge_request.update!(assignees: [])
user.invalidate_cache_counts
diff --git a/spec/features/dashboard/milestones_spec.rb b/spec/features/dashboard/milestones_spec.rb
index 308432b7a1b..992ed2f2ce6 100644
--- a/spec/features/dashboard/milestones_spec.rb
+++ b/spec/features/dashboard/milestones_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe 'Dashboard > Milestones' do
expect(current_path).to eq dashboard_milestones_path
expect(page).to have_content(milestone.title)
expect(page).to have_content(group.name)
- expect(first('.milestone')).to have_content('Merge Requests')
+ expect(first('.milestone')).to have_content('Merge requests')
end
describe 'new milestones dropdown', :js do
diff --git a/spec/features/dashboard/project_member_activity_index_spec.rb b/spec/features/dashboard/project_member_activity_index_spec.rb
index 6e6e466294f..c26a1a0b486 100644
--- a/spec/features/dashboard/project_member_activity_index_spec.rb
+++ b/spec/features/dashboard/project_member_activity_index_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'Project member activity', :js do
end
def visit_activities_and_wait_with_event(event_type)
- Event.create(project: project, author_id: user.id, action: event_type)
+ Event.create!(project: project, author_id: user.id, action: event_type)
visit activity_project_path(project)
wait_for_requests
end
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index d7330b5267b..20c753b1cdb 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe 'Dashboard Projects' do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository, name: 'awesome stuff') }
- let(:project2) { create(:project, :public, name: 'Community project') }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :repository) }
+ let_it_be(:project2) { create(:project, :public) }
before do
project.add_developer(user)
@@ -18,17 +18,10 @@ RSpec.describe 'Dashboard Projects' do
end
end
- it 'shows the project the user in a member of in the list' do
- visit dashboard_projects_path
- expect(page).to have_content('awesome stuff')
- end
-
- it 'shows "New project" button' do
+ it 'shows the customize banner', :js do
visit dashboard_projects_path
- page.within '#content-body' do
- expect(page).to have_link('New project')
- end
+ expect(page).to have_content('Do you want to customize this page?')
end
context 'when user has access to the project' do
@@ -48,7 +41,7 @@ RSpec.describe 'Dashboard Projects' do
expect(page).to have_content('Developer')
end
- project.members.last.update(access_level: 40)
+ project.members.last.update!(access_level: 40)
visit dashboard_projects_path
@@ -153,7 +146,7 @@ RSpec.describe 'Dashboard Projects' do
end
describe 'with a pipeline', :clean_gitlab_redis_shared_state do
- let(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit.sha, ref: project.default_branch) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, sha: project.commit.sha, ref: project.default_branch) }
before do
# Since the cache isn't updated when a new pipeline is created
@@ -190,7 +183,7 @@ RSpec.describe 'Dashboard Projects' do
let(:guest_user) { create(:user) }
before do
- project.update(public_builds: false)
+ project.update!(public_builds: false)
project.add_guest(guest_user)
sign_in(guest_user)
end
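The dashboard projects spec above also converts let to let_it_be. A rough sketch of the difference, assuming the test-prof implementation that GitLab's spec suite uses:

  # let: a fresh record is created for every example that references it.
  let(:project) { create(:project) }

  # let_it_be: the record is created once (in a before_all hook) and shared by
  # the whole example group; reload: true reloads it between examples so
  # changes made by one example do not leak into the next.
  let_it_be(:project, reload: true) { create(:project, :repository) }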
diff --git a/spec/features/dashboard/shortcuts_spec.rb b/spec/features/dashboard/shortcuts_spec.rb
index b2fda28f0ec..e96a60b2ab2 100644
--- a/spec/features/dashboard/shortcuts_spec.rb
+++ b/spec/features/dashboard/shortcuts_spec.rb
@@ -3,6 +3,10 @@
require 'spec_helper'
RSpec.describe 'Dashboard shortcuts', :js do
+ before do
+ stub_feature_flags(combined_menu: false)
+ end
+
context 'logged in' do
let(:user) { create(:user) }
let(:project) { create(:project) }
@@ -20,7 +24,7 @@ RSpec.describe 'Dashboard shortcuts', :js do
find('body').send_keys([:shift, 'M'])
- check_page_title('Merge Requests')
+ check_page_title('Merge requests')
find('body').send_keys([:shift, 'T'])
diff --git a/spec/features/discussion_comments/merge_request_spec.rb b/spec/features/discussion_comments/merge_request_spec.rb
index f60d7da6a30..a90ff3721d3 100644
--- a/spec/features/discussion_comments/merge_request_spec.rb
+++ b/spec/features/discussion_comments/merge_request_spec.rb
@@ -8,8 +8,6 @@ RSpec.describe 'Thread Comments Merge Request', :js do
let(:merge_request) { create(:merge_request, source_project: project) }
before do
- stub_feature_flags(remove_resolve_note: false)
-
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/error_pages_spec.rb b/spec/features/error_pages_spec.rb
index 77f8aa87237..8dc9e5ade46 100644
--- a/spec/features/error_pages_spec.rb
+++ b/spec/features/error_pages_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Error Pages' do
+RSpec.describe 'Error Pages', :js do
let(:user) { create(:user) }
let(:project) { create(:project, :public) }
@@ -14,7 +14,12 @@ RSpec.describe 'Error Pages' do
it 'shows nav links' do
expect(page).to have_link("Home", href: root_path)
expect(page).to have_link("Help", href: help_path)
- expect(page).to have_link(nil, href: destroy_user_session_path)
+ end
+
+ it 'allows user to sign out' do
+ click_link 'Sign out and sign in with a different account'
+
+ expect(page).to have_current_path(new_user_session_path)
end
end
diff --git a/spec/features/error_tracking/user_filters_errors_by_status_spec.rb b/spec/features/error_tracking/user_filters_errors_by_status_spec.rb
index a0d93b791d9..6846d8f6ade 100644
--- a/spec/features/error_tracking/user_filters_errors_by_status_spec.rb
+++ b/spec/features/error_tracking/user_filters_errors_by_status_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'When a user filters Sentry errors by status', :js, :use_clean_ra
let_it_be(:issues_response_body) { fixture_file('sentry/issues_sample_response.json') }
let_it_be(:filtered_errors_by_status_response) { Gitlab::Json.parse(issues_response_body).filter { |error| error['status'] == 'ignored' }.to_json }
+
let(:issues_api_url) { "#{sentry_api_urls.issues_url}?limit=20&query=is:unresolved" }
let(:issues_api_url_filter) { "#{sentry_api_urls.issues_url}?limit=20&query=is:ignored" }
let(:auth_token) {{ 'Authorization' => 'Bearer access_token_123' }}
diff --git a/spec/features/error_tracking/user_searches_sentry_errors_spec.rb b/spec/features/error_tracking/user_searches_sentry_errors_spec.rb
index 025a6261957..c16c9d3fb1f 100644
--- a/spec/features/error_tracking/user_searches_sentry_errors_spec.rb
+++ b/spec/features/error_tracking/user_searches_sentry_errors_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'When a user searches for Sentry errors', :js, :use_clean_rails_m
let_it_be(:issues_response_body) { fixture_file('sentry/issues_sample_response.json') }
let_it_be(:error_search_response_body) { fixture_file('sentry/error_list_search_response.json') }
+
let(:issues_api_url) { "#{sentry_api_urls.issues_url}?limit=20&query=is:unresolved" }
let(:issues_api_url_search) { "#{sentry_api_urls.issues_url}?limit=20&query=is:unresolved%20NotFound" }
diff --git a/spec/features/error_tracking/user_sees_error_index_spec.rb b/spec/features/error_tracking/user_sees_error_index_spec.rb
index a4b15432ef3..bc6709c659d 100644
--- a/spec/features/error_tracking/user_sees_error_index_spec.rb
+++ b/spec/features/error_tracking/user_sees_error_index_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe 'View error index page', :js, :use_clean_rails_memory_store_cachi
let_it_be(:issues_response_body) { fixture_file('sentry/issues_sample_response.json') }
let_it_be(:issues_response) { Gitlab::Json.parse(issues_response_body) }
+
let(:issues_api_url) { "#{sentry_api_urls.issues_url}?limit=20&query=is:unresolved" }
before do
@@ -41,7 +42,7 @@ RSpec.describe 'View error index page', :js, :use_clean_rails_memory_store_cachi
context 'with error tracking settings disabled' do
before do
- project_error_tracking_settings.update(enabled: false)
+ project_error_tracking_settings.update!(enabled: false)
sign_in(project.owner)
visit project_error_tracking_index_path(project)
diff --git a/spec/features/file_uploads/attachment_spec.rb b/spec/features/file_uploads/attachment_spec.rb
new file mode 100644
index 00000000000..9ad404ce869
--- /dev/null
+++ b/spec/features/file_uploads/attachment_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Upload an attachment', :api, :js do
+ include_context 'file upload requests helpers'
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { project.owner }
+ let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
+
+ let(:api_path) { "/projects/#{project_id}/uploads" }
+ let(:url) { capybara_url(api(api_path)) }
+ let(:file) { fixture_file_upload('spec/fixtures/dk.png') }
+
+ subject do
+ HTTParty.post(
+ url,
+ headers: { 'PRIVATE-TOKEN' => personal_access_token.token },
+ body: { file: file }
+ )
+ end
+
+ shared_examples 'for an attachment' do
+ it 'creates files' do
+ expect { subject }
+ .to change { Upload.count }.by(1)
+ end
+
+ it { expect(subject.code).to eq(201) }
+ end
+
+ context 'with an integer project ID' do
+ let(:project_id) { project.id }
+
+ it_behaves_like 'handling file uploads', 'for an attachment'
+ end
+
+ context 'with an encoded project ID' do
+ let(:project_id) { "#{project.namespace.path}%2F#{project.path}" }
+
+ it_behaves_like 'handling file uploads', 'for an attachment'
+ end
+end
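The new attachment spec passes the name of its own shared example ('for an attachment') into the 'handling file uploads' shared example group, whose definition is not part of this diff. A hypothetical, reduced version of that group, only to show how the parameterised nesting works:

  RSpec.shared_examples 'handling file uploads' do |shared_examples_name|
    context 'with a standard multipart upload' do
      it_behaves_like shared_examples_name
    end

    # The real group presumably adds further contexts (for example a
    # Workhorse-accelerated upload path) and re-runs the same expectations.
  end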
diff --git a/spec/features/file_uploads/maven_package_spec.rb b/spec/features/file_uploads/maven_package_spec.rb
index e87eec58618..ab9f023bd8f 100644
--- a/spec/features/file_uploads/maven_package_spec.rb
+++ b/spec/features/file_uploads/maven_package_spec.rb
@@ -6,16 +6,17 @@ RSpec.describe 'Upload a maven package', :api, :js do
include_context 'file upload requests helpers'
let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user, :admin) }
+ let_it_be(:user) { project.owner }
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
- let(:api_path) { "/projects/#{project.id}/packages/maven/com/example/my-app/1.0/my-app-1.0-20180724.124855-1.jar" }
+ let(:project_id) { project.id }
+ let(:api_path) { "/projects/#{project_id}/packages/maven/com/example/my-app/1.0/my-app-1.0-20180724.124855-1.jar" }
let(:url) { capybara_url(api(api_path, personal_access_token: personal_access_token)) }
let(:file) { fixture_file_upload('spec/fixtures/dk.png') }
subject { HTTParty.put(url, body: file.read) }
- RSpec.shared_examples 'for a maven package' do
+ shared_examples 'for a maven package' do
it 'creates package files' do
expect { subject }
.to change { Packages::Package.maven.count }.by(1)
@@ -25,9 +26,9 @@ RSpec.describe 'Upload a maven package', :api, :js do
it { expect(subject.code).to eq(200) }
end
- RSpec.shared_examples 'for a maven sha1' do
+ shared_examples 'for a maven sha1' do
let(:dummy_package) { double(Packages::Package) }
- let(:api_path) { "/projects/#{project.id}/packages/maven/com/example/my-app/1.0/my-app-1.0-20180724.124855-1.jar.sha1" }
+ let(:api_path) { "/projects/#{project_id}/packages/maven/com/example/my-app/1.0/my-app-1.0-20180724.124855-1.jar.sha1" }
before do
# The sha verification done by the maven api is between:
@@ -42,8 +43,8 @@ RSpec.describe 'Upload a maven package', :api, :js do
it { expect(subject.code).to eq(204) }
end
- RSpec.shared_examples 'for a maven md5' do
- let(:api_path) { "/projects/#{project.id}/packages/maven/com/example/my-app/1.0/my-app-1.0-20180724.124855-1.jar.md5" }
+ shared_examples 'for a maven md5' do
+ let(:api_path) { "/projects/#{project_id}/packages/maven/com/example/my-app/1.0/my-app-1.0-20180724.124855-1.jar.md5" }
let(:file) { StringIO.new('dummy_package') }
it { expect(subject.code).to eq(200) }
@@ -52,4 +53,10 @@ RSpec.describe 'Upload a maven package', :api, :js do
it_behaves_like 'handling file uploads', 'for a maven package'
it_behaves_like 'handling file uploads', 'for a maven sha1'
it_behaves_like 'handling file uploads', 'for a maven md5'
+
+ context 'with an encoded project ID' do
+ let(:project_id) { "#{project.namespace.path}%2F#{project.path}" }
+
+ it_behaves_like 'handling file uploads', 'for a maven package'
+ end
end
diff --git a/spec/features/file_uploads/nuget_package_spec.rb b/spec/features/file_uploads/nuget_package_spec.rb
index 6e05e5d1a6e..871c0274445 100644
--- a/spec/features/file_uploads/nuget_package_spec.rb
+++ b/spec/features/file_uploads/nuget_package_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe 'Upload a nuget package', :api, :js do
include_context 'file upload requests helpers'
let_it_be(:project) { create(:project) }
- let_it_be(:user) { create(:user, :admin) }
+ let_it_be(:user) { project.owner }
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
let(:api_path) { "/projects/#{project.id}/packages/nuget/" }
@@ -21,7 +21,7 @@ RSpec.describe 'Upload a nuget package', :api, :js do
)
end
- RSpec.shared_examples 'for a nuget package' do
+ shared_examples 'for a nuget package' do
it 'creates package files' do
expect { subject }
.to change { Packages::Package.nuget.count }.by(1)
diff --git a/spec/features/file_uploads/rubygem_package_spec.rb b/spec/features/file_uploads/rubygem_package_spec.rb
new file mode 100644
index 00000000000..4a5891fdfed
--- /dev/null
+++ b/spec/features/file_uploads/rubygem_package_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Upload a RubyGems package', :api, :js do
+ include_context 'file upload requests helpers'
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { project.owner }
+ let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
+
+ let(:api_path) { "/projects/#{project_id}/packages/rubygems/api/v1/gems" }
+ let(:url) { capybara_url(api(api_path)) }
+ let(:file) { fixture_file_upload('spec/fixtures/dk.png') }
+
+ subject do
+ HTTParty.post(
+ url,
+ headers: { 'Authorization' => personal_access_token.token },
+ body: { file: file }
+ )
+ end
+
+ shared_examples 'for a Rubygems package' do
+ it 'creates package files' do
+ expect { subject }
+ .to change { Packages::Package.rubygems.count }.by(1)
+ .and change { Packages::PackageFile.count }.by(1)
+ end
+
+ it { expect(subject.code).to eq(201) }
+ end
+
+ context 'with an integer project ID' do
+ let(:project_id) { project.id }
+
+ it_behaves_like 'handling file uploads', 'for a Rubygems package'
+ end
+
+ context 'with an encoded project ID' do
+ let(:project_id) { "#{project.namespace.path}%2F#{project.path}" }
+
+ it_behaves_like 'handling file uploads', 'for a Rubygems package'
+ end
+end
diff --git a/spec/features/frequently_visited_projects_and_groups_spec.rb b/spec/features/frequently_visited_projects_and_groups_spec.rb
index b8797d9c139..6c25afdf6d4 100644
--- a/spec/features/frequently_visited_projects_and_groups_spec.rb
+++ b/spec/features/frequently_visited_projects_and_groups_spec.rb
@@ -6,6 +6,8 @@ RSpec.describe 'Frequently visited items', :js do
let_it_be(:user) { create(:user) }
before do
+ stub_feature_flags(combined_menu: false)
+
sign_in(user)
end
diff --git a/spec/features/gitlab_experiments_spec.rb b/spec/features/gitlab_experiments_spec.rb
new file mode 100644
index 00000000000..76b418adcea
--- /dev/null
+++ b/spec/features/gitlab_experiments_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe "Gitlab::Experiment", :js do
+ # This is part of a set of tests that ensure that tracking remains
+ # consistent at the front end layer. Since we don't want to actually
+ # introduce an experiment in real code, we're going to simulate it
+ # here.
+ let(:user) { create(:user) }
+
+ before do
+ admin = create(:admin)
+ sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
+ stub_experiments(null_hypothesis: :candidate)
+ end
+
+ describe 'with event tracking' do
+ it 'publishes the experiments that have been run to the client', :experiment do
+ allow_next_instance_of(Admin::AbuseReportsController) do |instance|
+ allow(instance).to receive(:index).and_wrap_original do |original|
+ instance.experiment(:null_hypothesis, user: instance.current_user) do |e|
+ e.use { original.call }
+ e.try { original.call }
+ end
+ end
+ end
+
+ visit admin_abuse_reports_path
+
+ expect(page).to have_content('Abuse Reports')
+
+ published_experiments = page.evaluate_script('window.gon.experiment')
+ expect(published_experiments).to include({
+ 'null_hypothesis' => {
+ 'experiment' => 'null_hypothesis',
+ 'key' => anything,
+ 'variant' => 'candidate'
+ }
+ })
+ end
+ end
+end
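The new Gitlab::Experiment spec wraps a controller action in an experiment with use/try branches. A minimal sketch of that block form, assuming the gitlab-experiment DSL; render_control and render_candidate are placeholders:

  experiment(:null_hypothesis, user: current_user) do |e|
    e.use { render_control }    # control behaviour
    e.try { render_candidate }  # candidate behaviour
  end
  # With stub_experiments(null_hypothesis: :candidate) in place, the candidate
  # branch runs and the assignment is published to the client, which is what
  # the spec reads back from window.gon.experiment.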
diff --git a/spec/features/groups/board_spec.rb b/spec/features/groups/board_spec.rb
index aab3f5e68d5..b4c60ff4fa3 100644
--- a/spec/features/groups/board_spec.rb
+++ b/spec/features/groups/board_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe 'Group Boards' do
find('.gl-new-dropdown-item button').click
end
- click_button 'Submit issue'
+ click_button 'Create issue'
expect(page).to have_content(issue_title)
end
diff --git a/spec/features/groups/clusters/user_spec.rb b/spec/features/groups/clusters/user_spec.rb
index 97f8864aab2..2a7ededa39b 100644
--- a/spec/features/groups/clusters/user_spec.rb
+++ b/spec/features/groups/clusters/user_spec.rb
@@ -97,7 +97,7 @@ RSpec.describe 'User Cluster', :js do
context 'when user disables the cluster' do
before do
page.find(:css, '.js-cluster-enable-toggle-area .js-project-feature-toggle').click
- page.within('.js-cluster-integration-form') { click_button 'Save changes' }
+ page.within('.js-cluster-details-form') { click_button 'Save changes' }
end
it 'user sees the successful message' do
diff --git a/spec/features/groups/group_page_with_external_authorization_service_spec.rb b/spec/features/groups/group_page_with_external_authorization_service_spec.rb
index 8ef1b60d8ca..187d878472e 100644
--- a/spec/features/groups/group_page_with_external_authorization_service_spec.rb
+++ b/spec/features/groups/group_page_with_external_authorization_service_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe 'The group page' do
expect(page).to have_link('Details')
expect(page).to have_link('Activity')
expect(page).to have_link('Issues')
- expect(page).to have_link('Merge Requests')
+ expect(page).to have_link('Merge requests')
expect(page).to have_link('Members')
end
end
@@ -50,7 +50,7 @@ RSpec.describe 'The group page' do
expect(page).not_to have_link('Contribution')
expect(page).not_to have_link('Issues')
- expect(page).not_to have_link('Merge Requests')
+ expect(page).not_to have_link('Merge requests')
expect(page).to have_link('Members')
end
end
diff --git a/spec/features/groups/group_settings_spec.rb b/spec/features/groups/group_settings_spec.rb
index 60cd1ebbbd7..00ad1006037 100644
--- a/spec/features/groups/group_settings_spec.rb
+++ b/spec/features/groups/group_settings_spec.rb
@@ -175,7 +175,7 @@ RSpec.describe 'Edit group settings' do
end
def updated_emails_disabled?
- group.reload.clear_memoization(:emails_disabled)
+ group.reload.clear_memoization(:emails_disabled_memoized)
group.emails_disabled?
end
end
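The group settings spec now clears :emails_disabled_memoized rather than :emails_disabled, which suggests the memoization key behind Group#emails_disabled? was renamed. A hypothetical sketch of the pattern, assuming Gitlab::Utils::StrongMemoize:

  class Group < Namespace
    include Gitlab::Utils::StrongMemoize

    def emails_disabled?
      strong_memoize(:emails_disabled_memoized) do
        compute_emails_disabled # placeholder for the real per-group lookup
      end
    end
  end

  # In the spec, the cached value has to be dropped before re-asserting:
  group.clear_memoization(:emails_disabled_memoized)
  group.emails_disabled?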
diff --git a/spec/features/groups/issues_spec.rb b/spec/features/groups/issues_spec.rb
index 8ecd2beba68..b0d2f90145f 100644
--- a/spec/features/groups/issues_spec.rb
+++ b/spec/features/groups/issues_spec.rb
@@ -108,7 +108,7 @@ RSpec.describe 'Group issues page' do
it 'shows projects only with issues feature enabled', :js do
find('.empty-state .js-lazy-loaded')
- find('.new-project-item-link').click
+ find('.empty-state .new-project-item-link').click
page.within('.select2-results') do
expect(page).to have_content(project.full_name)
diff --git a/spec/features/groups/labels/index_spec.rb b/spec/features/groups/labels/index_spec.rb
index 3de29231f5c..68f03368989 100644
--- a/spec/features/groups/labels/index_spec.rb
+++ b/spec/features/groups/labels/index_spec.rb
@@ -24,6 +24,6 @@ RSpec.describe 'Group labels' do
end
it 'shows an edit label button', :js do
- expect(page).to have_selector('.label-action.edit')
+ expect(page).to have_selector('.edit')
end
end
diff --git a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
index 38deee547a3..d31a7977f66 100644
--- a/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
+++ b/spec/features/groups/members/master_adds_member_with_expiration_date_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe 'Groups > Members > Owner adds member with expiration date', :js
let_it_be(:user1) { create(:user, name: 'John Doe') }
let_it_be(:group) { create(:group) }
+
let(:new_member) { create(:user, name: 'Mary Jane') }
before do
diff --git a/spec/features/groups/members/request_access_spec.rb b/spec/features/groups/members/request_access_spec.rb
index 307cb63ec8e..827962fee61 100644
--- a/spec/features/groups/members/request_access_spec.rb
+++ b/spec/features/groups/members/request_access_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Groups > Members > Request access' do
end
it 'request access feature is disabled' do
- group.update(request_access_enabled: false)
+ group.update!(request_access_enabled: false)
visit group_path(group)
expect(page).not_to have_content 'Request Access'
diff --git a/spec/features/groups/merge_requests_spec.rb b/spec/features/groups/merge_requests_spec.rb
index 43d4b6b23e0..f79c93157dc 100644
--- a/spec/features/groups/merge_requests_spec.rb
+++ b/spec/features/groups/merge_requests_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'Group merge requests page' do
end
it 'ignores archived merge request count badges in navbar' do
- expect(first(:link, text: 'Merge Requests').find('.badge').text).to eq("1")
+ expect(first(:link, text: 'Merge requests').find('.badge').text).to eq("1")
end
it 'ignores archived merge request count badges in state-filters' do
diff --git a/spec/features/groups/milestone_spec.rb b/spec/features/groups/milestone_spec.rb
index 8d1008b98a6..1d9ac5ee1e9 100644
--- a/spec/features/groups/milestone_spec.rb
+++ b/spec/features/groups/milestone_spec.rb
@@ -133,7 +133,7 @@ RSpec.describe 'Group milestones' do
href: project_issues_path(project, milestone_title: 'v1.0')
)
expect(page).to have_link(
- '0 Merge Requests',
+ '0 Merge requests',
href: project_merge_requests_path(project, milestone_title: 'v1.0')
)
expect(page).to have_link(
@@ -145,7 +145,7 @@ RSpec.describe 'Group milestones' do
href: issues_group_path(group, milestone_title: 'GL-113')
)
expect(page).to have_link(
- '0 Merge Requests',
+ '0 Merge requests',
href: merge_requests_group_path(group, milestone_title: 'GL-113')
)
end
@@ -179,7 +179,7 @@ RSpec.describe 'Group milestones' do
it 'renders the merge requests tab' do
within('.js-milestone-tabs') do
- click_link('Merge Requests')
+ click_link('Merge requests')
end
within('#tab-merge-requests') do
diff --git a/spec/features/groups/navbar_spec.rb b/spec/features/groups/navbar_spec.rb
index 7025874a4ff..021b1af54d4 100644
--- a/spec/features/groups/navbar_spec.rb
+++ b/spec/features/groups/navbar_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe 'Group navbar' do
]
},
{
- nav_item: _('Merge Requests'),
+ nav_item: _('Merge requests'),
nav_sub_items: []
},
(security_and_compliance_nav_item if Gitlab.ee?),
@@ -68,7 +68,7 @@ RSpec.describe 'Group navbar' do
before do
stub_config(registry: { enabled: true })
- insert_container_nav(_('Kubernetes'))
+ insert_container_nav
visit group_path(group)
end
@@ -80,7 +80,7 @@ RSpec.describe 'Group navbar' do
before do
stub_config(dependency_proxy: { enabled: true })
- insert_dependency_proxy_nav(_('Dependency Proxy'))
+ insert_dependency_proxy_nav
visit group_path(group)
end
diff --git a/spec/features/groups/settings/user_searches_in_settings_spec.rb b/spec/features/groups/settings/user_searches_in_settings_spec.rb
index 819d0c4faba..6d7a3871bb1 100644
--- a/spec/features/groups/settings/user_searches_in_settings_spec.rb
+++ b/spec/features/groups/settings/user_searches_in_settings_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe 'User searches group settings', :js do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, :repository, namespace: group) }
before do
group.add_owner(user)
@@ -13,9 +12,19 @@ RSpec.describe 'User searches group settings', :js do
end
context 'in general settings page' do
- let(:visit_path) { edit_group_path(group) }
+ before do
+ visit edit_group_path(group)
+ end
+
+ it_behaves_like 'can search settings', 'Naming', 'Permissions'
+ end
+
+ context 'in Integrations page' do
+ before do
+ visit group_settings_integrations_path(group)
+ end
- it_behaves_like 'can search settings with feature flag check', 'Naming', 'Permissions'
+ it_behaves_like 'can highlight results', 'set default configuration'
end
context 'in Repository page' do
@@ -33,4 +42,12 @@ RSpec.describe 'User searches group settings', :js do
it_behaves_like 'can search settings', 'Variables', 'Runners'
end
+
+ context 'in Packages & Registries page' do
+ before do
+ visit group_settings_packages_and_registries_path(group)
+ end
+
+ it_behaves_like 'can highlight results', 'GitLab Packages'
+ end
end
diff --git a/spec/features/groups_spec.rb b/spec/features/groups_spec.rb
index 28b22860f0a..33d2ac50628 100644
--- a/spec/features/groups_spec.rb
+++ b/spec/features/groups_spec.rb
@@ -141,6 +141,30 @@ RSpec.describe 'Group' do
end
end
end
+
+ describe 'showing recaptcha on group creation when it is enabled' do
+ before do
+ stub_application_setting(recaptcha_enabled: true)
+ allow(Gitlab::Recaptcha).to receive(:load_configurations!)
+ visit new_group_path
+ end
+
+ it 'renders recaptcha' do
+ expect(page).to have_css('.recaptcha')
+ end
+ end
+
+ describe 'not showing recaptcha on group creation when it is disabled' do
+ before do
+ stub_feature_flags(recaptcha_on_top_level_group_creation: false)
+ stub_application_setting(recaptcha_enabled: true)
+ visit new_group_path
+ end
+
+ it 'does not render recaptcha' do
+ expect(page).not_to have_css('.recaptcha')
+ end
+ end
end
describe 'create a nested group' do
@@ -189,6 +213,46 @@ RSpec.describe 'Group' do
expect(page).to have_content("Group 'bar' was successfully created.")
end
end
+
+ context 'when recaptcha is enabled' do
+ before do
+ stub_application_setting(recaptcha_enabled: true)
+ allow(Gitlab::Recaptcha).to receive(:load_configurations!)
+ end
+
+ context 'when creating subgroup' do
+ let(:path) { new_group_path(group, parent_id: group.id) }
+
+ it 'does not render recaptcha' do
+ visit path
+
+ expect(page).not_to have_css('.recaptcha')
+ end
+ end
+ end
+
+ describe 'real-time group url validation', :js do
+ let_it_be(:subgroup) { create(:group, path: 'sub', parent: group) }
+
+ before do
+ group.add_owner(user)
+ visit new_group_path(parent_id: group.id)
+ end
+
+ it 'shows a message if group url is available' do
+ fill_in 'Group URL', with: group.path
+ wait_for_requests
+
+ expect(page).to have_content('Group path is available')
+ end
+
+ it 'shows an error if group url is taken' do
+ fill_in 'Group URL', with: subgroup.path
+ wait_for_requests
+
+ expect(page).to have_content('Group path is already taken')
+ end
+ end
end
it 'checks permissions to avoid exposing groups by parent_id' do
@@ -203,6 +267,7 @@ RSpec.describe 'Group' do
describe 'group edit', :js do
let_it_be(:group) { create(:group, :public) }
+
let(:path) { edit_group_path(group) }
let(:new_name) { 'new-name' }
@@ -248,6 +313,7 @@ RSpec.describe 'Group' do
describe 'group page with markdown description' do
let_it_be(:group) { create(:group) }
+
let(:path) { group_path(group) }
before do
diff --git a/spec/features/ide/clientside_preview_csp_spec.rb b/spec/features/ide/clientside_preview_csp_spec.rb
index eadcb9cd008..559edb8bf53 100644
--- a/spec/features/ide/clientside_preview_csp_spec.rb
+++ b/spec/features/ide/clientside_preview_csp_spec.rb
@@ -7,9 +7,7 @@ RSpec.describe 'IDE Clientside Preview CSP' do
shared_context 'disable feature' do
before do
- allow_next_instance_of(ApplicationSetting) do |instance|
- allow(instance).to receive(:web_ide_clientside_preview_enabled?).and_return(false)
- end
+ stub_application_setting(web_ide_clientside_preview_enabled: false)
end
end
@@ -24,10 +22,8 @@ RSpec.describe 'IDE Clientside Preview CSP' do
end
before do
- allow_next_instance_of(ApplicationSetting) do |instance|
- allow(instance).to receive(:web_ide_clientside_preview_enabled?).and_return(true)
- allow(instance).to receive(:web_ide_clientside_preview_bundler_url).and_return(whitelisted_url)
- end
+ stub_application_setting(web_ide_clientside_preview_enabled: true)
+ stub_application_setting(web_ide_clientside_preview_bundler_url: whitelisted_url)
sign_in(user)
end
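The IDE clientside preview spec replaces per-instance stubbing with stub_application_setting. A before/after sketch of the two approaches as they appear in GitLab specs; the single helper call keeps the stub in one place and avoids depending on when ApplicationSetting instances are built:

  # Before: stub every future ApplicationSetting instance individually.
  allow_next_instance_of(ApplicationSetting) do |instance|
    allow(instance).to receive(:web_ide_clientside_preview_enabled?).and_return(true)
  end

  # After: a single helper call that overrides the cached application settings.
  stub_application_setting(web_ide_clientside_preview_enabled: true)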
diff --git a/spec/features/import/manifest_import_spec.rb b/spec/features/import/manifest_import_spec.rb
index dfd6211a683..520cf850da2 100644
--- a/spec/features/import/manifest_import_spec.rb
+++ b/spec/features/import/manifest_import_spec.rb
@@ -37,11 +37,20 @@ RSpec.describe 'Import multiple repositories by uploading a manifest file', :js
wait_for_requests
page.within(second_row) do
- expect(page).to have_content 'Done'
+ expect(page).to have_content 'Complete'
expect(page).to have_content("#{group.full_path}/build/blueprint")
end
end
+ it 'renders an error if the remote url scheme starts with javascript' do
+ visit new_import_manifest_path
+
+ attach_file('manifest', Rails.root.join('spec/fixtures/unsafe_javascript.xml'))
+ click_on 'List available repositories'
+
+ expect(page).to have_content 'Make sure the url does not start with javascript'
+ end
+
it 'renders an error if invalid file was provided' do
visit new_import_manifest_path
diff --git a/spec/features/invites_spec.rb b/spec/features/invites_spec.rb
index 2ceffa896eb..e9960802378 100644
--- a/spec/features/invites_spec.rb
+++ b/spec/features/invites_spec.rb
@@ -50,21 +50,23 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
end
it 'renders sign in page with sign in notice' do
- expect(current_path).to eq(new_user_session_path)
- expect(page).to have_content('To accept this invitation, sign in')
+ expect(current_path).to eq(new_user_registration_path)
+ expect(page).to have_content('To accept this invitation, create an account or sign in')
end
it 'pre-fills the "Username or email" field on the sign in box with the invite_email from the invite' do
+ click_link 'Sign in'
+
expect(find_field('Username or email').value).to eq(group_invite.invite_email)
end
it 'pre-fills the Email field on the sign up box with the invite_email from the invite' do
- click_link 'Register now'
-
expect(find_field('Email').value).to eq(group_invite.invite_email)
end
it 'sign in, grants access and redirects to group page' do
+ click_link 'Sign in'
+
fill_in_sign_in_form(user)
expect(current_path).to eq(group_path(group))
@@ -85,20 +87,19 @@ RSpec.describe 'Group or Project invitations', :aggregate_failures do
end
end
- context 'when inviting a user' do
+ context 'when inviting an unregistered user' do
let(:new_user) { build_stubbed(:user) }
let(:invite_email) { new_user.email }
let(:group_invite) { create(:group_member, :invited, group: group, invite_email: invite_email, created_by: owner) }
let!(:project_invite) { create(:project_member, :invited, project: project, invite_email: invite_email) }
- context 'when user has not signed in yet' do
+ context 'when registering using invitation email' do
before do
stub_application_setting(send_user_confirmation_email: send_email_confirmation)
visit invite_path(group_invite.raw_invite_token)
- click_link 'Register now'
end
- context 'with admin appoval required enabled' do
+ context 'with admin approval required enabled' do
before do
stub_application_setting(require_admin_approval_after_user_signup: true)
end
diff --git a/spec/features/issues/bulk_assignment_labels_spec.rb b/spec/features/issues/bulk_assignment_labels_spec.rb
index aa61aff3b05..80bf964e2ee 100644
--- a/spec/features/issues/bulk_assignment_labels_spec.rb
+++ b/spec/features/issues/bulk_assignment_labels_spec.rb
@@ -295,8 +295,8 @@ RSpec.describe 'Issues > Labels bulk assignment' do
before do
issue1.milestone = milestone
issue2.milestone = milestone
- issue1.save
- issue2.save
+ issue1.save!
+ issue2.save!
issue1.labels << bug
issue2.labels << feature
diff --git a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
index a4e9df604a9..34d78880991 100644
--- a/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
+++ b/spec/features/issues/create_issue_for_discussions_in_merge_request_spec.rb
@@ -18,10 +18,6 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
end
end
- before do
- stub_feature_flags(remove_resolve_note: false)
- end
-
describe 'as a user with access to the project' do
before do
project.add_maintainer(user)
@@ -37,7 +33,7 @@ RSpec.describe 'Resolving all open threads in a merge request from an issue', :j
context 'resolving the thread' do
before do
- click_button 'Resolve thread'
+ find('button[data-qa-selector="resolve_discussion_button"]').click
end
it 'hides the link for creating a new issue' do
diff --git a/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb b/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
index 99dc71f0559..ac3471e8401 100644
--- a/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
+++ b/spec/features/issues/create_issue_for_single_discussion_in_merge_request_spec.rb
@@ -14,10 +14,6 @@ RSpec.describe 'Resolve an open thread in a merge request by creating an issue',
"a[title=\"#{title}\"][href=\"#{url}\"]"
end
- before do
- stub_feature_flags(remove_resolve_note: false)
- end
-
describe 'As a user with access to the project' do
before do
project.add_maintainer(user)
@@ -39,7 +35,7 @@ RSpec.describe 'Resolve an open thread in a merge request by creating an issue',
context 'resolving the thread' do
before do
- click_button 'Resolve thread'
+ find('button[data-qa-selector="resolve_discussion_button"]').click
end
it 'hides the link for creating a new issue' do
diff --git a/spec/features/issues/form_spec.rb b/spec/features/issues/form_spec.rb
index dac066856c0..5ca20028485 100644
--- a/spec/features/issues/form_spec.rb
+++ b/spec/features/issues/form_spec.rb
@@ -156,7 +156,7 @@ RSpec.describe 'New/edit issue', :js do
expect(page.all('input[name="issue[label_ids][]"]', visible: false)[1].value).to match(label.id.to_s)
expect(page.all('input[name="issue[label_ids][]"]', visible: false)[2].value).to match(label2.id.to_s)
- click_button 'Submit issue'
+ click_button 'Create issue'
page.within '.issuable-sidebar' do
page.within '.assignee' do
diff --git a/spec/features/issues/gfm_autocomplete_spec.rb b/spec/features/issues/gfm_autocomplete_spec.rb
index e6ebc37ba59..0cefbae4d37 100644
--- a/spec/features/issues/gfm_autocomplete_spec.rb
+++ b/spec/features/issues/gfm_autocomplete_spec.rb
@@ -3,16 +3,23 @@
require 'spec_helper'
RSpec.describe 'GFM autocomplete', :js do
- let_it_be(:user_xss_title) { 'eve <img src=x onerror=alert(2)&lt;img src=x onerror=alert(1)&gt;' }
- let_it_be(:user_xss) { create(:user, name: user_xss_title, username: 'xss.user') }
let_it_be(:user) { create(:user, name: '💃speciąl someone💃', username: 'someone.special') }
let_it_be(:user2) { create(:user, name: 'Marge Simpson', username: 'msimpson') }
+
let_it_be(:group) { create(:group, name: 'Ancestor') }
let_it_be(:child_group) { create(:group, parent: group, name: 'My group') }
let_it_be(:project) { create(:project, group: child_group) }
+
+ let_it_be(:issue) { create(:issue, project: project, assignees: [user]) }
let_it_be(:label) { create(:label, project: project, title: 'special+') }
+ let_it_be(:label_scoped) { create(:label, project: project, title: 'scoped::label') }
+ let_it_be(:label_with_spaces) { create(:label, project: project, title: 'Accepting merge requests') }
+ let_it_be(:snippet) { create(:project_snippet, project: project, title: 'code snippet') }
- let(:issue) { create(:issue, project: project) }
+ let_it_be(:user_xss_title) { 'eve <img src=x onerror=alert(2)&lt;img src=x onerror=alert(1)&gt;' }
+ let_it_be(:user_xss) { create(:user, name: user_xss_title, username: 'xss.user') }
+ let_it_be(:label_xss_title) { 'alert label &lt;img src=x onerror="alert(\'Hello xss\');" a' }
+ let_it_be(:label_xss) { create(:label, project: project, title: label_xss_title) }
before_all do
project.add_maintainer(user)
@@ -21,418 +28,366 @@ RSpec.describe 'GFM autocomplete', :js do
end
describe 'when tribute_autocomplete feature flag is off' do
- before do
- stub_feature_flags(tribute_autocomplete: false)
-
- sign_in(user)
- visit project_issue_path(project, issue)
-
- wait_for_requests
- end
-
- it 'updates issue description with GFM reference' do
- click_button 'Edit title and description'
-
- wait_for_requests
-
- fill_in 'Description', with: "@#{user.name[0...3]}"
-
- wait_for_requests
-
- find_highlighted_autocomplete_item.click
-
- click_button 'Save changes'
-
- wait_for_requests
-
- expect(find('.description')).to have_text(user.to_reference)
- end
-
- it 'opens quick action autocomplete when updating description' do
- click_button 'Edit title and description'
-
- fill_in 'Description', with: '/'
-
- expect(find_autocomplete_menu).to be_visible
- end
-
- it 'opens autocomplete menu when field starts with text' do
- fill_in 'Comment', with: '@'
-
- expect(find_autocomplete_menu).to be_visible
- end
-
- it 'opens autocomplete menu for Issues when field starts with text with item escaping HTML characters' do
- issue_xss_title = 'This will execute alert<img src=x onerror=alert(2)&lt;img src=x onerror=alert(1)&gt;'
- create(:issue, project: project, title: issue_xss_title)
-
- fill_in 'Comment', with: '#'
-
- wait_for_requests
+ describe 'new issue page' do
+ before do
+ stub_feature_flags(tribute_autocomplete: false)
- expect(find_autocomplete_menu).to have_text(issue_xss_title)
- end
+ sign_in(user)
+ visit new_project_issue_path(project)
- it 'opens autocomplete menu for Username when field starts with text with item escaping HTML characters' do
- fill_in 'Comment', with: '@ev'
+ wait_for_requests
+ end
- wait_for_requests
+ it 'allows quick actions' do
+ fill_in 'Description', with: '/'
- expect(find_highlighted_autocomplete_item).to have_text(user_xss.username)
+ expect(find_autocomplete_menu).to be_visible
+ end
end
- it 'opens autocomplete menu for Milestone when field starts with text with item escaping HTML characters' do
- milestone_xss_title = 'alert milestone &lt;img src=x onerror="alert(\'Hello xss\');" a'
- create(:milestone, project: project, title: milestone_xss_title)
+ describe 'issue description' do
+ let(:issue_to_edit) { create(:issue, project: project) }
- fill_in 'Comment', with: '%'
+ before do
+ stub_feature_flags(tribute_autocomplete: false)
- wait_for_requests
+ sign_in(user)
+ visit project_issue_path(project, issue_to_edit)
- expect(find_autocomplete_menu).to have_text('alert milestone')
- end
+ wait_for_requests
+ end
- it 'doesnt open autocomplete menu character is prefixed with text' do
- fill_in 'Comment', with: 'testing@'
+ it 'updates with GFM reference' do
+ click_button 'Edit title and description'
- expect(page).not_to have_css('.atwho-view')
- end
+ wait_for_requests
- it 'doesnt select the first item for non-assignee dropdowns' do
- fill_in 'Comment', with: ':'
+ fill_in 'Description', with: "@#{user.name[0...3]}"
- wait_for_requests
+ wait_for_requests
- expect(find_autocomplete_menu).not_to have_css('.cur')
- end
+ find_highlighted_autocomplete_item.click
- it 'does not open autocomplete menu when ":" is prefixed by a number and letters' do
- # Number.
- fill_in 'Comment', with: '7:'
- expect(page).not_to have_css('.atwho-view')
+ click_button 'Save changes'
- # ASCII letter.
- fill_in 'Comment', with: 'w:'
- expect(page).not_to have_css('.atwho-view')
+ wait_for_requests
- # Non-ASCII letter.
- fill_in 'Comment', with: 'Ё:'
- expect(page).not_to have_css('.atwho-view')
- end
+ expect(find('.description')).to have_text(user.to_reference)
+ end
- it 'selects the first item for assignee dropdowns' do
- fill_in 'Comment', with: '@'
+ it 'allows quick actions' do
+ click_button 'Edit title and description'
- wait_for_requests
+ fill_in 'Description', with: '/'
- expect(find_autocomplete_menu).to have_css('.cur:first-of-type')
+ expect(find_autocomplete_menu).to be_visible
+ end
end
- it 'includes items for assignee dropdowns with non-ASCII characters in name' do
- fill_in 'Comment', with: "@#{user.name[0...8]}"
+ describe 'issue comment' do
+ before do
+ stub_feature_flags(tribute_autocomplete: false)
- wait_for_requests
+ sign_in(user)
+ visit project_issue_path(project, issue)
- expect(find_autocomplete_menu).to have_text(user.name)
- end
+ wait_for_requests
+ end
- it 'searches across full name for assignees' do
- fill_in 'Comment', with: '@speciąlsome'
+ describe 'triggering autocomplete' do
+ it 'only opens autocomplete menu when trigger character is after whitespace', :aggregate_failures do
+ fill_in 'Comment', with: 'testing@'
+ expect(page).not_to have_css('.atwho-view')
- wait_for_requests
+ fill_in 'Comment', with: '@@'
+ expect(page).not_to have_css('.atwho-view')
- expect(find_highlighted_autocomplete_item).to have_text(user.name)
- end
+ fill_in 'Comment', with: "@#{user.username[0..2]}!"
+ expect(page).not_to have_css('.atwho-view')
- it 'shows names that start with the query as the top result' do
- fill_in 'Comment', with: '@mar'
+ fill_in 'Comment', with: "hello:#{user.username[0..2]}"
+ expect(page).not_to have_css('.atwho-view')
- wait_for_requests
+ fill_in 'Comment', with: '7:'
+ expect(page).not_to have_css('.atwho-view')
- expect(find_highlighted_autocomplete_item).to have_text(user2.name)
- end
-
- it 'shows usernames that start with the query as the top result' do
- fill_in 'Comment', with: '@msi'
+ fill_in 'Comment', with: 'w:'
+ expect(page).not_to have_css('.atwho-view')
- wait_for_requests
+ fill_in 'Comment', with: 'Ё:'
+ expect(page).not_to have_css('.atwho-view')
- expect(find_highlighted_autocomplete_item).to have_text(user2.name)
- end
+ fill_in 'Comment', with: "test\n\n@"
+ expect(find_autocomplete_menu).to be_visible
+ end
+ end
- # Regression test for https://gitlab.com/gitlab-org/gitlab/-/issues/321925
- it 'shows username when pasting then pressing Enter' do
- fill_in 'Comment', with: "@#{user.username}\n"
+ context 'xss checks' do
+ it 'opens autocomplete menu for Issues when field starts with text with item escaping HTML characters' do
+ issue_xss_title = 'This will execute alert<img src=x onerror=alert(2)&lt;img src=x onerror=alert(1)&gt;'
+ create(:issue, project: project, title: issue_xss_title)
- expect(find_field('Comment').value).to have_text "@#{user.username}"
- end
+ fill_in 'Comment', with: '#'
- it 'does not show `@undefined` when pressing `@` then Enter' do
- fill_in 'Comment', with: "@\n"
+ wait_for_requests
- expect(find_field('Comment').value).to have_text '@'
- expect(find_field('Comment').value).not_to have_text '@undefined'
- end
+ expect(find_autocomplete_menu).to have_text(issue_xss_title)
+ end
- it 'selects the first item for non-assignee dropdowns if a query is entered' do
- fill_in 'Comment', with: ':1'
+ it 'opens autocomplete menu for Username when field starts with text with item escaping HTML characters' do
+ fill_in 'Comment', with: '@ev'
- wait_for_requests
+ wait_for_requests
- expect(find_autocomplete_menu).to have_css('.cur:first-of-type')
- end
+ expect(find_highlighted_autocomplete_item).to have_text(user_xss.username)
+ end
- context 'if a selected value has special characters' do
- it 'wraps the result in double quotes' do
- fill_in 'Comment', with: "~#{label.title[0]}"
+ it 'opens autocomplete menu for Milestone when field starts with text with item escaping HTML characters' do
+ milestone_xss_title = 'alert milestone &lt;img src=x onerror="alert(\'Hello xss\');" a'
+ create(:milestone, project: project, title: milestone_xss_title)
- find_highlighted_autocomplete_item.click
+ fill_in 'Comment', with: '%'
- expect(find_field('Comment').value).to have_text("~\"#{label.title}\"")
- end
+ wait_for_requests
- it "shows dropdown after a new line" do
- fill_in 'Comment', with: "test\n\n@"
+ expect(find_autocomplete_menu).to have_text('alert milestone')
+ end
- expect(find_autocomplete_menu).to be_visible
- end
+ it 'opens autocomplete menu for Labels when field starts with text with item escaping HTML characters' do
+ fill_in 'Comment', with: '~'
- it "does not show dropdown when preceded with a special character" do
- fill_in 'Comment', with: '@@'
+ wait_for_requests
- expect(page).not_to have_css('.atwho-view')
+ expect(find_autocomplete_menu).to have_text('alert label')
+ end
end
- it 'doesn\'t wrap for assignee values' do
- fill_in 'Comment', with: "@#{user.username[0]}"
+ describe 'autocomplete highlighting' do
+ it 'auto-selects the first item when there is a query, and only for assignees with no query', :aggregate_failures do
+ fill_in 'Comment', with: ':'
+ wait_for_requests
+ expect(find_autocomplete_menu).not_to have_css('.cur')
- find_highlighted_autocomplete_item.click
+ fill_in 'Comment', with: ':1'
+ wait_for_requests
+ expect(find_autocomplete_menu).to have_css('.cur:first-of-type')
- expect(find_field('Comment').value).to have_text("@#{user.username}")
+ fill_in 'Comment', with: '@'
+ wait_for_requests
+ expect(find_autocomplete_menu).to have_css('.cur:first-of-type')
+ end
end
- it 'doesn\'t wrap for emoji values' do
- fill_in 'Comment', with: ':cartwheel_'
-
- find_highlighted_autocomplete_item.click
+ describe 'assignees' do
+ it 'does not wrap with quotes for assignee values' do
+ fill_in 'Comment', with: "@#{user.username[0]}"
- expect(find_field('Comment').value).to have_text('cartwheel_tone1')
- end
+ find_highlighted_autocomplete_item.click
- it 'doesn\'t open autocomplete after non-word character' do
- fill_in 'Comment', with: "@#{user.username[0..2]}!"
+ expect(find_field('Comment').value).to have_text("@#{user.username}")
+ end
- expect(page).not_to have_css('.atwho-view')
- end
+ it 'includes items for assignee dropdowns with non-ASCII characters in name' do
+ fill_in 'Comment', with: "@#{user.name[0...8]}"
- it 'doesn\'t open autocomplete if there is no space before' do
- fill_in 'Comment', with: "hello:#{user.username[0..2]}"
+ wait_for_requests
- expect(page).not_to have_css('.atwho-view')
- end
+ expect(find_autocomplete_menu).to have_text(user.name)
+ end
- it 'triggers autocomplete after selecting a quick action' do
- fill_in 'Comment', with: '/as'
+ it 'searches across full name for assignees' do
+ fill_in 'Comment', with: '@speciąlsome'
- find_highlighted_autocomplete_item.click
+ wait_for_requests
- expect(find_autocomplete_menu).to have_text(user.username)
- end
+ expect(find_highlighted_autocomplete_item).to have_text(user.name)
+ end
- it 'does not limit quick actions autocomplete list to 5' do
- fill_in 'Comment', with: '/'
+ it 'shows names that start with the query as the top result' do
+ fill_in 'Comment', with: '@mar'
- expect(find_autocomplete_menu).to have_css('li', minimum: 6)
- end
- end
+ wait_for_requests
- context 'assignees' do
- let(:issue_assignee) { create(:issue, project: project) }
- let(:unassigned_user) { create(:user) }
+ expect(find_highlighted_autocomplete_item).to have_text(user2.name)
+ end
- before do
- issue_assignee.update(assignees: [user])
+ it 'shows usernames that start with the query as the top result' do
+ fill_in 'Comment', with: '@msi'
- project.add_maintainer(unassigned_user)
- end
+ wait_for_requests
- it 'lists users who are currently not assigned to the issue when using /assign' do
- visit project_issue_path(project, issue_assignee)
+ expect(find_highlighted_autocomplete_item).to have_text(user2.name)
+ end
- fill_in 'Comment', with: '/as'
+ # Regression test for https://gitlab.com/gitlab-org/gitlab/-/issues/321925
+ it 'shows username when pasting then pressing Enter' do
+ fill_in 'Comment', with: "@#{user.username}\n"
- find_highlighted_autocomplete_item.click
+ expect(find_field('Comment').value).to have_text "@#{user.username}"
+ end
- expect(find_autocomplete_menu).not_to have_text(user.username)
- expect(find_autocomplete_menu).to have_text(unassigned_user.username)
- end
+ it 'does not show `@undefined` when pressing `@` then Enter' do
+ fill_in 'Comment', with: "@\n"
- it 'shows dropdown on new issue form' do
- visit new_project_issue_path(project)
+ expect(find_field('Comment').value).to have_text '@'
+ expect(find_field('Comment').value).not_to have_text '@undefined'
+ end
- fill_in 'Description', with: '/ass'
+ context 'when /assign quick action is selected' do
+ it 'triggers user autocomplete and lists users who are currently not assigned to the issue' do
+ fill_in 'Comment', with: '/as'
- find_highlighted_autocomplete_item.click
+ find_highlighted_autocomplete_item.click
- expect(find_autocomplete_menu).to have_text(unassigned_user.username)
- expect(find_autocomplete_menu).to have_text(user.username)
+ expect(find_autocomplete_menu).not_to have_text(user.username)
+ expect(find_autocomplete_menu).to have_text(user2.username)
+ end
+ end
end
- end
-
- context 'labels' do
- it 'opens autocomplete menu for Labels when field starts with text with item escaping HTML characters' do
- label_xss_title = 'alert label &lt;img src=x onerror="alert(\'Hello xss\');" a'
- create(:label, project: project, title: label_xss_title)
- fill_in 'Comment', with: '~'
-
- wait_for_requests
+ context 'if a selected value has special characters' do
+ it 'wraps the result in double quotes' do
+ fill_in 'Comment', with: "~#{label.title[0..2]}"
- expect(find_autocomplete_menu).to have_text('alert label')
- end
+ find_highlighted_autocomplete_item.click
- it 'allows colons when autocompleting scoped labels' do
- create(:label, project: project, title: 'scoped:label')
+ expect(find_field('Comment').value).to have_text("~\"#{label.title}\"")
+ end
- fill_in 'Comment', with: '~scoped:'
+ it 'doesn\'t wrap for emoji values' do
+ fill_in 'Comment', with: ':cartwheel_'
- wait_for_requests
+ find_highlighted_autocomplete_item.click
- expect(find_autocomplete_menu).to have_text('scoped:label')
+ expect(find_field('Comment').value).to have_text('cartwheel_tone1')
+ end
end
- it 'allows colons when autocompleting scoped labels with double colons' do
- create(:label, project: project, title: 'scoped::label')
+ context 'quick actions' do
+ it 'does not limit quick actions autocomplete list to 5' do
+ fill_in 'Comment', with: '/'
- fill_in 'Comment', with: '~scoped::'
-
- wait_for_requests
-
- expect(find_autocomplete_menu).to have_text('scoped::label')
+ expect(find_autocomplete_menu).to have_css('li', minimum: 6)
+ end
end
- it 'allows spaces when autocompleting multi-word labels' do
- create(:label, project: project, title: 'Accepting merge requests')
+ context 'labels' do
+ it 'allows colons when autocompleting scoped labels' do
+ fill_in 'Comment', with: '~scoped:'
- fill_in 'Comment', with: '~Accepting merge'
+ wait_for_requests
- wait_for_requests
+ expect(find_autocomplete_menu).to have_text('scoped::label')
+ end
- expect(find_autocomplete_menu).to have_text('Accepting merge requests')
- end
+ it 'allows spaces when autocompleting multi-word labels' do
+ fill_in 'Comment', with: '~Accepting merge'
- it 'only autocompletes the latest label' do
- create(:label, project: project, title: 'Accepting merge requests')
- create(:label, project: project, title: 'Accepting job applicants')
+ wait_for_requests
- fill_in 'Comment', with: '~Accepting merge requests foo bar ~Accepting job'
+ expect(find_autocomplete_menu).to have_text('Accepting merge requests')
+ end
- wait_for_requests
+ it 'only autocompletes the last label' do
+ fill_in 'Comment', with: '~scoped:: foo bar ~Accepting merge'
- expect(find_autocomplete_menu).to have_text('Accepting job applicants')
- end
+ wait_for_requests
- it 'does not autocomplete labels if no tilde is typed' do
- create(:label, project: project, title: 'Accepting merge requests')
+ expect(find_autocomplete_menu).to have_text('Accepting merge requests')
+ end
- fill_in 'Comment', with: 'Accepting merge'
+ it 'does not autocomplete labels if no tilde is typed' do
+ fill_in 'Comment', with: 'Accepting merge'
- wait_for_requests
+ wait_for_requests
- expect(page).not_to have_css('.atwho-view')
+ expect(page).not_to have_css('.atwho-view')
+ end
end
- end
- context 'when other notes are destroyed' do
- let!(:discussion) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
+ context 'when other notes are destroyed' do
+ let!(:discussion) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
- # This is meant to protect against this issue https://gitlab.com/gitlab-org/gitlab/-/issues/228729
- it 'keeps autocomplete key listeners' do
- visit project_issue_path(project, issue)
- note = find_field('Comment')
+ # This is meant to protect against this issue https://gitlab.com/gitlab-org/gitlab/-/issues/228729
+ it 'keeps autocomplete key listeners' do
+ note = find_field('Comment')
- start_comment_with_emoji(note, '.atwho-view li')
+ start_comment_with_emoji(note, '.atwho-view li')
- start_and_cancel_discussion
+ start_and_cancel_discussion
- note.fill_in(with: '')
- start_comment_with_emoji(note, '.atwho-view li')
- note.native.send_keys(:enter)
+ note.fill_in(with: '')
+ start_comment_with_emoji(note, '.atwho-view li')
+ note.native.send_keys(:enter)
- expect(note.value).to eql('Hello :100: ')
+ expect(note.value).to eql('Hello :100: ')
+ end
end
- end
- shared_examples 'autocomplete suggestions' do
- it 'suggests objects correctly' do
- fill_in 'Comment', with: object.class.reference_prefix
+ shared_examples 'autocomplete suggestions' do
+ it 'suggests objects correctly' do
+ fill_in 'Comment', with: object.class.reference_prefix
- find_autocomplete_menu.find('li').click
+ find_autocomplete_menu.find('li').click
- expect(find_field('Comment').value).to have_text(expected_body)
+ expect(find_field('Comment').value).to have_text(expected_body)
+ end
end
- end
- context 'issues' do
- let(:object) { issue }
- let(:expected_body) { object.to_reference }
+ context 'issues' do
+ let(:object) { issue }
+ let(:expected_body) { object.to_reference }
- it_behaves_like 'autocomplete suggestions'
- end
-
- context 'merge requests' do
- let(:object) { create(:merge_request, source_project: project) }
- let(:expected_body) { object.to_reference }
-
- it_behaves_like 'autocomplete suggestions'
- end
+ it_behaves_like 'autocomplete suggestions'
+ end
- context 'project snippets' do
- let!(:object) { create(:project_snippet, project: project, title: 'code snippet') }
- let(:expected_body) { object.to_reference }
+ context 'merge requests' do
+ let(:object) { create(:merge_request, source_project: project) }
+ let(:expected_body) { object.to_reference }
- it_behaves_like 'autocomplete suggestions'
- end
+ it_behaves_like 'autocomplete suggestions'
+ end
- context 'label' do
- let!(:object) { label }
- let(:expected_body) { object.title }
+ context 'project snippets' do
+ let!(:object) { snippet }
+ let(:expected_body) { object.to_reference }
- it_behaves_like 'autocomplete suggestions'
- end
+ it_behaves_like 'autocomplete suggestions'
+ end
- context 'milestone' do
- let_it_be(:milestone_expired) { create(:milestone, project: project, due_date: 5.days.ago) }
- let_it_be(:milestone_no_duedate) { create(:milestone, project: project, title: 'Foo - No due date') }
- let_it_be(:milestone1) { create(:milestone, project: project, title: 'Milestone-1', due_date: 20.days.from_now) }
- let_it_be(:milestone2) { create(:milestone, project: project, title: 'Milestone-2', due_date: 15.days.from_now) }
- let_it_be(:milestone3) { create(:milestone, project: project, title: 'Milestone-3', due_date: 10.days.from_now) }
+ context 'milestone' do
+ let_it_be(:milestone_expired) { create(:milestone, project: project, due_date: 5.days.ago) }
+ let_it_be(:milestone_no_duedate) { create(:milestone, project: project, title: 'Foo - No due date') }
+ let_it_be(:milestone1) { create(:milestone, project: project, title: 'Milestone-1', due_date: 20.days.from_now) }
+ let_it_be(:milestone2) { create(:milestone, project: project, title: 'Milestone-2', due_date: 15.days.from_now) }
+ let_it_be(:milestone3) { create(:milestone, project: project, title: 'Milestone-3', due_date: 10.days.from_now) }
- before do
- fill_in 'Comment', with: '/milestone %'
+ before do
+ fill_in 'Comment', with: '/milestone %'
- wait_for_requests
- end
+ wait_for_requests
+ end
- it 'shows milestons list in the autocomplete menu' do
- page.within(find_autocomplete_menu) do
- expect(page).to have_selector('li', count: 5)
+          it 'shows milestones list in the autocomplete menu' do
+ page.within(find_autocomplete_menu) do
+ expect(page).to have_selector('li', count: 5)
+ end
end
- end
- it 'shows expired milestone at the bottom of the list' do
- page.within(find_autocomplete_menu) do
- expect(page.find('li:last-child')).to have_content milestone_expired.title
+ it 'shows expired milestone at the bottom of the list' do
+ page.within(find_autocomplete_menu) do
+ expect(page.find('li:last-child')).to have_content milestone_expired.title
+ end
end
- end
- it 'shows milestone due earliest at the top of the list' do
- page.within(find_autocomplete_menu) do
- aggregate_failures do
- expect(page.all('li')[0]).to have_content milestone3.title
- expect(page.all('li')[1]).to have_content milestone2.title
- expect(page.all('li')[2]).to have_content milestone1.title
- expect(page.all('li')[3]).to have_content milestone_no_duedate.title
+ it 'shows milestone due earliest at the top of the list' do
+ page.within(find_autocomplete_menu) do
+ aggregate_failures do
+ expect(page.all('li')[0]).to have_content milestone3.title
+ expect(page.all('li')[1]).to have_content milestone2.title
+ expect(page.all('li')[2]).to have_content milestone1.title
+ expect(page.all('li')[3]).to have_content milestone_no_duedate.title
+ end
end
end
end
@@ -440,346 +395,303 @@ RSpec.describe 'GFM autocomplete', :js do
end
describe 'when tribute_autocomplete feature flag is on' do
- before do
- stub_feature_flags(tribute_autocomplete: true)
-
- sign_in(user)
- visit project_issue_path(project, issue)
-
- wait_for_requests
- end
+ describe 'issue description' do
+ let(:issue_to_edit) { create(:issue, project: project) }
- it 'updates issue description with GFM reference' do
- click_button 'Edit title and description'
-
- wait_for_requests
-
- fill_in 'Description', with: "@#{user.name[0...3]}"
-
- wait_for_requests
+ before do
+ stub_feature_flags(tribute_autocomplete: true)
- find_highlighted_tribute_autocomplete_menu.click
+ sign_in(user)
+ visit project_issue_path(project, issue_to_edit)
- click_button 'Save changes'
+ wait_for_requests
+ end
- wait_for_requests
+ it 'updates with GFM reference' do
+ click_button 'Edit title and description'
- expect(find('.description')).to have_text(user.to_reference)
- end
+ wait_for_requests
- it 'opens autocomplete menu when field starts with text' do
- fill_in 'Comment', with: '@'
+ fill_in 'Description', with: "@#{user.name[0...3]}"
- expect(find_tribute_autocomplete_menu).to be_visible
- end
+ wait_for_requests
- it 'opens autocomplete menu for Issues when field starts with text with item escaping HTML characters' do
- issue_xss_title = 'This will execute alert<img src=x onerror=alert(2)&lt;img src=x onerror=alert(1)&gt;'
- create(:issue, project: project, title: issue_xss_title)
+ find_highlighted_tribute_autocomplete_menu.click
- fill_in 'Comment', with: '#'
+ click_button 'Save changes'
- wait_for_requests
+ wait_for_requests
- expect(find_tribute_autocomplete_menu).to have_text(issue_xss_title)
+ expect(find('.description')).to have_text(user.to_reference)
+ end
end
- it 'opens autocomplete menu for Username when field starts with text with item escaping HTML characters' do
- fill_in 'Comment', with: '@ev'
-
- wait_for_requests
-
- expect(find_tribute_autocomplete_menu).to have_text(user_xss.username)
- end
+ describe 'issue comment' do
+ before do
+ stub_feature_flags(tribute_autocomplete: true)
- it 'opens autocomplete menu for Milestone when field starts with text with item escaping HTML characters' do
- milestone_xss_title = 'alert milestone &lt;img src=x onerror="alert(\'Hello xss\');" a'
- create(:milestone, project: project, title: milestone_xss_title)
+ sign_in(user)
+ visit project_issue_path(project, issue)
- fill_in 'Comment', with: '%'
+ wait_for_requests
+ end
- wait_for_requests
+ describe 'triggering autocomplete' do
+ it 'only opens autocomplete menu when trigger character is after whitespace', :aggregate_failures do
+ fill_in 'Comment', with: 'testing@'
+ expect(page).not_to have_css('.tribute-container')
- expect(find_tribute_autocomplete_menu).to have_text('alert milestone')
- end
+ fill_in 'Comment', with: "hello:#{user.username[0..2]}"
+ expect(page).not_to have_css('.tribute-container')
- it 'does not open autocomplete menu when trigger character is prefixed with text' do
- fill_in 'Comment', with: 'testing@'
+ fill_in 'Comment', with: '7:'
+ expect(page).not_to have_css('.tribute-container')
- expect(page).not_to have_css('.tribute-container')
- end
+ fill_in 'Comment', with: 'w:'
+ expect(page).not_to have_css('.tribute-container')
- it 'does not open autocomplete menu when ":" is prefixed by a number and letters' do
- # Number.
- fill_in 'Comment', with: '7:'
- expect(page).not_to have_css('.tribute-container')
+ fill_in 'Comment', with: 'Ё:'
+ expect(page).not_to have_css('.tribute-container')
- # ASCII letter.
- fill_in 'Comment', with: 'w:'
- expect(page).not_to have_css('.tribute-container')
+ fill_in 'Comment', with: "test\n\n@"
+ expect(find_tribute_autocomplete_menu).to be_visible
+ end
+ end
- # Non-ASCII letter.
- fill_in 'Comment', with: 'Ё:'
- expect(page).not_to have_css('.tribute-container')
- end
+ context 'xss checks' do
+ it 'opens autocomplete menu for Issues when field starts with text with item escaping HTML characters' do
+ issue_xss_title = 'This will execute alert<img src=x onerror=alert(2)&lt;img src=x onerror=alert(1)&gt;'
+ create(:issue, project: project, title: issue_xss_title)
- it 'selects the first item for assignee dropdowns' do
- fill_in 'Comment', with: '@'
+ fill_in 'Comment', with: '#'
- wait_for_requests
+ wait_for_requests
- expect(find_tribute_autocomplete_menu).to have_css('.highlight:first-of-type')
- end
+ expect(find_tribute_autocomplete_menu).to have_text(issue_xss_title)
+ end
- it 'includes items for assignee dropdowns with non-ASCII characters in name' do
- fill_in 'Comment', with: "@#{user.name[0...8]}"
+ it 'opens autocomplete menu for Username when field starts with text with item escaping HTML characters' do
+ fill_in 'Comment', with: '@ev'
- wait_for_requests
+ wait_for_requests
- expect(find_tribute_autocomplete_menu).to have_text(user.name)
- end
+ expect(find_tribute_autocomplete_menu).to have_text(user_xss.username)
+ end
- it 'selects the first item for non-assignee dropdowns if a query is entered' do
- fill_in 'Comment', with: ':1'
+ it 'opens autocomplete menu for Milestone when field starts with text with item escaping HTML characters' do
+ milestone_xss_title = 'alert milestone &lt;img src=x onerror="alert(\'Hello xss\');" a'
+ create(:milestone, project: project, title: milestone_xss_title)
- wait_for_requests
+ fill_in 'Comment', with: '%'
- expect(find_tribute_autocomplete_menu).to have_css('.highlight:first-of-type')
- end
+ wait_for_requests
- context 'when autocompleting for groups' do
- it 'shows the group when searching for the name of the group' do
- fill_in 'Comment', with: '@mygroup'
+ expect(find_tribute_autocomplete_menu).to have_text('alert milestone')
+ end
- expect(find_tribute_autocomplete_menu).to have_text('My group')
- end
+ it 'opens autocomplete menu for Labels when field starts with text with item escaping HTML characters' do
+ fill_in 'Comment', with: '~'
- it 'does not show the group when searching for the name of the parent of the group' do
- fill_in 'Comment', with: '@ancestor'
+ wait_for_requests
- expect(find_tribute_autocomplete_menu).not_to have_text('My group')
+ expect(find_tribute_autocomplete_menu).to have_text('alert label')
+ end
end
- end
- context 'if a selected value has special characters' do
- it 'wraps the result in double quotes' do
- fill_in 'Comment', with: "~#{label.title[0]}"
+ describe 'autocomplete highlighting' do
+ it 'auto-selects the first item with query', :aggregate_failures do
+ fill_in 'Comment', with: ':1'
+ wait_for_requests
+ expect(find_tribute_autocomplete_menu).to have_css('.highlight:first-of-type')
- find_highlighted_tribute_autocomplete_menu.click
-
- expect(find_field('Comment').value).to have_text("~\"#{label.title}\"")
+ fill_in 'Comment', with: '@'
+ wait_for_requests
+ expect(find_tribute_autocomplete_menu).to have_css('.highlight:first-of-type')
+ end
end
- it "shows dropdown after a new line" do
- fill_in 'Comment', with: "test\n\n@"
-
- expect(find_tribute_autocomplete_menu).to be_visible
- end
+ describe 'assignees' do
+ it 'does not wrap with quotes for assignee values' do
+ fill_in 'Comment', with: "@#{user.username[0..2]}"
- it 'doesn\'t wrap for assignee values' do
- fill_in 'Comment', with: "@#{user.username[0..2]}"
+ find_highlighted_tribute_autocomplete_menu.click
- find_highlighted_tribute_autocomplete_menu.click
+ expect(find_field('Comment').value).to have_text("@#{user.username}")
+ end
- expect(find_field('Comment').value).to have_text("@#{user.username}")
- end
+ it 'includes items for assignee dropdowns with non-ASCII characters in name' do
+ fill_in 'Comment', with: "@#{user.name[0...8]}"
- it 'does not wrap for emoji values' do
- fill_in 'Comment', with: ':cartwheel_'
+ wait_for_requests
- find_highlighted_tribute_autocomplete_menu.click
+ expect(find_tribute_autocomplete_menu).to have_text(user.name)
+ end
- expect(find_field('Comment').value).to have_text('cartwheel_tone1')
- end
+ context 'when autocompleting for groups' do
+ it 'shows the group when searching for the name of the group' do
+ fill_in 'Comment', with: '@mygroup'
- it 'does not open autocomplete if there is no space before' do
- fill_in 'Comment', with: "hello:#{user.username[0..2]}"
+ expect(find_tribute_autocomplete_menu).to have_text('My group')
+ end
- expect(page).not_to have_css('.tribute-container')
- end
+ it 'does not show the group when searching for the name of the parent of the group' do
+ fill_in 'Comment', with: '@ancestor'
- it 'autocompletes for quick actions' do
- fill_in 'Comment', with: '/as'
+ expect(find_tribute_autocomplete_menu).not_to have_text('My group')
+ end
+ end
- find_highlighted_tribute_autocomplete_menu.click
+ context 'when /assign quick action is selected' do
+ it 'lists users who are currently not assigned to the issue' do
+ note = find_field('Comment')
+ note.native.send_keys('/assign ')
+            # The `/assign` ajax response might overwrite the `@` autocomplete response below and fail the test,
+            # so wait for the `/assign` request to finish before typing `@`
+ wait_for_requests
+ note.native.send_keys('@')
+ wait_for_requests
+
+ expect(find_tribute_autocomplete_menu).not_to have_text(user.username)
+ expect(find_tribute_autocomplete_menu).to have_text(user2.username)
+ end
- expect(find_field('Comment').value).to have_text('/assign')
+ it 'lists users who are currently not assigned to the issue when using /assign on the second line' do
+ note = find_field('Comment')
+ note.native.send_keys('/assign @user2')
+ note.native.send_keys(:enter)
+ note.native.send_keys('/assign ')
+            # The `/assign` ajax response might overwrite the `@` autocomplete response below and fail the test,
+            # so wait for the `/assign` request to finish before typing `@`
+ wait_for_requests
+ note.native.send_keys('@')
+ wait_for_requests
+
+ expect(find_tribute_autocomplete_menu).not_to have_text(user.username)
+ expect(find_tribute_autocomplete_menu).to have_text(user2.username)
+ end
+ end
end
- end
- context 'assignees' do
- let(:issue_assignee) { create(:issue, project: project) }
- let(:unassigned_user) { create(:user) }
+ context 'if a selected value has special characters' do
+ it 'wraps the result in double quotes' do
+ fill_in 'Comment', with: "~#{label.title[0..2]}"
- before do
- issue_assignee.update(assignees: [user])
+ find_highlighted_tribute_autocomplete_menu.click
- project.add_maintainer(unassigned_user)
- end
+ expect(find_field('Comment').value).to have_text("~\"#{label.title}\"")
+ end
- it 'lists users who are currently not assigned to the issue when using /assign' do
- visit project_issue_path(project, issue_assignee)
+ it 'does not wrap for emoji values' do
+ fill_in 'Comment', with: ':cartwheel_'
- note = find_field('Comment')
- note.native.send_keys('/assign ')
- # The `/assign` ajax response might replace the one by `@` below causing a failed test
- # so we need to wait for the `/assign` ajax request to finish first
- wait_for_requests
- note.native.send_keys('@')
- wait_for_requests
+ find_highlighted_tribute_autocomplete_menu.click
- expect(find_tribute_autocomplete_menu).not_to have_text(user.username)
- expect(find_tribute_autocomplete_menu).to have_text(unassigned_user.username)
+ expect(find_field('Comment').value).to have_text('cartwheel_tone1')
+ end
end
- it 'lists users who are currently not assigned to the issue when using /assign on the second line' do
- visit project_issue_path(project, issue_assignee)
+ context 'quick actions' do
+ it 'autocompletes for quick actions' do
+ fill_in 'Comment', with: '/as'
- note = find_field('Comment')
- note.native.send_keys('/assign @user2')
- note.native.send_keys(:enter)
- note.native.send_keys('/assign ')
- # The `/assign` ajax response might replace the one by `@` below causing a failed test
- # so we need to wait for the `/assign` ajax request to finish first
- wait_for_requests
- note.native.send_keys('@')
- wait_for_requests
+ find_highlighted_tribute_autocomplete_menu.click
- expect(find_tribute_autocomplete_menu).not_to have_text(user.username)
- expect(find_tribute_autocomplete_menu).to have_text(unassigned_user.username)
+ expect(find_field('Comment').value).to have_text('/assign')
+ end
end
- end
- context 'labels' do
- it 'opens autocomplete menu for Labels when field starts with text with item escaping HTML characters' do
- label_xss_title = 'alert label &lt;img src=x onerror="alert(\'Hello xss\');" a'
- create(:label, project: project, title: label_xss_title)
+ context 'labels' do
+ it 'allows colons when autocompleting scoped labels' do
+ fill_in 'Comment', with: '~scoped:'
- fill_in 'Comment', with: '~'
+ wait_for_requests
- wait_for_requests
+ expect(find_tribute_autocomplete_menu).to have_text('scoped::label')
+ end
- expect(find_tribute_autocomplete_menu).to have_text('alert label')
- end
+ it 'autocompletes multi-word labels' do
+ fill_in 'Comment', with: '~Acceptingmerge'
- it 'allows colons when autocompleting scoped labels' do
- create(:label, project: project, title: 'scoped:label')
+ wait_for_requests
- fill_in 'Comment', with: '~scoped:'
+ expect(find_tribute_autocomplete_menu).to have_text('Accepting merge requests')
+ end
- wait_for_requests
+ it 'only autocompletes the last label' do
+ fill_in 'Comment', with: '~scoped:: foo bar ~Acceptingmerge'
+ # Invoke autocompletion
+ find_field('Comment').native.send_keys(:right)
- expect(find_tribute_autocomplete_menu).to have_text('scoped:label')
- end
+ wait_for_requests
- it 'allows colons when autocompleting scoped labels with double colons' do
- create(:label, project: project, title: 'scoped::label')
+ expect(find_tribute_autocomplete_menu).to have_text('Accepting merge requests')
+ end
- fill_in 'Comment', with: '~scoped::'
+ it 'does not autocomplete labels if no tilde is typed' do
+ fill_in 'Comment', with: 'Accepting'
- wait_for_requests
+ wait_for_requests
- expect(find_tribute_autocomplete_menu).to have_text('scoped::label')
+ expect(page).not_to have_css('.tribute-container')
+ end
end
- it 'autocompletes multi-word labels' do
- create(:label, project: project, title: 'Accepting merge requests')
+ context 'when other notes are destroyed' do
+ let!(:discussion) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
- fill_in 'Comment', with: '~Acceptingmerge'
+ # This is meant to protect against this issue https://gitlab.com/gitlab-org/gitlab/-/issues/228729
+ it 'keeps autocomplete key listeners' do
+ note = find_field('Comment')
- wait_for_requests
-
- expect(find_tribute_autocomplete_menu).to have_text('Accepting merge requests')
- end
-
- it 'only autocompletes the latest label' do
- create(:label, project: project, title: 'documentation')
- create(:label, project: project, title: 'feature')
+ start_comment_with_emoji(note, '.tribute-container li')
- fill_in 'Comment', with: '~documentation foo bar ~feat'
- # Invoke autocompletion
- find_field('Comment').native.send_keys(:right)
+ start_and_cancel_discussion
- wait_for_requests
+ note.fill_in(with: '')
+ start_comment_with_emoji(note, '.tribute-container li')
+ note.native.send_keys(:enter)
- expect(find_tribute_autocomplete_menu).to have_text('feature')
- expect(find_tribute_autocomplete_menu).not_to have_text('documentation')
+ expect(note.value).to eql('Hello :100: ')
+ end
end
- it 'does not autocomplete labels if no tilde is typed' do
- create(:label, project: project, title: 'documentation')
+ shared_examples 'autocomplete suggestions' do
+ it 'suggests objects correctly' do
+ fill_in 'Comment', with: object.class.reference_prefix
- fill_in 'Comment', with: 'document'
-
- wait_for_requests
+ find_tribute_autocomplete_menu.find('li').click
- expect(page).not_to have_css('.tribute-container')
+ expect(find_field('Comment').value).to have_text(expected_body)
+ end
end
- end
-
- context 'when other notes are destroyed' do
- let!(:discussion) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
-
- # This is meant to protect against this issue https://gitlab.com/gitlab-org/gitlab/-/issues/228729
- it 'keeps autocomplete key listeners' do
- visit project_issue_path(project, issue)
- note = find_field('Comment')
-
- start_comment_with_emoji(note, '.tribute-container li')
-
- start_and_cancel_discussion
- note.fill_in(with: '')
- start_comment_with_emoji(note, '.tribute-container li')
- note.native.send_keys(:enter)
+ context 'issues' do
+ let(:object) { issue }
+ let(:expected_body) { object.to_reference }
- expect(note.value).to eql('Hello :100: ')
+ it_behaves_like 'autocomplete suggestions'
end
- end
-
- shared_examples 'autocomplete suggestions' do
- it 'suggests objects correctly' do
- fill_in 'Comment', with: object.class.reference_prefix
- find_tribute_autocomplete_menu.find('li').click
+ context 'merge requests' do
+ let(:object) { create(:merge_request, source_project: project) }
+ let(:expected_body) { object.to_reference }
- expect(find_field('Comment').value).to have_text(expected_body)
+ it_behaves_like 'autocomplete suggestions'
end
- end
-
- context 'issues' do
- let(:object) { issue }
- let(:expected_body) { object.to_reference }
- it_behaves_like 'autocomplete suggestions'
- end
-
- context 'merge requests' do
- let(:object) { create(:merge_request, source_project: project) }
- let(:expected_body) { object.to_reference }
-
- it_behaves_like 'autocomplete suggestions'
- end
+ context 'project snippets' do
+ let!(:object) { snippet }
+ let(:expected_body) { object.to_reference }
- context 'project snippets' do
- let!(:object) { create(:project_snippet, project: project, title: 'code snippet') }
- let(:expected_body) { object.to_reference }
-
- it_behaves_like 'autocomplete suggestions'
- end
-
- context 'label' do
- let!(:object) { label }
- let(:expected_body) { object.title }
-
- it_behaves_like 'autocomplete suggestions'
- end
+ it_behaves_like 'autocomplete suggestions'
+ end
- context 'milestone' do
- let!(:object) { create(:milestone, project: project) }
- let(:expected_body) { object.to_reference }
+ context 'milestone' do
+ let!(:object) { create(:milestone, project: project) }
+ let(:expected_body) { object.to_reference }
- it_behaves_like 'autocomplete suggestions'
+ it_behaves_like 'autocomplete suggestions'
+ end
end
end
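The restructured autocomplete spec above routes issues, merge requests, snippets and milestones through one 'autocomplete suggestions' shared example. The following is a minimal, self-contained sketch of that pattern, assuming GitLab's spec_helper (which provides sign_in and wait_for_requests) and the standard FactoryBot factories; the '.atwho-view' selector comes from the atwho branch of the spec.

    # Sketch only; not a drop-in replacement for the spec above.
    require 'spec_helper'

    RSpec.describe 'GFM autocomplete suggestions (sketch)', :js do
      let_it_be(:user)    { create(:user) }
      let_it_be(:project) { create(:project, :public, :repository) }
      let_it_be(:issue)   { create(:issue, project: project) }

      before do
        sign_in(user)
        visit project_issue_path(project, issue)
        wait_for_requests
      end

      shared_examples 'autocomplete suggestions' do
        it 'suggests objects correctly' do
          # reference_prefix is '#' for issues, '!' for merge requests, '%' for milestones.
          fill_in 'Comment', with: object.class.reference_prefix

          find('.atwho-view li', match: :first).click

          expect(find_field('Comment').value).to have_text(expected_body)
        end
      end

      context 'issues' do
        let(:object)        { issue }
        let(:expected_body) { object.to_reference }

        it_behaves_like 'autocomplete suggestions'
      end

      context 'merge requests' do
        let(:object)        { create(:merge_request, source_project: project) }
        let(:expected_body) { object.to_reference }

        it_behaves_like 'autocomplete suggestions'
      end
    end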
diff --git a/spec/features/issues/issue_sidebar_spec.rb b/spec/features/issues/issue_sidebar_spec.rb
index ca44978d223..04b4caa52fe 100644
--- a/spec/features/issues/issue_sidebar_spec.rb
+++ b/spec/features/issues/issue_sidebar_spec.rb
@@ -30,79 +30,199 @@ RSpec.describe 'Issue Sidebar' do
let(:user2) { create(:user) }
let(:issue2) { create(:issue, project: project, author: user2) }
- include_examples 'issuable invite members experiments' do
- let(:issuable_path) { project_issue_path(project, issue2) }
- end
-
- context 'when user is a developer' do
+ context 'when GraphQL assignees widget feature flag is disabled' do
before do
- project.add_developer(user)
- visit_issue(project, issue2)
-
- find('.block.assignee .edit-link').click
+ stub_feature_flags(issue_assignees_widget: false)
+ end
- wait_for_requests
+ include_examples 'issuable invite members experiments' do
+ let(:issuable_path) { project_issue_path(project, issue2) }
end
- it 'shows author in assignee dropdown' do
- page.within '.dropdown-menu-user' do
- expect(page).to have_content(user2.name)
+ context 'when user is a developer' do
+ before do
+ project.add_developer(user)
+ visit_issue(project, issue2)
+
+ find('.block.assignee .edit-link').click
+ wait_for_requests
+ end
+
+ it 'shows author in assignee dropdown' do
+ page.within '.dropdown-menu-user' do
+ expect(page).to have_content(user2.name)
+ end
+ end
+
+ it 'shows author when filtering assignee dropdown' do
+ page.within '.dropdown-menu-user' do
+ find('.dropdown-input-field').set(user2.name)
+
+ wait_for_requests
+
+ expect(page).to have_content(user2.name)
+ end
+ end
+
+ it 'assigns yourself' do
+ find('.block.assignee .dropdown-menu-toggle').click
+
+ click_button 'assign yourself'
+
+ wait_for_requests
+
+ find('.block.assignee .edit-link').click
+
+ page.within '.dropdown-menu-user' do
+ expect(page.find('.dropdown-header')).to be_visible
+ expect(page.find('.dropdown-menu-user-link.is-active')).to have_content(user.name)
+ end
end
- end
- it 'shows author when filtering assignee dropdown' do
- page.within '.dropdown-menu-user' do
+ it 'keeps your filtered term after filtering and dismissing the dropdown' do
find('.dropdown-input-field').set(user2.name)
wait_for_requests
- expect(page).to have_content(user2.name)
+ page.within '.dropdown-menu-user' do
+ expect(page).not_to have_content 'Unassigned'
+ click_link user2.name
+ end
+
+ find('.js-right-sidebar').click
+ find('.block.assignee .edit-link').click
+
+ expect(page.all('.dropdown-menu-user li').length).to eq(1)
+ expect(find('.dropdown-input-field').value).to eq(user2.name)
+ end
+
+ it 'shows label text as "Apply" when assignees are changed' do
+ project.add_developer(user)
+ visit_issue(project, issue2)
+
+ find('.block.assignee .edit-link').click
+ wait_for_requests
+
+ click_on 'Unassigned'
+
+ expect(page).to have_link('Apply')
end
end
+ end
- it 'assigns yourself' do
- find('.block.assignee .dropdown-menu-toggle').click
+ context 'when GraphQL assignees widget feature flag is enabled' do
+ context 'when a privileged user can invite' do
+ it 'shows a link for inviting members and launches invite modal' do
+ project.add_maintainer(user)
+ visit_issue(project, issue2)
- click_button 'assign yourself'
+ open_assignees_dropdown
- wait_for_requests
+ page.within '.dropdown-menu-user' do
+ expect(page).to have_link('Invite members')
+ expect(page).to have_selector('[data-track-event="click_invite_members"]')
+ expect(page).to have_selector('[data-track-label="edit_assignee"]')
+ end
- find('.block.assignee .edit-link').click
+ click_link 'Invite members'
- page.within '.dropdown-menu-user' do
- expect(page.find('.dropdown-header')).to be_visible
- expect(page.find('.dropdown-menu-user-link.is-active')).to have_content(user.name)
+ expect(page).to have_content("You're inviting members to the")
end
end
- it 'keeps your filtered term after filtering and dismissing the dropdown' do
- find('.dropdown-input-field').set(user2.name)
+ context 'when invite_members_version_b experiment is enabled' do
+ before do
+ stub_experiment_for_subject(invite_members_version_b: true)
+ end
+
+ it 'shows a link for inviting members and follows through to modal' do
+ project.add_developer(user)
+ visit_issue(project, issue2)
- wait_for_requests
+ open_assignees_dropdown
- page.within '.dropdown-menu-user' do
- expect(page).not_to have_content 'Unassigned'
- click_link user2.name
+ page.within '.dropdown-menu-user' do
+ expect(page).to have_link('Invite members', href: '#')
+ expect(page).to have_selector('[data-track-event="click_invite_members_version_b"]')
+ expect(page).to have_selector('[data-track-label="edit_assignee"]')
+ end
+
+ click_link 'Invite members'
+
+ expect(page).to have_content("Oops, this feature isn't ready yet")
end
+ end
+
+ context 'when invite_members_version_b experiment is disabled' do
+ it 'shows author in assignee dropdown and no invite link' do
+ project.add_developer(user)
+ visit_issue(project, issue2)
- find('.js-right-sidebar').click
- find('.block.assignee .edit-link').click
+ open_assignees_dropdown
- expect(page.all('.dropdown-menu-user li').length).to eq(1)
- expect(find('.dropdown-input-field').value).to eq(user2.name)
+ page.within '.dropdown-menu-user' do
+ expect(page).not_to have_link('Invite members')
+ end
+ end
end
- end
- it 'shows label text as "Apply" when assignees are changed' do
- project.add_developer(user)
- visit_issue(project, issue2)
+ context 'when user is a developer' do
+ before do
+ project.add_developer(user)
+ visit_issue(project, issue2)
+ end
+
+ it 'shows author in assignee dropdown' do
+ open_assignees_dropdown
+
+ page.within '.dropdown-menu-user' do
+ expect(page).to have_content(user2.name)
+ end
+ end
+
+ it 'shows author when filtering assignee dropdown' do
+ open_assignees_dropdown
+
+ page.within '.dropdown-menu-user' do
+ find('.js-dropdown-input-field').find('input').set(user2.name)
+
+ wait_for_requests
+
+ expect(page).to have_content(user2.name)
+ end
+ end
+
+ it 'assigns yourself' do
+ click_button 'assign yourself'
+ wait_for_requests
+
+ page.within '.assignee' do
+ expect(page).to have_content(user.name)
+ end
+ end
- find('.block.assignee .edit-link').click
- wait_for_requests
+ it 'keeps your filtered term after filtering and dismissing the dropdown' do
+ open_assignees_dropdown
- click_on 'Unassigned'
+ find('.js-dropdown-input-field').find('input').set(user2.name)
+ wait_for_requests
+
+ page.within '.dropdown-menu-user' do
+ expect(page).not_to have_content 'Unassigned'
+ click_link user2.name
+ end
- expect(page).to have_link('Apply')
+ find('.js-right-sidebar').click
+
+ open_assignees_dropdown
+
+ page.within('.assignee') do
+ expect(page.all('[data-testid="selected-participant"]').length).to eq(1)
+ end
+
+ expect(find('.js-dropdown-input-field').find('input').value).to eq(user2.name)
+ end
+ end
end
end
@@ -171,7 +291,7 @@ RSpec.describe 'Issue Sidebar' do
context 'editing issue labels', :js do
before do
- issue.update(labels: [label])
+ issue.update!(labels: [label])
page.within('.block.labels') do
click_on 'Edit'
end
@@ -334,4 +454,11 @@ RSpec.describe 'Issue Sidebar' do
find('aside.right-sidebar.right-sidebar-collapsed .js-sidebar-toggle').click
find('aside.right-sidebar.right-sidebar-expanded')
end
+
+ def open_assignees_dropdown
+ page.within('.assignee') do
+ click_button('Edit')
+ wait_for_requests
+ end
+ end
end
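Both branches of the sidebar spec above pivot on the issue_assignees_widget feature flag and the new open_assignees_dropdown helper. A condensed sketch of that structure, assuming GitLab's stub_feature_flags, sign_in and wait_for_requests helpers:

    RSpec.describe 'Issue Sidebar assignees (sketch)', :js do
      let_it_be(:user)    { create(:user) }
      let_it_be(:project) { create(:project, :public) }
      let_it_be(:issue)   { create(:issue, project: project) }

      before do
        project.add_developer(user)
        sign_in(user)
      end

      context 'when the GraphQL assignees widget is disabled' do
        before do
          stub_feature_flags(issue_assignees_widget: false)
          visit project_issue_path(project, issue)
        end

        it 'opens the legacy dropdown through the edit link' do
          find('.block.assignee .edit-link').click
          wait_for_requests

          expect(page).to have_selector('.dropdown-menu-user')
        end
      end

      context 'when the GraphQL assignees widget is enabled' do
        before do
          visit project_issue_path(project, issue)
        end

        it 'opens the Vue dropdown through the Edit button' do
          open_assignees_dropdown

          expect(page).to have_selector('.dropdown-menu-user')
        end
      end

      # Helper lifted from the hunk above: the Vue widget renders a plain
      # 'Edit' button inside the .assignee block.
      def open_assignees_dropdown
        page.within('.assignee') do
          click_button('Edit')
          wait_for_requests
        end
      end
    end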
diff --git a/spec/features/issues/markdown_toolbar_spec.rb b/spec/features/issues/markdown_toolbar_spec.rb
index 6dc1cbfb2d7..aad5d319bc4 100644
--- a/spec/features/issues/markdown_toolbar_spec.rb
+++ b/spec/features/issues/markdown_toolbar_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe 'Issue markdown toolbar', :js do
- let(:project) { create(:project, :public) }
- let(:issue) { create(:issue, project: project) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:user) { create(:user) }
before do
sign_in(user)
@@ -14,28 +14,22 @@ RSpec.describe 'Issue markdown toolbar', :js do
end
it "doesn't include first new line when adding bold" do
- find('#note-body').native.send_keys('test')
- find('#note-body').native.send_key(:enter)
- find('#note-body').native.send_keys('bold')
+ fill_in 'Comment', with: "test\nbold"
- find('.js-main-target-form #note-body')
- page.evaluate_script('document.querySelectorAll(".js-main-target-form #note-body")[0].setSelectionRange(4, 9)')
+ page.evaluate_script('document.getElementById("note-body").setSelectionRange(4, 9)')
- first('.toolbar-btn').click
+ click_button 'Add bold text'
- expect(find('#note-body')[:value]).to eq("test\n**bold**\n")
+ expect(find_field('Comment').value).to eq("test\n**bold**\n")
end
it "doesn't include first new line when adding underline" do
- find('#note-body').native.send_keys('test')
- find('#note-body').native.send_key(:enter)
- find('#note-body').native.send_keys('underline')
+ fill_in 'Comment', with: "test\nunderline"
- find('.js-main-target-form #note-body')
- page.evaluate_script('document.querySelectorAll(".js-main-target-form #note-body")[0].setSelectionRange(4, 50)')
+ page.evaluate_script('document.getElementById("note-body").setSelectionRange(4, 50)')
- all('.toolbar-btn')[1].click
+ click_button 'Add italic text'
- expect(find('#note-body')[:value]).to eq("test\n_underline_\n")
+ expect(find_field('Comment').value).to eq("test\n_underline_\n")
end
end
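The toolbar spec above also swaps let for let_it_be. Via the test-prof gem that GitLab bundles, let_it_be builds each record once in a before_all hook and reuses it across the group inside a wrapping transaction, instead of recreating it for every example; treat the snippet below as an illustration of the difference rather than project code.

    # Created again for every example; fine, but slow for expensive records.
    let(:project) { create(:project, :public) }

    # Created once before the whole example group and rolled back afterwards;
    # examples share the same persisted record.
    let_it_be(:project) { create(:project, :public) }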
diff --git a/spec/features/issues/note_polling_spec.rb b/spec/features/issues/note_polling_spec.rb
index bc4c67fdd79..5e02d5ad038 100644
--- a/spec/features/issues/note_polling_spec.rb
+++ b/spec/features/issues/note_polling_spec.rb
@@ -103,7 +103,7 @@ RSpec.describe 'Issue notes polling', :js do
end
def update_note(note, new_text)
- note.update(note: new_text)
+ note.update!(note: new_text)
wait_for_requests
end
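The update_note change above is one of many update to update! swaps in this commit. The bang form raises ActiveRecord::RecordInvalid when validations fail instead of returning false, so broken test setup blows up at the call site rather than corrupting later assertions. A tiny illustration with a hypothetical model, not taken from the codebase:

    # Hypothetical model, for illustration only.
    class Comment < ApplicationRecord
      validates :body, presence: true
    end

    comment = Comment.create!(body: 'original')

    comment.update(body: nil)   # => false; the stale value lingers and a later expectation fails
    comment.update!(body: nil)  # raises ActiveRecord::RecordInvalid right here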
diff --git a/spec/features/issues/spam_issues_spec.rb b/spec/features/issues/spam_issues_spec.rb
index aec806c566d..461030d3176 100644
--- a/spec/features/issues/spam_issues_spec.rb
+++ b/spec/features/issues/spam_issues_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe 'New issue', :js do
end
it 'rejects issue creation' do
- click_button 'Submit issue'
+ click_button 'Create issue'
expect(page).to have_content('discarded')
expect(page).not_to have_content('potential spam')
@@ -51,7 +51,7 @@ RSpec.describe 'New issue', :js do
end
it 'creates a spam log record' do
- expect { click_button 'Submit issue' }
+ expect { click_button 'Create issue' }
.to log_spam(title: 'issue title', description: 'issue description', user_id: user.id, noteable_type: 'Issue')
end
end
@@ -63,14 +63,14 @@ RSpec.describe 'New issue', :js do
end
it 'allows issue creation' do
- click_button 'Submit issue'
+ click_button 'Create issue'
expect(page.find('.issue-details h2.title')).to have_content('issue title')
expect(page.find('.issue-details .description')).to have_content('issue description')
end
it 'creates a spam log record' do
- expect { click_button 'Submit issue' }
+ expect { click_button 'Create issue' }
.to log_spam(title: 'issue title', description: 'issue description', user_id: user.id, noteable_type: 'Issue')
end
end
@@ -101,14 +101,14 @@ RSpec.describe 'New issue', :js do
fill_in 'issue_title', with: 'issue title'
fill_in 'issue_description', with: 'issue description'
- click_button 'Submit issue'
+ click_button 'Create issue'
# it is impossible to test reCAPTCHA automatically and there is no possibility to fill in recaptcha
# reCAPTCHA verification is skipped in test environment and it always returns true
expect(page).not_to have_content('issue title')
expect(page).to have_css('.recaptcha')
- click_button 'Submit issue'
+ click_button 'Create issue'
expect(page.find('.issue-details h2.title')).to have_content('issue title')
expect(page.find('.issue-details .description')).to have_content('issue description')
@@ -122,7 +122,7 @@ RSpec.describe 'New issue', :js do
end
it 'creates an issue without a need to solve reCAPTCHA' do
- click_button 'Submit issue'
+ click_button 'Create issue'
expect(page).not_to have_css('.recaptcha')
expect(page.find('.issue-details h2.title')).to have_content('issue title')
@@ -130,7 +130,7 @@ RSpec.describe 'New issue', :js do
end
it 'creates a spam log record' do
- expect { click_button 'Submit issue' }
+ expect { click_button 'Create issue' }
.to log_spam(title: 'issue title', description: 'issue description', user_id: user.id, noteable_type: 'Issue')
end
end
@@ -148,7 +148,7 @@ RSpec.describe 'New issue', :js do
end
it 'creates an issue without a need to solve reCaptcha' do
- click_button 'Submit issue'
+ click_button 'Create issue'
expect(page).not_to have_css('.recaptcha')
expect(page.find('.issue-details h2.title')).to have_content('issue title')
@@ -156,7 +156,7 @@ RSpec.describe 'New issue', :js do
end
it 'creates a spam log record' do
- expect { click_button 'Submit issue' }
+ expect { click_button 'Create issue' }
.to log_spam(title: 'issue title', description: 'issue description', user_id: user.id, noteable_type: 'Issue')
end
end
@@ -178,7 +178,7 @@ RSpec.describe 'New issue', :js do
fill_in 'issue_title', with: 'issue title'
fill_in 'issue_description', with: 'issue description'
- click_button 'Submit issue'
+ click_button 'Create issue'
expect(page.find('.issue-details h2.title')).to have_content('issue title')
expect(page.find('.issue-details .description')).to have_content('issue description')
diff --git a/spec/features/issues/user_comments_on_issue_spec.rb b/spec/features/issues/user_comments_on_issue_spec.rb
index 004488f2f64..09d3ad15641 100644
--- a/spec/features/issues/user_comments_on_issue_spec.rb
+++ b/spec/features/issues/user_comments_on_issue_spec.rb
@@ -57,17 +57,9 @@ RSpec.describe "User comments on issue", :js do
project.add_maintainer(user)
create(:label, project: project, title: 'label')
- page.within '.timeline-content-form' do
- find('#note-body').native.send_keys('/l')
- end
-
- wait_for_requests
-
- expect(page).to have_selector('.atwho-container')
+ fill_in 'Comment', with: '/l'
- page.within '.atwho-container #at-view-commands' do
- expect(find('li', match: :first)).to have_content('/label')
- end
+ expect(find_highlighted_autocomplete_item).to have_content('/label')
end
end
@@ -110,4 +102,10 @@ RSpec.describe "User comments on issue", :js do
end
end
end
+
+ private
+
+ def find_highlighted_autocomplete_item
+ find('.atwho-view li.cur', visible: true)
+ end
end
diff --git a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
index e225a45481d..6e8b3e4fb7c 100644
--- a/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
+++ b/spec/features/issues/user_creates_branch_and_merge_request_spec.rb
@@ -150,7 +150,7 @@ RSpec.describe 'User creates branch and merge request on issue page', :js do
context 'when merge requests are disabled' do
before do
- project.project_feature.update(merge_requests_access_level: 0)
+ project.project_feature.update!(merge_requests_access_level: 0)
visit project_issue_path(project, issue)
end
diff --git a/spec/features/issues/user_creates_confidential_merge_request_spec.rb b/spec/features/issues/user_creates_confidential_merge_request_spec.rb
index ea96165d7b7..6b4526cd624 100644
--- a/spec/features/issues/user_creates_confidential_merge_request_spec.rb
+++ b/spec/features/issues/user_creates_confidential_merge_request_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe 'User creates confidential merge request on issue page', :js do
let(:forked_project) { fork_project(project, user, repository: true) }
before do
- forked_project.update(visibility: Gitlab::VisibilityLevel::PRIVATE)
+ forked_project.update!(visibility: Gitlab::VisibilityLevel::PRIVATE)
visit_confidential_issue
end
diff --git a/spec/features/issues/user_creates_issue_spec.rb b/spec/features/issues/user_creates_issue_spec.rb
index 98f9ed6c6a2..e2e204f03db 100644
--- a/spec/features/issues/user_creates_issue_spec.rb
+++ b/spec/features/issues/user_creates_issue_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe "User creates issue" do
.and have_no_content("Milestone")
expect(page.find('#issue_title')['placeholder']).to eq 'Title'
- expect(page.find('#issue_description')['placeholder']).to eq 'Write a comment or drag your files here…'
+ expect(page.find('#issue_description')['placeholder']).to eq 'Write a description or drag your files here…'
end
issue_title = "500 error on profile"
@@ -54,7 +54,7 @@ RSpec.describe "User creates issue" do
first('.js-md').click
first('.rspec-issuable-form-description').native.send_keys('Description')
- click_button("Submit issue")
+ click_button("Create issue")
expect(page).to have_content(issue_title)
.and have_content(user.name)
@@ -112,7 +112,7 @@ RSpec.describe "User creates issue" do
fill_in("Title", with: issue_title)
click_button("Label")
click_link(label_titles.first)
- click_button("Submit issue")
+ click_button("Create issue")
expect(page).to have_content(issue_title)
.and have_content(user.name)
@@ -135,7 +135,7 @@ RSpec.describe "User creates issue" do
expect(find('#issuable-due-date').value).to eq date.to_s
- click_button 'Submit issue'
+ click_button 'Create issue'
page.within '.issuable-sidebar' do
expect(page).to have_content date.to_s(:medium)
@@ -259,7 +259,7 @@ RSpec.describe "User creates issue" do
fill_in 'issue_title', with: 'bug 345'
fill_in 'issue_description', with: 'bug description'
- click_button 'Submit issue'
+ click_button 'Create issue'
end
end
end
diff --git a/spec/features/issues/user_edits_issue_spec.rb b/spec/features/issues/user_edits_issue_spec.rb
index 9d4a6cdb522..1bbb96ff479 100644
--- a/spec/features/issues/user_edits_issue_spec.rb
+++ b/spec/features/issues/user_edits_issue_spec.rb
@@ -78,7 +78,7 @@ RSpec.describe "Issues > User edits issue", :js do
end
it 'warns about version conflict' do
- issue.update(title: "New title")
+ issue.update!(title: "New title")
fill_in 'issue_title', with: 'bug 345'
fill_in 'issue_description', with: 'bug description'
@@ -142,10 +142,8 @@ RSpec.describe "Issues > User edits issue", :js do
it 'can remove label without removing label added via quick action', :aggregate_failures do
# Add `syzygy` label with a quick action
- note = find('#note-body')
- page.within '.timeline-content-form' do
- note.native.send_keys('/label ~syzygy')
- end
+ fill_in 'Comment', with: '/label ~syzygy'
+
click_button 'Comment'
wait_for_requests
@@ -169,80 +167,165 @@ RSpec.describe "Issues > User edits issue", :js do
end
describe 'update assignee' do
- context 'by authorized user' do
- def close_dropdown_menu_if_visible
- find('.dropdown-menu-toggle', visible: :all).tap do |toggle|
- toggle.click if toggle.visible?
- end
+ context 'when GraphQL assignees widget feature flag is disabled' do
+ before do
+ stub_feature_flags(issue_assignees_widget: false)
end
- it 'allows user to select unassigned' do
- visit project_issue_path(project, issue)
+ context 'by authorized user' do
+ def close_dropdown_menu_if_visible
+ find('.dropdown-menu-toggle', visible: :all).tap do |toggle|
+ toggle.click if toggle.visible?
+ end
+ end
- page.within('.assignee') do
- expect(page).to have_content "#{user.name}"
+ it 'allows user to select unassigned' do
+ visit project_issue_path(project, issue)
- click_link 'Edit'
- click_link 'Unassigned'
- first('.title').click
- expect(page).to have_content 'None - assign yourself'
+ page.within('.assignee') do
+ expect(page).to have_content "#{user.name}"
+
+ click_link 'Edit'
+ click_link 'Unassigned'
+ first('.title').click
+
+ expect(page).to have_content 'None - assign yourself'
+ end
end
- end
- it 'allows user to select an assignee' do
- issue2 = create(:issue, project: project, author: user)
- visit project_issue_path(project, issue2)
+ it 'allows user to select an assignee' do
+ issue2 = create(:issue, project: project, author: user)
+ visit project_issue_path(project, issue2)
- page.within('.assignee') do
- expect(page).to have_content "None"
+ page.within('.assignee') do
+ expect(page).to have_content "None"
+ end
+
+ page.within '.assignee' do
+ click_link 'Edit'
+ end
+
+ page.within '.dropdown-menu-user' do
+ click_link user.name
+ end
+
+ page.within('.assignee') do
+ expect(page).to have_content user.name
+ end
end
- page.within '.assignee' do
- click_link 'Edit'
+ it 'allows user to unselect themselves' do
+ issue2 = create(:issue, project: project, author: user, assignees: [user])
+
+ visit project_issue_path(project, issue2)
+
+ page.within '.assignee' do
+ expect(page).to have_content user.name
+
+ click_link 'Edit'
+ click_link user.name
+
+ close_dropdown_menu_if_visible
+
+ page.within '.value .assign-yourself' do
+ expect(page).to have_content "None"
+ end
+ end
end
+ end
+
+ context 'by unauthorized user' do
+ let(:guest) { create(:user) }
- page.within '.dropdown-menu-user' do
- click_link user.name
+ before do
+ project.add_guest(guest)
end
- page.within('.assignee') do
- expect(page).to have_content user.name
+ it 'shows assignee text' do
+ sign_out(:user)
+ sign_in(guest)
+
+ visit project_issue_path(project, issue)
+ expect(page).to have_content issue.assignees.first.name
end
end
+ end
- it 'allows user to unselect themselves' do
- issue2 = create(:issue, project: project, author: user, assignees: [user])
+ context 'when GraphQL assignees widget feature flag is enabled' do
+ context 'by authorized user' do
+ it 'allows user to select unassigned' do
+ visit project_issue_path(project, issue)
- visit project_issue_path(project, issue2)
+ page.within('.assignee') do
+ expect(page).to have_content "#{user.name}"
- page.within '.assignee' do
- expect(page).to have_content user.name
+ click_button('Edit')
+ wait_for_requests
- click_link 'Edit'
- click_link user.name
+ find('[data-testid="unassign"]').click
+ find('[data-testid="title"]').click
+ wait_for_requests
+
+ expect(page).to have_content 'None - assign yourself'
+ end
+ end
- close_dropdown_menu_if_visible
+ it 'allows user to select an assignee' do
+ issue2 = create(:issue, project: project, author: user)
+ visit project_issue_path(project, issue2)
- page.within '.value .assign-yourself' do
+ page.within('.assignee') do
expect(page).to have_content "None"
+ click_button('Edit')
+ wait_for_requests
+ end
+
+ page.within '.dropdown-menu-user' do
+ click_link user.name
+ end
+
+ page.within('.assignee') do
+ find('[data-testid="title"]').click
+ wait_for_requests
+
+ expect(page).to have_content user.name
end
end
- end
- end
- context 'by unauthorized user' do
- let(:guest) { create(:user) }
+ it 'allows user to unselect themselves' do
+ issue2 = create(:issue, project: project, author: user, assignees: [user])
- before do
- project.add_guest(guest)
+ visit project_issue_path(project, issue2)
+
+ page.within '.assignee' do
+ expect(page).to have_content user.name
+
+ click_button('Edit')
+ wait_for_requests
+ click_link user.name
+
+ find('[data-testid="title"]').click
+ wait_for_requests
+
+ expect(page).to have_content "None"
+ end
+ end
end
- it 'shows assignee text' do
- sign_out(:user)
- sign_in(guest)
+ context 'by unauthorized user' do
+ let(:guest) { create(:user) }
- visit project_issue_path(project, issue)
- expect(page).to have_content issue.assignees.first.name
+ before do
+ project.add_guest(guest)
+ end
+
+ it 'shows assignee text' do
+ sign_out(:user)
+ sign_in(guest)
+
+ visit project_issue_path(project, issue)
+ expect(page).to have_content issue.assignees.first.name
+ end
end
end
end
@@ -309,7 +392,7 @@ RSpec.describe "Issues > User edits issue", :js do
before do
project.add_guest(guest)
issue.milestone = milestone
- issue.save
+ issue.save!
end
it 'shows milestone text' do
@@ -326,24 +409,23 @@ RSpec.describe "Issues > User edits issue", :js do
it 'adds due date to issue' do
date = Date.today.at_beginning_of_month + 2.days
- page.within '.due_date' do
- click_link 'Edit'
-
+ page.within '[data-testid="due-date"]' do
+ click_button 'Edit'
page.within '.pika-single' do
click_button date.day
end
wait_for_requests
- expect(find('.value').text).to have_content date.strftime('%b %-d, %Y')
+ expect(find('[data-testid="sidebar-duedate-value"]').text).to have_content date.strftime('%b %-d, %Y')
end
end
it 'removes due date from issue' do
date = Date.today.at_beginning_of_month + 2.days
- page.within '.due_date' do
- click_link 'Edit'
+ page.within '[data-testid="due-date"]' do
+ click_button 'Edit'
page.within '.pika-single' do
click_button date.day
@@ -353,7 +435,7 @@ RSpec.describe "Issues > User edits issue", :js do
expect(page).to have_no_content 'None'
- click_link 'remove due date'
+ click_button 'remove due date'
expect(page).to have_content 'None'
end
end
diff --git a/spec/features/issues/user_filters_issues_spec.rb b/spec/features/issues/user_filters_issues_spec.rb
index 1b246181523..5d05df6aaf0 100644
--- a/spec/features/issues/user_filters_issues_spec.rb
+++ b/spec/features/issues/user_filters_issues_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe 'User filters issues', :js do
@issue = Issue.find_by(title: 'foobar')
@issue.milestone = create(:milestone, project: project)
@issue.assignees = []
- @issue.save
+ @issue.save!
end
let(:issue) { @issue }
diff --git a/spec/features/issues/user_interacts_with_awards_spec.rb b/spec/features/issues/user_interacts_with_awards_spec.rb
index 1c7bc5f239f..e862f7030c0 100644
--- a/spec/features/issues/user_interacts_with_awards_spec.rb
+++ b/spec/features/issues/user_interacts_with_awards_spec.rb
@@ -5,6 +5,10 @@ require 'spec_helper'
RSpec.describe 'User interacts with awards' do
let(:user) { create(:user) }
+ before do
+ stub_feature_flags(improved_emoji_picker: false)
+ end
+
describe 'User interacts with awards in an issue', :js do
let(:issue) { create(:issue, project: project)}
let(:project) { create(:project) }
diff --git a/spec/features/issues/user_invites_from_a_comment_spec.rb b/spec/features/issues/user_invites_from_a_comment_spec.rb
new file mode 100644
index 00000000000..82061f6ed79
--- /dev/null
+++ b/spec/features/issues/user_invites_from_a_comment_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe "User invites from a comment", :js do
+ let_it_be(:project) { create(:project_empty_repo, :public) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:user) { project.owner }
+
+ before do
+ sign_in(user)
+ end
+
+ it "launches the invite modal from invite link on a comment" do
+ stub_experiments(invite_members_in_comment: :invite_member_link)
+
+ visit project_issue_path(project, issue)
+
+ page.within(".new-note") do
+ click_button 'Invite Member'
+ end
+
+ expect(page).to have_content("You're inviting members to the")
+ end
+end
diff --git a/spec/features/issues/user_sees_live_update_spec.rb b/spec/features/issues/user_sees_live_update_spec.rb
index 79c6978cbc0..7e4880f209e 100644
--- a/spec/features/issues/user_sees_live_update_spec.rb
+++ b/spec/features/issues/user_sees_live_update_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe 'Issues > User sees live update', :js do
expect(page).to have_text("new title")
- issue.update(title: "updated title")
+ issue.update!(title: "updated title")
wait_for_requests
expect(page).to have_text("updated title")
diff --git a/spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb b/spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb
index 7a2b637e48e..6473fe01052 100644
--- a/spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb
+++ b/spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb
@@ -19,11 +19,14 @@ RSpec.describe 'Issues > Real-time sidebar', :js do
expect(page.find('.assignee')).to have_content 'None'
end
- gitlab_sign_in(user)
+ sign_in(user)
+
visit project_issue_path(project, issue)
expect(page.find('.assignee')).to have_content 'None'
click_button 'assign yourself'
+ wait_for_requests
+ expect(page.find('.assignee')).to have_content user.name
using_session :other_session do
expect(page.find('.assignee')).to have_content user.name
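The realtime sidebar spec drives two Capybara sessions at once: the default session performs the assignment while using_session keeps a second view of the same issue open. A consolidated sketch of that flow, assuming GitLab's sign_in and wait_for_requests helpers:

    it 'shows the new assignee in the other session without a reload', :js do
      using_session :other_session do
        visit project_issue_path(project, issue)

        expect(page.find('.assignee')).to have_content 'None'
      end

      sign_in(user)
      visit project_issue_path(project, issue)

      click_button 'assign yourself'
      wait_for_requests
      expect(page.find('.assignee')).to have_content user.name

      using_session :other_session do
        # The realtime subscription updates this session; no page reload happens here.
        expect(page.find('.assignee')).to have_content user.name
      end
    end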
diff --git a/spec/features/issues/user_sorts_issues_spec.rb b/spec/features/issues/user_sorts_issues_spec.rb
index f0bb055c6f2..c161e1deb83 100644
--- a/spec/features/issues/user_sorts_issues_spec.rb
+++ b/spec/features/issues/user_sorts_issues_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe "User sorts issues" do
it 'sorts by most recently updated', :js do
issue3.updated_at = Time.now + 100
- issue3.save
+ issue3.save!
visit project_issues_path(project, sort: sort_value_recently_updated)
expect(first_issue).to include('baz')
@@ -85,8 +85,8 @@ RSpec.describe "User sorts issues" do
describe 'sorting by due date', :js do
before do
- issue1.update(due_date: 1.day.from_now)
- issue2.update(due_date: 6.days.from_now)
+ issue1.update!(due_date: 1.day.from_now)
+ issue2.update!(due_date: 6.days.from_now)
end
it 'sorts by due date' do
@@ -96,7 +96,7 @@ RSpec.describe "User sorts issues" do
end
it 'sorts by due date by excluding nil due dates' do
- issue2.update(due_date: nil)
+ issue2.update!(due_date: nil)
visit project_issues_path(project, sort: sort_value_due_date)
@@ -111,7 +111,7 @@ RSpec.describe "User sorts issues" do
end
it 'sorts by least recently due date by excluding nil due dates' do
- issue2.update(due_date: nil)
+ issue2.update!(due_date: nil)
visit project_issues_path(project, label_names: [label.name], sort: sort_value_due_date_later)
@@ -122,8 +122,8 @@ RSpec.describe "User sorts issues" do
describe 'filtering by due date', :js do
before do
- issue1.update(due_date: 1.day.from_now)
- issue2.update(due_date: 6.days.from_now)
+ issue1.update!(due_date: 1.day.from_now)
+ issue2.update!(due_date: 6.days.from_now)
end
it 'filters by none' do
@@ -147,9 +147,9 @@ RSpec.describe "User sorts issues" do
end
it 'filters by due this week' do
- issue1.update(due_date: Date.today.beginning_of_week + 2.days)
- issue2.update(due_date: Date.today.end_of_week)
- issue3.update(due_date: Date.today - 8.days)
+ issue1.update!(due_date: Date.today.beginning_of_week + 2.days)
+ issue2.update!(due_date: Date.today.end_of_week)
+ issue3.update!(due_date: Date.today - 8.days)
visit project_issues_path(project, due_date: Issue::DueThisWeek.name)
@@ -161,9 +161,9 @@ RSpec.describe "User sorts issues" do
end
it 'filters by due this month' do
- issue1.update(due_date: Date.today.beginning_of_month + 2.days)
- issue2.update(due_date: Date.today.end_of_month)
- issue3.update(due_date: Date.today - 50.days)
+ issue1.update!(due_date: Date.today.beginning_of_month + 2.days)
+ issue2.update!(due_date: Date.today.end_of_month)
+ issue3.update!(due_date: Date.today - 50.days)
visit project_issues_path(project, due_date: Issue::DueThisMonth.name)
@@ -175,9 +175,9 @@ RSpec.describe "User sorts issues" do
end
it 'filters by overdue' do
- issue1.update(due_date: Date.today + 2.days)
- issue2.update(due_date: Date.today + 20.days)
- issue3.update(due_date: Date.yesterday)
+ issue1.update!(due_date: Date.today + 2.days)
+ issue2.update!(due_date: Date.today + 20.days)
+ issue3.update!(due_date: Date.yesterday)
visit project_issues_path(project, due_date: Issue::Overdue.name)
@@ -189,9 +189,9 @@ RSpec.describe "User sorts issues" do
end
it 'filters by due next month and previous two weeks' do
- issue1.update(due_date: Date.today - 4.weeks)
- issue2.update(due_date: (Date.today + 2.months).beginning_of_month)
- issue3.update(due_date: Date.yesterday)
+ issue1.update!(due_date: Date.today - 4.weeks)
+ issue2.update!(due_date: (Date.today + 2.months).beginning_of_month)
+ issue3.update!(due_date: Date.yesterday)
visit project_issues_path(project, due_date: Issue::DueNextMonthAndPreviousTwoWeeks.name)
@@ -206,9 +206,9 @@ RSpec.describe "User sorts issues" do
describe 'sorting by milestone', :js do
before do
issue1.milestone = newer_due_milestone
- issue1.save
+ issue1.save!
issue2.milestone = later_due_milestone
- issue2.save
+ issue2.save!
end
it 'sorts by milestone' do
@@ -224,9 +224,9 @@ RSpec.describe "User sorts issues" do
before do
issue1.assignees << user2
- issue1.save
+ issue1.save!
issue2.assignees << user2
- issue2.save
+ issue2.save!
end
it 'sorts with a filter applied' do
diff --git a/spec/features/markdown/markdown_spec.rb b/spec/features/markdown/markdown_spec.rb
index e84b300a748..3208ad82c03 100644
--- a/spec/features/markdown/markdown_spec.rb
+++ b/spec/features/markdown/markdown_spec.rb
@@ -63,8 +63,8 @@ RSpec.describe 'GitLab Markdown', :aggregate_failures do
end
aggregate_failures 'parses fenced code blocks' do
- expect(doc).to have_selector('pre.code.highlight.js-syntax-highlight.c')
- expect(doc).to have_selector('pre.code.highlight.js-syntax-highlight.python')
+ expect(doc).to have_selector('pre.code.highlight.js-syntax-highlight.language-c')
+ expect(doc).to have_selector('pre.code.highlight.js-syntax-highlight.language-python')
end
aggregate_failures 'parses mermaid code block' do
@@ -288,9 +288,10 @@ RSpec.describe 'GitLab Markdown', :aggregate_failures do
@wiki = @feat.wiki
@wiki_page = @feat.wiki_page
- path = 'images/example.jpg'
- gitaly_wiki_file = Gitlab::GitalyClient::WikiFile.new(path: path)
- expect(@wiki).to receive(:find_file).with(path, load_content: false).and_return(Gitlab::Git::WikiFile.new(gitaly_wiki_file))
+ name = 'example.jpg'
+ path = "images/#{name}"
+ blob = double(name: name, path: path, mime_type: 'image/jpeg', data: nil)
+ expect(@wiki).to receive(:find_file).with(path, load_content: false).and_return(Gitlab::Git::WikiFile.new(blob))
allow(@wiki).to receive(:wiki_base_path) { '/namespace1/gitlabhq/wikis' }
@html = markdown(@feat.raw_markdown, { pipeline: :wiki, wiki: @wiki, page_slug: @wiki_page.slug })
diff --git a/spec/features/merge_request/batch_comments_spec.rb b/spec/features/merge_request/batch_comments_spec.rb
index 25f2707146d..19680a827bf 100644
--- a/spec/features/merge_request/batch_comments_spec.rb
+++ b/spec/features/merge_request/batch_comments_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe 'Merge request > Batch comments', :js do
end
it 'adds draft note' do
- write_comment
+ write_diff_comment
expect(find('.draft-note-component')).to have_content('Line is wrong')
@@ -38,7 +38,7 @@ RSpec.describe 'Merge request > Batch comments', :js do
end
it 'publishes review' do
- write_comment
+ write_diff_comment
page.within('.review-bar-content') do
click_button 'Submit review'
@@ -52,7 +52,7 @@ RSpec.describe 'Merge request > Batch comments', :js do
end
it 'publishes single comment' do
- write_comment
+ write_diff_comment
click_button 'Add comment now'
@@ -64,7 +64,7 @@ RSpec.describe 'Merge request > Batch comments', :js do
end
it 'deletes draft note' do
- write_comment
+ write_diff_comment
accept_alert { find('.js-note-delete').click }
@@ -74,21 +74,70 @@ RSpec.describe 'Merge request > Batch comments', :js do
end
it 'edits draft note' do
- write_comment
+ write_diff_comment
find('.js-note-edit').click
# make sure comment form is in view
execute_script("window.scrollBy(0, 200)")
- page.within('.js-discussion-note-form') do
- fill_in('note_note', with: 'Testing update')
- click_button('Save comment')
+ write_comment(text: 'Testing update', button_text: 'Save comment')
+
+ expect(page).to have_selector('.draft-note-component', text: 'Testing update')
+ end
+
+ context 'with image and file draft note' do
+ let(:merge_request) { create(:merge_request_with_diffs, :with_image_diffs, source_project: project) }
+ let!(:draft_on_text) { create(:draft_note_on_text_diff, merge_request: merge_request, author: user, path: 'README.md', note: 'Lorem ipsum on text...') }
+ let!(:draft_on_image) { create(:draft_note_on_image_diff, merge_request: merge_request, author: user, path: 'files/images/ee_repo_logo.png', note: 'Lorem ipsum on an image...') }
+
+ it 'does not show in overview' do
+ visit_overview
+
+ expect(page).to have_no_text(draft_on_text.note)
+ expect(page).to have_no_text(draft_on_image.note)
end
+ end
- wait_for_requests
+ context 'adding single comment to review' do
+ before do
+ visit_overview
+ end
- expect(page).to have_selector('.draft-note-component', text: 'Testing update')
+ it 'at first does not show `Add to review` and `Add comment now` buttons' do
+ expect(page).to have_no_button('Add to review')
+ expect(page).to have_no_button('Add comment now')
+ end
+
+ context 'when review has started' do
+ before do
+ visit_diffs
+
+ write_diff_comment
+
+ visit_overview
+ end
+
+ it 'can add comment to review' do
+ write_comment(selector: '.js-main-target-form', field: 'note-body', text: 'Its a draft comment', button_text: 'Add to review')
+
+ expect(page).to have_selector('.draft-note-component', text: 'Its a draft comment')
+
+ click_button('Pending comments')
+
+ expect(page).to have_text('2 pending comments')
+ end
+
+ it 'can add comment right away' do
+ write_comment(selector: '.js-main-target-form', field: 'note-body', text: 'Its a regular comment', button_text: 'Add comment now')
+
+ expect(page).to have_selector('.note:not(.draft-note)', text: 'Its a regular comment')
+
+ click_button('Pending comments')
+
+ expect(page).to have_text('1 pending comment')
+ end
+ end
end
context 'in parallel diff' do
@@ -197,46 +246,51 @@ RSpec.describe 'Merge request > Batch comments', :js do
wait_for_requests
end
- def write_comment(button_text: 'Start a review', text: 'Line is wrong')
- click_diff_line(find("[id='#{sample_compare.changes[0][:line_code]}']"))
-
- page.within('.js-discussion-note-form') do
- fill_in('note_note', with: text)
- click_button(button_text)
- end
+ def visit_overview
+ visit project_merge_request_path(merge_request.project, merge_request)
wait_for_requests
end
- def write_parallel_comment(line, button_text: 'Start a review', text: 'Line is wrong')
+ def write_diff_comment(**params)
+ click_diff_line(find("[id='#{sample_compare.changes[0][:line_code]}']"))
+
+ write_comment(**params)
+ end
+
+ def write_parallel_comment(line, **params)
find("td[id='#{line}']").hover
find(".is-over button").click
- page.within("form[data-line-code='#{line}']") do
- fill_in('note_note', with: text)
+ write_comment(selector: "form[data-line-code='#{line}']", **params)
+ end
+
+ def write_comment(selector: '.js-discussion-note-form', field: 'note_note', button_text: 'Start a review', text: 'Line is wrong')
+ page.within(selector) do
+ fill_in(field, with: text)
click_button(button_text)
end
wait_for_requests
end
-end
-def write_reply_to_discussion(button_text: 'Start a review', text: 'Line is wrong', resolve: false, unresolve: false)
- page.within(first('.diff-files-holder .discussion-reply-holder')) do
- find_field('Reply…', match: :first).click
+ def write_reply_to_discussion(button_text: 'Start a review', text: 'Line is wrong', resolve: false, unresolve: false)
+ page.within(first('.diff-files-holder .discussion-reply-holder')) do
+ find_field('Reply…', match: :first).click
- fill_in('note_note', with: text)
+ fill_in('note_note', with: text)
- if resolve
- page.check('Resolve thread')
- end
+ if resolve
+ page.check('Resolve thread')
+ end
+
+ if unresolve
+ page.check('Unresolve thread')
+ end
- if unresolve
- page.check('Unresolve thread')
+ click_button(button_text)
end
- click_button(button_text)
+ wait_for_requests
end
-
- wait_for_requests
end
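A minimal usage sketch of the consolidated helpers above (helper names and selectors are the ones introduced in this diff; the combined review flow is illustrative, not part of the commit):

  it 'starts a review from a diff line and publishes it' do
    write_diff_comment(text: 'Needs a guard clause')   # defaults to the 'Start a review' button
    write_comment(selector: '.js-main-target-form', field: 'note-body', text: 'Overall looks good', button_text: 'Add to review')

    page.within('.review-bar-content') { click_button 'Submit review' }
  end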
diff --git a/spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb b/spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb
index fd13083c185..d36abf86518 100644
--- a/spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb
+++ b/spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe 'create a merge request, allowing commits from members who can me
check 'Allow commits from members who can merge to the target branch'
- click_button 'Submit merge request'
+ click_button 'Create merge request'
wait_for_requests
diff --git a/spec/features/merge_request/user_awards_emoji_spec.rb b/spec/features/merge_request/user_awards_emoji_spec.rb
index 62e4209f386..6f46cc20cba 100644
--- a/spec/features/merge_request/user_awards_emoji_spec.rb
+++ b/spec/features/merge_request/user_awards_emoji_spec.rb
@@ -17,33 +17,28 @@ RSpec.describe 'Merge request > User awards emoji', :js do
end
it 'adds award to merge request' do
- first('.js-emoji-btn').click
- expect(page).to have_selector('.js-emoji-btn.active')
- expect(first('.js-emoji-btn')).to have_content '1'
+ first('[data-testid="award-button"]').click
+ expect(page).to have_selector('[data-testid="award-button"].is-active')
+ expect(first('[data-testid="award-button"]')).to have_content '1'
visit project_merge_request_path(project, merge_request)
- expect(first('.js-emoji-btn')).to have_content '1'
+ expect(first('[data-testid="award-button"]')).to have_content '1'
end
it 'removes award from merge request' do
- first('.js-emoji-btn').click
- find('.js-emoji-btn.active').click
- expect(first('.js-emoji-btn')).to have_content '0'
+ first('[data-testid="award-button"]').click
+ find('[data-testid="award-button"].is-active').click
+ expect(first('[data-testid="award-button"]')).to have_content '0'
visit project_merge_request_path(project, merge_request)
- expect(first('.js-emoji-btn')).to have_content '0'
- end
-
- it 'has only one menu on the page' do
- first('.js-add-award').click
- expect(page).to have_selector('.emoji-menu')
-
- expect(page).to have_selector('.emoji-menu', count: 1)
+ expect(first('[data-testid="award-button"]')).to have_content '0'
end
it 'adds awards to note' do
- first('.js-note-emoji').click
- first('.emoji-menu .js-emoji-btn').click
+ page.within('.note-actions') do
+ first('.note-emoji-button').click
+ find('gl-emoji[data-name="8ball"]').click
+ end
wait_for_requests
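The emoji hunks above re-key the assertions from styling classes to data-testid hooks, so the spec survives purely visual class changes. A short sketch of the pattern, using the attribute and class names from the diff:

  first('[data-testid="award-button"]').click

  expect(page).to have_selector('[data-testid="award-button"].is-active')
  expect(first('[data-testid="award-button"]')).to have_content('1')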
diff --git a/spec/features/merge_request/user_creates_merge_request_spec.rb b/spec/features/merge_request/user_creates_merge_request_spec.rb
index 37d329d4d5d..119cf31098c 100644
--- a/spec/features/merge_request/user_creates_merge_request_spec.rb
+++ b/spec/features/merge_request/user_creates_merge_request_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe "User creates a merge request", :js do
end
fill_in("Title", with: title)
- click_button("Submit merge request")
+ click_button("Create merge request")
page.within(".merge-request") do
expect(page).to have_content(title)
@@ -87,7 +87,7 @@ RSpec.describe "User creates a merge request", :js do
click_button("Compare branches and continue")
- expect(page).to have_css("h3.page-title", text: "New Merge Request")
+ expect(page).to have_css("h3.page-title", text: "New merge request")
page.within("form#new_merge_request") do
fill_in("Title", with: title)
@@ -103,7 +103,7 @@ RSpec.describe "User creates a merge request", :js do
end
find('.js-assignee-search').click
- click_button("Submit merge request")
+ click_button("Create merge request")
expect(page).to have_content(title).and have_content("Request to merge #{user.namespace.path}:#{source_branch} into master")
end
diff --git a/spec/features/merge_request/user_invites_from_a_comment_spec.rb b/spec/features/merge_request/user_invites_from_a_comment_spec.rb
new file mode 100644
index 00000000000..79865094fd0
--- /dev/null
+++ b/spec/features/merge_request/user_invites_from_a_comment_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe "User invites from a comment", :js do
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+ let_it_be(:user) { project.owner }
+
+ before do
+ sign_in(user)
+ end
+
+ it "launches the invite modal from invite link on a comment" do
+ stub_experiments(invite_members_in_comment: :invite_member_link)
+
+ visit project_merge_request_path(project, merge_request)
+
+ page.within(".new-note") do
+ click_button 'Invite Member'
+ end
+
+ expect(page).to have_content("You're inviting members to the")
+ end
+end
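The new spec above pins the experiment variant with stub_experiments before visiting the page, so the invite link renders deterministically. A minimal sketch of that setup, assuming the same experiment key and helper shown in the file:

  before do
    stub_experiments(invite_members_in_comment: :invite_member_link)  # force the invite_member_link variant
    sign_in(project.owner)
  end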
diff --git a/spec/features/merge_request/user_posts_notes_spec.rb b/spec/features/merge_request/user_posts_notes_spec.rb
index 3099a893dc2..a6dfae72912 100644
--- a/spec/features/merge_request/user_posts_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_notes_spec.rb
@@ -173,7 +173,7 @@ RSpec.describe 'Merge request > User posts notes', :js do
it 'allows using markdown buttons after saving a note and then trying to edit it again' do
page.within('.current-note-edit-form') do
fill_in 'note[note]', with: 'This is the new content'
- find('.btn-success').click
+ find('.btn-confirm').click
end
find('.note').hover
@@ -182,16 +182,16 @@ RSpec.describe 'Merge request > User posts notes', :js do
find('.js-note-edit').click
page.within('.current-note-edit-form') do
- expect(find('#note_note').value).to include('This is the new content')
+ expect(find_field('note[note]').value).to include('This is the new content')
first('.js-md').click
- expect(find('#note_note').value).to include('This is the new content****')
+ expect(find_field('note[note]').value).to include('This is the new content****')
end
end
it 'appends the edited at time to the note' do
page.within('.current-note-edit-form') do
fill_in 'note[note]', with: 'Some new content'
- find('.btn-success').click
+ find('.btn-confirm').click
end
page.within("#note_#{note.id}") do
diff --git a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
index caa04059469..9a3f97a0943 100644
--- a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
+++ b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
@@ -15,10 +15,6 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
diff_refs: merge_request.diff_refs)
end
- before do
- stub_feature_flags(remove_resolve_note: false)
- end
-
context 'no threads' do
before do
project.add_maintainer(user)
@@ -67,7 +63,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to mark thread as resolved' do
page.within '.diff-content' do
- click_button 'Resolve thread'
+ find('button[data-qa-selector="resolve_discussion_button"]').click
end
expect(page).to have_selector('.discussion-body', visible: false)
@@ -84,7 +80,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to unresolve thread' do
page.within '.diff-content' do
- click_button 'Resolve thread'
+ find('button[data-qa-selector="resolve_discussion_button"]').click
click_button 'Unresolve thread'
end
@@ -96,7 +92,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
describe 'resolved thread' do
before do
page.within '.diff-content' do
- click_button 'Resolve thread'
+ find('button[data-qa-selector="resolve_discussion_button"]').click
end
visit_merge_request
@@ -197,7 +193,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to resolve from reply form without a comment' do
page.within '.diff-content' do
- click_button 'Resolve thread'
+ find('button[data-qa-selector="resolve_discussion_button"]').click
end
page.within '.line-resolve-all-container' do
@@ -234,7 +230,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'hides jump to next button when all resolved' do
page.within '.diff-content' do
- click_button 'Resolve thread'
+ find('button[data-qa-selector="resolve_discussion_button"]').click
end
expect(page).to have_selector('.discussion-next-btn', visible: false)
@@ -264,7 +260,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
visit_merge_request
end
- it 'does not mark thread as resolved when resolving single note' do
+ it 'marks thread as resolved when resolving single note' do
page.within("#note_#{note.id}") do
first('.line-resolve-btn').click
@@ -273,15 +269,13 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
expect(first('.line-resolve-btn')['aria-label']).to eq("Resolved by #{user.name}")
end
- expect(page).to have_content('Last updated')
-
page.within '.line-resolve-all-container' do
- expect(page).to have_content('1 unresolved thread')
+ expect(page).to have_content('All threads resolved')
end
end
it 'resolves thread' do
- resolve_buttons = page.all('.note .line-resolve-btn', count: 2)
+ resolve_buttons = page.all('.note .line-resolve-btn', count: 1)
resolve_buttons.each do |button|
button.click
end
@@ -332,7 +326,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to mark all threads as resolved' do
page.all('.discussion-reply-holder', count: 2).each do |reply_holder|
page.within reply_holder do
- click_button 'Resolve thread'
+ find('button[data-qa-selector="resolve_discussion_button"]').click
end
end
@@ -344,7 +338,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to quickly scroll to next unresolved thread' do
page.within('.discussion-reply-holder', match: :first) do
- click_button 'Resolve thread'
+ find('button[data-qa-selector="resolve_discussion_button"]').click
end
page.within '.line-resolve-all-container' do
@@ -416,7 +410,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to mark thread as resolved' do
page.within '.diff-content' do
- click_button 'Resolve thread'
+ find('button[data-qa-selector="resolve_discussion_button"]').click
end
page.within '.diff-content .note' do
@@ -431,7 +425,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to unresolve thread' do
page.within '.diff-content' do
- click_button 'Resolve thread'
+ find('button[data-qa-selector="resolve_discussion_button"]').click
click_button 'Unresolve thread'
end
@@ -459,7 +453,7 @@ RSpec.describe 'Merge request > User resolves diff notes and threads', :js do
it 'allows user to comment & unresolve thread' do
page.within '.diff-content' do
- click_button 'Resolve thread'
+ find('button[data-qa-selector="resolve_discussion_button"]').click
find_field('Reply…').click
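The resolve-thread interactions above click the button through its data-qa-selector attribute instead of its visible label, so the spec is unaffected by copy changes. Sketch of the lookup, selector value as in the diff:

  page.within '.diff-content' do
    find('button[data-qa-selector="resolve_discussion_button"]').click
  end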
diff --git a/spec/features/merge_request/user_sees_breadcrumb_links_spec.rb b/spec/features/merge_request/user_sees_breadcrumb_links_spec.rb
index 95e435a333e..d8b258bac47 100644
--- a/spec/features/merge_request/user_sees_breadcrumb_links_spec.rb
+++ b/spec/features/merge_request/user_sees_breadcrumb_links_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'New merge request breadcrumb' do
it 'displays link to project merge requests and new merge request' do
page.within '.breadcrumbs' do
- expect(find_link('Merge Requests')[:href]).to end_with(project_merge_requests_path(project))
+ expect(find_link('Merge requests')[:href]).to end_with(project_merge_requests_path(project))
expect(find_link('New')[:href]).to end_with(project_new_merge_request_path(project))
end
end
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index ad0e9b48903..733b5a97fea 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -26,7 +26,6 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
end
before do
- stub_feature_flags(new_pipelines_table: false)
stub_application_setting(auto_devops_enabled: false)
stub_ci_pipeline_yaml_file(YAML.dump(config))
project.add_maintainer(user)
@@ -62,7 +61,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees branch pipelines and detached merge request pipelines in correct order' do
page.within('.ci-table') do
- expect(page).to have_selector('.ci-pending', count: 2)
+ expect(page).to have_selector('.ci-created', count: 2)
expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{detached_merge_request_pipeline.id}")
end
end
@@ -154,7 +153,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when detached merge request pipeline succeeds' do
before do
- detached_merge_request_pipeline.succeed!
+ detached_merge_request_pipeline.reload.succeed!
wait_for_requests
end
@@ -168,7 +167,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when branch pipeline succeeds' do
before do
click_link 'Overview'
- push_pipeline.succeed!
+ push_pipeline.reload.succeed!
wait_for_requests
end
@@ -197,7 +196,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
it 'sees a branch pipeline in pipeline tab' do
page.within('.ci-table') do
- expect(page).to have_selector('.ci-pending', count: 1)
+ expect(page).to have_selector('.ci-created', count: 1)
expect(first('[data-testid="pipeline-url-link"]')).to have_content("##{push_pipeline.id}")
end
end
@@ -333,6 +332,31 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
end
end
+ context 'when the latest pipeline is running in the parent project' do
+ before do
+ Ci::CreatePipelineService.new(project, user, ref: 'feature')
+ .execute(:merge_request_event, merge_request: merge_request)
+ end
+
+ context 'when the previous pipeline failed in the fork project' do
+ before do
+ detached_merge_request_pipeline.reload.drop!
+ end
+
+ context 'when the parent project enables pipeline must succeed' do
+ before do
+ project.update!(only_allow_merge_if_pipeline_succeeds: true)
+ end
+
+ it 'shows MWPS button' do
+ visit project_merge_request_path(project, merge_request)
+
+ expect(page).to have_button('Merge when pipeline succeeds')
+ end
+ end
+ end
+ end
+
context 'when a user merges a merge request from a forked project to the parent project' do
before do
click_link("Overview")
@@ -351,7 +375,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when detached merge request pipeline succeeds' do
before do
- detached_merge_request_pipeline.succeed!
+ detached_merge_request_pipeline.reload.succeed!
wait_for_requests
end
@@ -364,7 +388,7 @@ RSpec.describe 'Merge request > User sees pipelines triggered by merge request',
context 'when branch pipeline succeeds' do
before do
- push_pipeline.succeed!
+ push_pipeline.reload.succeed!
wait_for_requests
end
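The pipeline hunks above call reload before firing the state transition, so succeed!/drop! run against the current database row rather than a stale in-memory record. Sketch of the pattern, pipeline names as in the spec:

  detached_merge_request_pipeline.reload.succeed!
  push_pipeline.reload.succeed!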
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index 05fa5459e06..0cb4107c21d 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe 'Merge request > User sees merge widget', :js do
end
it 'shows widget status after creating new merge request' do
- click_button 'Submit merge request'
+ click_button 'Create merge request'
wait_for_requests
diff --git a/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb b/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb
index c0dc2ec3baf..e5592ae9535 100644
--- a/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb
+++ b/spec/features/merge_request/user_sees_mini_pipeline_graph_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe 'Merge request < User sees mini pipeline graph', :js do
end
it 'displays a mini pipeline graph' do
- expect(page).to have_selector('.mr-widget-pipeline-graph')
+ expect(page).to have_selector('[data-testid="pipeline-mini-graph"]')
end
context 'as json' do
@@ -57,6 +57,10 @@ RSpec.describe 'Merge request < User sees mini pipeline graph', :js do
first(dropdown_selector)
end
+ before do
+ wait_for_requests
+ end
+
# Status icon button styles should update as described in
# https://gitlab.com/gitlab-org/gitlab-foss/issues/42769
it 'has unique styles for default, :hover, :active, and :focus states' do
diff --git a/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb b/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb
index ea46ae06329..b8b7fc2009f 100644
--- a/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb
+++ b/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb
@@ -28,8 +28,8 @@ RSpec.describe 'Merge request > User sees notes from forked project', :js do
page.within('.discussion-notes') do
find_field('Reply…').click
- scroll_to(page.find('#note_note', visible: false))
- find('#note_note').send_keys('A reply comment')
+ scroll_to(find_field('note[note]', visible: false))
+ fill_in 'note[note]', with: 'A reply comment'
find('.js-comment-button').click
end
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index 77d2cb77ae3..a5047c8d550 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
context 'with a detached merge request pipeline' do
let(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
- it 'displays the Run Pipeline button' do
+ it 'displays the "Run pipeline" button' do
visit project_merge_request_path(project, merge_request)
page.within('.merge-request-tabs') do
@@ -50,14 +50,14 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
wait_for_requests
- expect(page.find('[data-testid="run_pipeline_button"]')).to have_text('Run Pipeline')
+ expect(page.find('[data-testid="run_pipeline_button"]')).to have_text('Run pipeline')
end
end
context 'with a merged results pipeline' do
let(:merge_request) { create(:merge_request, :with_merge_request_pipeline) }
- it 'displays the Run Pipeline button' do
+ it 'displays the "Run pipeline" button' do
visit project_merge_request_path(project, merge_request)
page.within('.merge-request-tabs') do
@@ -66,7 +66,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
wait_for_requests
- expect(page.find('[data-testid="run_pipeline_button"]')).to have_text('Run Pipeline')
+ expect(page.find('[data-testid="run_pipeline_button"]')).to have_text('Run pipeline')
end
end
end
@@ -131,7 +131,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
visit project_merge_request_path(parent_project, merge_request)
create_merge_request_pipeline
- act_on_security_warning(action: 'Run Pipeline')
+ act_on_security_warning(action: 'Run pipeline')
check_pipeline(expected_project: parent_project)
check_head_pipeline(expected_project: parent_project)
@@ -175,7 +175,7 @@ RSpec.describe 'Merge request > User sees pipelines', :js do
def create_merge_request_pipeline
page.within('.merge-request-tabs') { click_link('Pipelines') }
- click_button('Run Pipeline')
+ click_button('Run pipeline')
end
def check_pipeline(expected_project:)
diff --git a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
index 9850ca3f173..275a87ca391 100644
--- a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
+++ b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
@@ -60,7 +60,7 @@ RSpec.describe 'Merge request > User selects branches for new MR', :js do
expect(page).to have_content "wm.png"
fill_in "merge_request_title", with: "Orphaned MR test"
- click_button "Submit merge request"
+ click_button "Create merge request"
click_button "Check out branch"
@@ -200,7 +200,7 @@ RSpec.describe 'Merge request > User selects branches for new MR', :js do
click_button "Compare branches"
- expect(page).to have_button("Submit merge request")
+ expect(page).to have_button("Create merge request")
end
end
end
diff --git a/spec/features/merge_request/user_squashes_merge_request_spec.rb b/spec/features/merge_request/user_squashes_merge_request_spec.rb
index 84964bd0637..15f59c0d7bc 100644
--- a/spec/features/merge_request/user_squashes_merge_request_spec.rb
+++ b/spec/features/merge_request/user_squashes_merge_request_spec.rb
@@ -92,7 +92,7 @@ RSpec.describe 'User squashes a merge request', :js do
before do
visit project_new_merge_request_path(project, merge_request: { target_branch: 'master', source_branch: source_branch })
check 'merge_request[squash]'
- click_on 'Submit merge request'
+ click_on 'Create merge request'
wait_for_requests
end
@@ -121,7 +121,7 @@ RSpec.describe 'User squashes a merge request', :js do
context 'when squash is not enabled on merge request creation', :sidekiq_might_not_need_inline do
before do
visit project_new_merge_request_path(project, merge_request: { target_branch: 'master', source_branch: source_branch })
- click_on 'Submit merge request'
+ click_on 'Create merge request'
wait_for_requests
end
diff --git a/spec/features/merge_request/user_views_open_merge_request_spec.rb b/spec/features/merge_request/user_views_open_merge_request_spec.rb
index 5f99d762ecb..f1b44010f63 100644
--- a/spec/features/merge_request/user_views_open_merge_request_spec.rb
+++ b/spec/features/merge_request/user_views_open_merge_request_spec.rb
@@ -125,7 +125,7 @@ RSpec.describe 'User views an open merge request' do
end
it 'encodes branch name' do
- expect(find('cite.ref-name')[:title]).to eq(source_branch)
+ expect(find("[data-testid='ref-name']")[:title]).to eq(source_branch)
end
end
end
diff --git a/spec/features/milestones/user_creates_milestone_spec.rb b/spec/features/milestones/user_creates_milestone_spec.rb
index d80796b8f9a..dd377aa4a26 100644
--- a/spec/features/milestones/user_creates_milestone_spec.rb
+++ b/spec/features/milestones/user_creates_milestone_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe "User creates milestone", :js do
end
it "creates milestone" do
- title = "v2.3".freeze
+ title = "v2.3"
fill_in("Title", with: title)
fill_in("Description", with: "# Description header")
diff --git a/spec/features/milestones/user_views_milestone_spec.rb b/spec/features/milestones/user_views_milestone_spec.rb
index 9c19f842427..8674d59afdf 100644
--- a/spec/features/milestones/user_views_milestone_spec.rb
+++ b/spec/features/milestones/user_views_milestone_spec.rb
@@ -98,7 +98,7 @@ RSpec.describe "User views milestone" do
visit(project_milestone_path(project, milestone))
within('.js-milestone-tabs') do
- click_link('Merge Requests')
+ click_link('Merge requests')
end
wait_for_requests
@@ -116,7 +116,7 @@ RSpec.describe "User views milestone" do
visit(group_milestone_path(group, group_milestone))
within('.js-milestone-tabs') do
- click_link('Merge Requests')
+ click_link('Merge requests')
end
expect(page.find('#tab-merge-requests')).to have_text(project.name)
diff --git a/spec/features/milestones/user_views_milestones_spec.rb b/spec/features/milestones/user_views_milestones_spec.rb
index f8b4b802a60..58439df92ba 100644
--- a/spec/features/milestones/user_views_milestones_spec.rb
+++ b/spec/features/milestones/user_views_milestones_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe "User views milestones" do
expect(page).to have_content(milestone.title)
.and have_content(milestone.expires_at)
.and have_content("Issues")
- .and have_content("Merge Requests")
+ .and have_content("Merge requests")
end
context "with issues", :js do
@@ -80,7 +80,6 @@ RSpec.describe "User views milestones with no MR" do
expect(page).to have_content(milestone.title)
.and have_content(milestone.expires_at)
.and have_content("Issues")
- .and have_no_content("Merge Requests")
end
it "opens milestone" do
diff --git a/spec/features/participants_autocomplete_spec.rb b/spec/features/participants_autocomplete_spec.rb
index b22778012a8..2781cfffbaf 100644
--- a/spec/features/participants_autocomplete_spec.rb
+++ b/spec/features/participants_autocomplete_spec.rb
@@ -3,9 +3,9 @@
require 'spec_helper'
RSpec.describe 'Member autocomplete', :js do
- let(:project) { create(:project, :public) }
- let(:user) { create(:user) }
- let(:author) { create(:user) }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:author) { create(:user) }
let(:note) { create(:note, noteable: noteable, project: noteable.project) }
before do
@@ -15,20 +15,16 @@ RSpec.describe 'Member autocomplete', :js do
shared_examples "open suggestions when typing @" do |resource_name|
before do
- page.within('.new-note') do
- if resource_name == 'commit'
- find('#note_note').send_keys('@')
- else
- find('#note-body').send_keys('@')
- end
+ if resource_name == 'commit'
+ fill_in 'note[note]', with: '@'
+ else
+ fill_in 'Comment', with: '@'
end
end
it 'suggests noteable author and note author' do
- page.within('.atwho-view', visible: true) do
- expect(page).to have_content(author.username)
- expect(page).to have_content(note.author.username)
- end
+ expect(find_autocomplete_menu).to have_text(author.username)
+ expect(find_autocomplete_menu).to have_text(note.author.username)
end
end
@@ -51,22 +47,17 @@ RSpec.describe 'Member autocomplete', :js do
stub_feature_flags(tribute_autocomplete: true)
visit project_issue_path(project, noteable)
- page.within('.new-note') do
- find('#note-body').send_keys('@')
- end
+ fill_in 'Comment', with: '@'
end
it 'suggests noteable author and note author' do
- page.within('.tribute-container', visible: true) do
- expect(page).to have_content(author.username)
- expect(page).to have_content(note.author.username)
- end
+ expect(find_tribute_autocomplete_menu).to have_content(author.username)
+ expect(find_tribute_autocomplete_menu).to have_content(note.author.username)
end
end
end
context 'adding a new note on a Merge Request' do
- let(:project) { create(:project, :public, :repository) }
let(:noteable) do
create(:merge_request, source_project: project,
target_project: project, author: author)
@@ -80,7 +71,6 @@ RSpec.describe 'Member autocomplete', :js do
end
context 'adding a new note on a Commit' do
- let(:project) { create(:project, :public, :repository) }
let(:noteable) { project.commit }
let(:note) { create(:note_on_commit, project: project, commit_id: project.commit.id) }
@@ -94,4 +84,14 @@ RSpec.describe 'Member autocomplete', :js do
include_examples "open suggestions when typing @", 'commit'
end
+
+ private
+
+ def find_autocomplete_menu
+ find('.atwho-view ul', visible: true)
+ end
+
+ def find_tribute_autocomplete_menu
+ find('.tribute-container ul', visible: true)
+ end
end
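The autocomplete spec above promotes shared fixtures to let_it_be and hides the menu lookups behind private helpers. A compact sketch of that shape, selectors copied from the diff:

  RSpec.describe 'Member autocomplete', :js do
    let_it_be(:project) { create(:project, :public, :repository) }  # built once for the whole file

    private

    def find_autocomplete_menu
      find('.atwho-view ul', visible: true)
    end
  end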
diff --git a/spec/features/profiles/emails_spec.rb b/spec/features/profiles/emails_spec.rb
index bdf1f8b022a..6b6f628e2d5 100644
--- a/spec/features/profiles/emails_spec.rb
+++ b/spec/features/profiles/emails_spec.rb
@@ -43,7 +43,7 @@ RSpec.describe 'Profile > Emails' do
end
it 'user removes email' do
- user.emails.create(email: 'my@email.com')
+ user.emails.create!(email: 'my@email.com')
visit profile_emails_path
expect(page).to have_content("my@email.com")
@@ -52,7 +52,7 @@ RSpec.describe 'Profile > Emails' do
end
it 'user confirms email' do
- email = user.emails.create(email: 'my@email.com')
+ email = user.emails.create!(email: 'my@email.com')
visit profile_emails_path
expect(page).to have_content("#{email.email} Unverified")
@@ -64,7 +64,7 @@ RSpec.describe 'Profile > Emails' do
end
it 'user re-sends confirmation email' do
- email = user.emails.create(email: 'my@email.com')
+ email = user.emails.create!(email: 'my@email.com')
visit profile_emails_path
expect { click_link("Resend confirmation email") }.to have_enqueued_job.on_queue('mailers')
@@ -72,7 +72,7 @@ RSpec.describe 'Profile > Emails' do
end
it 'old unconfirmed emails show Send Confirmation button' do
- email = user.emails.create(email: 'my@email.com')
+ email = user.emails.create!(email: 'my@email.com')
email.update_attribute(:confirmation_sent_at, nil)
visit profile_emails_path
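The create → create! changes above make the setup raise immediately when a record is invalid instead of silently returning an unsaved object. Sketch of the difference, using the email example from this spec:

  user.emails.create(email: 'my@email.com')    # invalid data yields an unpersisted record
  user.emails.create!(email: 'my@email.com')   # invalid data raises ActiveRecord::RecordInvalid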
diff --git a/spec/features/profiles/password_spec.rb b/spec/features/profiles/password_spec.rb
index 039966080d8..c9059395377 100644
--- a/spec/features/profiles/password_spec.rb
+++ b/spec/features/profiles/password_spec.rb
@@ -119,7 +119,7 @@ RSpec.describe 'Profile > Password' do
before do
sign_in(user)
- user.update(password_expires_at: 1.hour.ago)
+ user.update!(password_expires_at: 1.hour.ago)
user.identities.delete
expect(user.ldap_user?).to eq false
end
diff --git a/spec/features/profiles/personal_access_tokens_spec.rb b/spec/features/profiles/personal_access_tokens_spec.rb
index 9e56ef087ae..c85657c89d5 100644
--- a/spec/features/profiles/personal_access_tokens_spec.rb
+++ b/spec/features/profiles/personal_access_tokens_spec.rb
@@ -95,7 +95,7 @@ RSpec.describe 'Profile > Personal Access Tokens', :js do
end
it "removes expired tokens from 'active' section" do
- personal_access_token.update(expires_at: 5.days.ago)
+ personal_access_token.update!(expires_at: 5.days.ago)
visit profile_personal_access_tokens_path
expect(page).to have_selector(".settings-message")
diff --git a/spec/features/profiles/user_edit_profile_spec.rb b/spec/features/profiles/user_edit_profile_spec.rb
index bd4917824d1..57f7c7878e3 100644
--- a/spec/features/profiles/user_edit_profile_spec.rb
+++ b/spec/features/profiles/user_edit_profile_spec.rb
@@ -212,8 +212,10 @@ RSpec.describe 'User edit profile' do
end
it 'shows author as busy in the assignee dropdown' do
- find('.block.assignee .edit-link').click
- wait_for_requests
+ page.within('.assignee') do
+ click_button('Edit')
+ wait_for_requests
+ end
page.within '.dropdown-menu-user' do
expect(page).to have_content("#{user.name} (Busy)")
@@ -227,7 +229,7 @@ RSpec.describe 'User edit profile' do
visit project_issue_path(project, issue)
wait_for_requests
- expect(page.find('[data-testid="expanded-assignee"]')).to have_text("#{user.name} (Busy)")
+ expect(page.find('.issuable-assignees')).to have_content("#{user.name} (Busy)")
end
end
diff --git a/spec/features/profiles/user_search_settings_spec.rb b/spec/features/profiles/user_search_settings_spec.rb
index 60df0d7532b..64a8556e349 100644
--- a/spec/features/profiles/user_search_settings_spec.rb
+++ b/spec/features/profiles/user_search_settings_spec.rb
@@ -10,9 +10,11 @@ RSpec.describe 'User searches their settings', :js do
end
context 'in profile page' do
- let(:visit_path) { profile_path }
+ before do
+ visit profile_path
+ end
- it_behaves_like 'can search settings with feature flag check', 'Public Avatar', 'Main settings'
+ it_behaves_like 'can search settings', 'Public Avatar', 'Main settings'
end
context 'in preferences page' do
diff --git a/spec/features/projects/active_tabs_spec.rb b/spec/features/projects/active_tabs_spec.rb
index 86fe59f003f..9de43e7d18c 100644
--- a/spec/features/projects/active_tabs_spec.rb
+++ b/spec/features/projects/active_tabs_spec.rb
@@ -79,7 +79,7 @@ RSpec.describe 'Project active tab' do
visit project_merge_requests_path(project)
end
- it_behaves_like 'page has active tab', 'Merge Requests'
+ it_behaves_like 'page has active tab', 'Merge requests'
end
context 'on project Wiki' do
diff --git a/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb b/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
index a65a82fab43..1a368676a5e 100644
--- a/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
+++ b/spec/features/projects/blobs/blob_line_permalink_updater_spec.rb
@@ -44,17 +44,6 @@ RSpec.describe 'Blob button line permalinks (BlobLinePermalinkUpdater)', :js do
expect(find('.js-data-file-blob-permalink-url')['href']).to eq(get_absolute_url(project_blob_path(project, tree_join(sha, path), anchor: ending_fragment)))
end
- it 'changes fragment hash if icon inside line number link is clicked' do
- ending_fragment = "L7"
-
- visit_blob
-
- find("##{ending_fragment}").hover
- find("##{ending_fragment} svg").click
-
- expect(find('.js-data-file-blob-permalink-url')['href']).to eq(get_absolute_url(project_blob_path(project, tree_join(sha, path), anchor: ending_fragment)))
- end
-
it 'with initial fragment hash, changes fragment hash if line number clicked' do
fragment = "L1"
ending_fragment = "L5"
@@ -94,17 +83,6 @@ RSpec.describe 'Blob button line permalinks (BlobLinePermalinkUpdater)', :js do
expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: ending_fragment)))
end
- it 'changes fragment hash if icon inside line number link is clicked' do
- ending_fragment = "L7"
-
- visit_blob
-
- find("##{ending_fragment}").hover
- find("##{ending_fragment} svg").click
-
- expect(find('.js-blob-blame-link')['href']).to eq(get_absolute_url(project_blame_path(project, tree_join('master', path), anchor: ending_fragment)))
- end
-
it 'with initial fragment hash, changes fragment hash if line number clicked' do
fragment = "L1"
ending_fragment = "L5"
diff --git a/spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb b/spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb
new file mode 100644
index 00000000000..b872fa701c8
--- /dev/null
+++ b/spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'User views pipeline editor button on root ci config file', :js do
+ include BlobSpecHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, :repository) }
+
+ context "when the ci config is the root file" do
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ end
+
+ it 'shows the button to the Pipeline Editor' do
+ project.update!(ci_config_path: '.my-config.yml')
+ project.repository.create_file(user, project.ci_config_path_or_default, 'test', message: 'testing', branch_name: 'master')
+ visit project_blob_path(project, File.join('master', '.my-config.yml'))
+
+ expect(page).to have_content('Pipeline Editor')
+ end
+
+ it 'does not show the Pipeline Editor button' do
+ project.repository.create_file(user, '.my-sub-config.yml', 'test', message: 'testing', branch_name: 'master')
+ visit project_blob_path(project, File.join('master', '.my-sub-config.yml'))
+
+ expect(page).not_to have_content('Pipeline Editor')
+ end
+ end
+
+ context "when user cannot collaborate" do
+ before do
+ sign_in(user)
+ end
+ it 'does not show the Pipeline Editor button' do
+ visit project_blob_path(project, File.join('master', '.my-config.yml'))
+ expect(page).not_to have_content('Pipeline Editor')
+ end
+ end
+end
diff --git a/spec/features/projects/branches/user_creates_branch_spec.rb b/spec/features/projects/branches/user_creates_branch_spec.rb
index 52c860bfe36..18d083f7d88 100644
--- a/spec/features/projects/branches/user_creates_branch_spec.rb
+++ b/spec/features/projects/branches/user_creates_branch_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe "User creates branch", :js do
end
it "creates new branch" do
- branch_name = "deploy_keys".freeze
+ branch_name = "deploy_keys"
create_branch(branch_name)
@@ -25,7 +25,7 @@ RSpec.describe "User creates branch", :js do
context "when branch name is invalid" do
it "does not create new branch" do
- invalid_branch_name = "1.0 stable".freeze
+ invalid_branch_name = "1.0 stable"
fill_in("branch_name", with: invalid_branch_name)
page.find("body").click # defocus the branch_name input
diff --git a/spec/features/projects/branches/user_deletes_branch_spec.rb b/spec/features/projects/branches/user_deletes_branch_spec.rb
index c480c41709c..bebb4bb679b 100644
--- a/spec/features/projects/branches/user_deletes_branch_spec.rb
+++ b/spec/features/projects/branches/user_deletes_branch_spec.rb
@@ -9,12 +9,15 @@ RSpec.describe "User deletes branch", :js do
before do
project.add_developer(user)
sign_in(user)
-
- visit(project_branches_path(project))
end
it "deletes branch" do
- fill_in("branch-search", with: "improve/awesome").native.send_keys(:enter)
+ visit(project_branches_path(project))
+
+ branch_search = find('input[data-testid="branch-search"]')
+
+ branch_search.set('improve/awesome')
+ branch_search.native.send_keys(:enter)
page.within(".js-branch-improve\\/awesome") do
accept_alert { find(".btn-danger").click }
diff --git a/spec/features/projects/branches_spec.rb b/spec/features/projects/branches_spec.rb
index 4bfe8852291..f805416b03d 100644
--- a/spec/features/projects/branches_spec.rb
+++ b/spec/features/projects/branches_spec.rb
@@ -88,8 +88,10 @@ RSpec.describe 'Branches' do
it 'shows filtered branches', :js do
visit project_branches_path(project)
- fill_in 'branch-search', with: 'fix'
- find('#branch-search').native.send_keys(:enter)
+ branch_search = find('input[data-testid="branch-search"]')
+
+ branch_search.set('fix')
+ branch_search.native.send_keys(:enter)
expect(page).to have_content('fix')
expect(find('.all-branches')).to have_selector('li', count: 1)
@@ -114,20 +116,24 @@ RSpec.describe 'Branches' do
expect(page).to have_content(sorted_branches(repository, count: 20, sort_by: :updated_desc))
end
- it 'sorts the branches by name' do
+ it 'sorts the branches by name', :js do
visit project_branches_filtered_path(project, state: 'all')
click_button "Last updated" # Open sorting dropdown
- click_link "Name"
+ within '[data-testid="branches-dropdown"]' do
+ find('p', text: 'Name').click
+ end
expect(page).to have_content(sorted_branches(repository, count: 20, sort_by: :name))
end
- it 'sorts the branches by oldest updated' do
+ it 'sorts the branches by oldest updated', :js do
visit project_branches_filtered_path(project, state: 'all')
click_button "Last updated" # Open sorting dropdown
- click_link "Oldest updated"
+ within '[data-testid="branches-dropdown"]' do
+ find('p', text: 'Oldest updated').click
+ end
expect(page).to have_content(sorted_branches(repository, count: 20, sort_by: :updated_asc))
end
@@ -145,8 +151,10 @@ RSpec.describe 'Branches' do
it 'shows filtered branches', :js do
visit project_branches_filtered_path(project, state: 'all')
- fill_in 'branch-search', with: 'fix'
- find('#branch-search').native.send_keys(:enter)
+ branch_search = find('input[data-testid="branch-search"]')
+
+ branch_search.set('fix')
+ branch_search.native.send_keys(:enter)
expect(page).to have_content('fix')
expect(find('.all-branches')).to have_selector('li', count: 1)
@@ -157,9 +165,10 @@ RSpec.describe 'Branches' do
it 'removes branch after confirmation', :js do
visit project_branches_filtered_path(project, state: 'all')
- fill_in 'branch-search', with: 'fix'
+ branch_search = find('input[data-testid="branch-search"]')
- find('#branch-search').native.send_keys(:enter)
+ branch_search.set('fix')
+ branch_search.native.send_keys(:enter)
expect(page).to have_content('fix')
expect(find('.all-branches')).to have_selector('li', count: 1)
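The branch-list hunks above swap the id-based fill_in for a direct lookup of the data-testid input, submitting with the native enter key. Sketch of that interaction, selector as in the diff:

  branch_search = find('input[data-testid="branch-search"]')

  branch_search.set('fix')
  branch_search.native.send_keys(:enter)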
diff --git a/spec/features/projects/clusters/gcp_spec.rb b/spec/features/projects/clusters/gcp_spec.rb
index d34dde6a8f2..8c497cded8e 100644
--- a/spec/features/projects/clusters/gcp_spec.rb
+++ b/spec/features/projects/clusters/gcp_spec.rb
@@ -119,7 +119,7 @@ RSpec.describe 'Gcp Cluster', :js do
context 'when user disables the cluster' do
before do
page.find(:css, '.js-cluster-enable-toggle-area .js-project-feature-toggle').click
- page.within('.js-cluster-integration-form') { click_button 'Save changes' }
+ page.within('.js-cluster-details-form') { click_button 'Save changes' }
end
it 'user sees the successful message' do
diff --git a/spec/features/projects/clusters/user_spec.rb b/spec/features/projects/clusters/user_spec.rb
index 748eba558aa..5b60edbcf87 100644
--- a/spec/features/projects/clusters/user_spec.rb
+++ b/spec/features/projects/clusters/user_spec.rb
@@ -84,7 +84,7 @@ RSpec.describe 'User Cluster', :js do
context 'when user disables the cluster' do
before do
page.find(:css, '.js-cluster-enable-toggle-area .js-project-feature-toggle').click
- page.within('.js-cluster-integration-form') { click_button 'Save changes' }
+ page.within('.js-cluster-details-form') { click_button 'Save changes' }
end
it 'user sees the successful message' do
diff --git a/spec/features/projects/clusters_spec.rb b/spec/features/projects/clusters_spec.rb
index 6da66989b09..6b03301aa74 100644
--- a/spec/features/projects/clusters_spec.rb
+++ b/spec/features/projects/clusters_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe 'Clusters', :js do
before do
click_link 'default-cluster'
fill_in 'cluster_environment_scope', with: 'production/*'
- within '.js-cluster-integration-form' do
+ within '.js-cluster-details-form' do
click_button 'Save changes'
end
end
@@ -149,7 +149,7 @@ RSpec.describe 'Clusters', :js do
before do
click_link 'default-cluster'
fill_in 'cluster_environment_scope', with: 'production/*'
- within ".js-cluster-integration-form" do
+ within ".js-cluster-details-form" do
click_button 'Save changes'
end
end
diff --git a/spec/features/projects/commit/cherry_pick_spec.rb b/spec/features/projects/commit/cherry_pick_spec.rb
index 489a90cc8fc..cd944436228 100644
--- a/spec/features/projects/commit/cherry_pick_spec.rb
+++ b/spec/features/projects/commit/cherry_pick_spec.rb
@@ -91,7 +91,7 @@ RSpec.describe 'Cherry-pick Commits', :js do
context 'when the project is archived' do
let(:project) { create(:project, :repository, :archived, namespace: user.namespace) }
- it 'does not show the cherry-pick link' do
+ it 'does not show the cherry-pick button' do
open_dropdown
expect(page).not_to have_text("Cherry-pick")
@@ -106,12 +106,15 @@ RSpec.describe 'Cherry-pick Commits', :js do
end
def open_dropdown
- find('.header-action-buttons .dropdown').click
+ find(dropdown_selector).click
end
def open_modal
open_dropdown
- find('[data-testid="cherry-pick-commit-link"]').click
+
+ page.within(dropdown_selector) do
+ click_button 'Cherry-pick'
+ end
end
def submit_cherry_pick(create_merge_request: false)
@@ -121,6 +124,10 @@ RSpec.describe 'Cherry-pick Commits', :js do
end
end
+ def dropdown_selector
+ '[data-testid="commit-options-dropdown"]'
+ end
+
def modal_selector
'[data-testid="modal-commit"]'
end
diff --git a/spec/features/projects/commit/comments/user_edits_comments_spec.rb b/spec/features/projects/commit/comments/user_edits_comments_spec.rb
index 787d8cdb02b..8ac15c9cb7f 100644
--- a/spec/features/projects/commit/comments/user_edits_comments_spec.rb
+++ b/spec/features/projects/commit/comments/user_edits_comments_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe "User edits a comment on a commit", :js do
end
it "edits comment" do
- new_comment_text = "+1 Awesome!".freeze
+ new_comment_text = "+1 Awesome!"
page.within(".main-notes-list") do
note = find(".note")
diff --git a/spec/features/projects/commit/mini_pipeline_graph_spec.rb b/spec/features/projects/commit/mini_pipeline_graph_spec.rb
index 7d206f76031..6de02556175 100644
--- a/spec/features/projects/commit/mini_pipeline_graph_spec.rb
+++ b/spec/features/projects/commit/mini_pipeline_graph_spec.rb
@@ -16,46 +16,28 @@ RSpec.describe 'Mini Pipeline Graph in Commit View', :js do
let(:build) { create(:ci_build, pipeline: pipeline, status: :running) }
- shared_examples 'shows ci icon and mini pipeline' do
- before do
- build.run
- visit project_commit_path(project, project.commit.id)
- end
-
- it 'display icon with status' do
- expect(page).to have_selector('.ci-status-icon-running')
- end
-
- it 'displays a mini pipeline graph' do
- expect(page).to have_selector('.mr-widget-pipeline-graph')
-
- first('.mini-pipeline-graph-dropdown-toggle').click
-
- wait_for_requests
-
- page.within '.js-builds-dropdown-list' do
- expect(page).to have_selector('.ci-status-icon-running')
- expect(page).to have_content(build.stage)
- end
+ before do
+ build.run
+ visit project_commit_path(project, project.commit.id)
+ end
- build.drop
- end
+ it 'display icon with status' do
+ expect(page).to have_selector('.ci-status-icon-running')
end
- context 'when ci_commit_pipeline_mini_graph_vue is disabled' do
- before do
- stub_feature_flags(ci_commit_pipeline_mini_graph_vue: false)
- end
+ it 'displays a mini pipeline graph' do
+ expect(page).to have_selector('[data-testid="pipeline-mini-graph"]')
- it_behaves_like 'shows ci icon and mini pipeline'
- end
+ first('.mini-pipeline-graph-dropdown-toggle').click
- context 'when ci_commit_pipeline_mini_graph_vue is enabled' do
- before do
- stub_feature_flags(ci_commit_pipeline_mini_graph_vue: true)
+ wait_for_requests
+
+ page.within '.js-builds-dropdown-list' do
+ expect(page).to have_selector('.ci-status-icon-running')
+ expect(page).to have_content(build.stage)
end
- it_behaves_like 'shows ci icon and mini pipeline'
+ build.drop
end
end
@@ -65,7 +47,7 @@ RSpec.describe 'Mini Pipeline Graph in Commit View', :js do
end
it 'does not display a mini pipeline graph' do
- expect(page).not_to have_selector('.mr-widget-pipeline-graph')
+ expect(page).not_to have_selector('[data-testid="pipeline-mini-graph"]')
end
end
end
diff --git a/spec/features/projects/commit/user_reverts_commit_spec.rb b/spec/features/projects/commit/user_reverts_commit_spec.rb
index 72c639a027e..ad327b86aa7 100644
--- a/spec/features/projects/commit/user_reverts_commit_spec.rb
+++ b/spec/features/projects/commit/user_reverts_commit_spec.rb
@@ -62,10 +62,10 @@ RSpec.describe 'User reverts a commit', :js do
context 'when the project is archived' do
let(:project) { create(:project, :repository, :archived, namespace: user.namespace) }
- it 'does not show the revert link' do
+ it 'does not show the revert button' do
open_dropdown
- expect(page).not_to have_link('Revert')
+ expect(page).not_to have_button('Revert')
end
end
end
@@ -75,17 +75,24 @@ RSpec.describe 'User reverts a commit', :js do
page.within(modal_selector) do
uncheck('create_merge_request') unless create_merge_request
- click_button('Revert')
+ click_button 'Revert'
end
end
def open_dropdown
- find('.header-action-buttons .dropdown').click
+ find(dropdown_selector).click
end
def open_modal
open_dropdown
- find('[data-testid="revert-commit-link"]').click
+
+ page.within(dropdown_selector) do
+ click_button 'Revert'
+ end
+ end
+
+ def dropdown_selector
+ '[data-testid="commit-options-dropdown"]'
end
def modal_selector
diff --git a/spec/features/projects/commits/user_browses_commits_spec.rb b/spec/features/projects/commits/user_browses_commits_spec.rb
index 4894e2b7f3e..76162fb800a 100644
--- a/spec/features/projects/commits/user_browses_commits_spec.rb
+++ b/spec/features/projects/commits/user_browses_commits_spec.rb
@@ -20,9 +20,14 @@ RSpec.describe 'User browses commits' do
.and have_content('Side-by-side')
end
- it 'fill commit sha when click new tag from commit page' do
+ it 'fill commit sha when click new tag from commit page', :js do
+ dropdown_selector = '[data-testid="commit-options-dropdown"]'
visit project_commit_path(project, sample_commit.id)
- click_link 'Tag'
+ find(dropdown_selector).click
+
+ page.within(dropdown_selector) do
+ click_link 'Tag'
+ end
expect(page).to have_selector("input[value='#{sample_commit.id}']", visible: false)
end
diff --git a/spec/features/projects/features_visibility_spec.rb b/spec/features/projects/features_visibility_spec.rb
index c94247f65d2..ab82a4750d3 100644
--- a/spec/features/projects/features_visibility_spec.rb
+++ b/spec/features/projects/features_visibility_spec.rb
@@ -134,7 +134,7 @@ RSpec.describe 'Edit Project Settings' do
it 'renders 200 if user is member of group' do
group = create(:group)
project.group = group
- project.save
+ project.save!
group.add_owner(member)
diff --git a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
index 55b9f38d8e7..b0ccb5fca94 100644
--- a/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
+++ b/spec/features/projects/files/gitlab_ci_yml_dropdown_spec.rb
@@ -5,10 +5,14 @@ require 'spec_helper'
RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file', :js do
include Spec::Support::Helpers::Features::EditorLiteSpecHelpers
+ let(:params) { {} }
+ let(:filename) { '.gitlab-ci.yml' }
+
+ let_it_be(:project) { create(:project, :repository) }
+
before do
- project = create(:project, :repository)
sign_in project.owner
- visit project_new_blob_path(project, 'master', file_name: '.gitlab-ci.yml')
+ visit project_new_blob_path(project, 'master', file_name: filename, **params)
end
it 'user can pick a template from the dropdown' do
@@ -29,4 +33,38 @@ RSpec.describe 'Projects > Files > User wants to add a .gitlab-ci.yml file', :js
expect(editor_get_value).to have_content('This file is a template, and might need editing before it works on your project')
expect(editor_get_value).to have_content('jekyll build -d test')
end
+
+ context 'when template param is provided' do
+ let(:params) { { template: 'Jekyll' } }
+
+ it 'uses the given template' do
+ wait_for_requests
+
+ expect(page).to have_css('.gitlab-ci-yml-selector .dropdown-toggle-text', text: 'Apply a template')
+ expect(editor_get_value).to have_content('This file is a template, and might need editing before it works on your project')
+ expect(editor_get_value).to have_content('jekyll build -d test')
+ end
+ end
+
+ context 'when provided template param is not a valid template name' do
+ let(:params) { { template: 'non-existing-template' } }
+
+ it 'leaves the editor empty' do
+ wait_for_requests
+
+ expect(page).to have_css('.gitlab-ci-yml-selector .dropdown-toggle-text', text: 'Apply a template')
+ expect(editor_get_value).to have_content('')
+ end
+ end
+
+ context 'when template is not available for the given file' do
+ let(:filename) { 'Dockerfile' }
+ let(:params) { { template: 'Jekyll' } }
+
+ it 'leaves the editor empty' do
+ wait_for_requests
+
+ expect(editor_get_value).to have_content('')
+ end
+ end
end
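The new contexts above pre-select a CI template through a template query parameter on the new-blob path. A hedged sketch of the visit call, parameter and helper names as in the diff:

  visit project_new_blob_path(project, 'master', file_name: '.gitlab-ci.yml', template: 'Jekyll')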
diff --git a/spec/features/projects/files/user_creates_directory_spec.rb b/spec/features/projects/files/user_creates_directory_spec.rb
index f2074c78dba..46b93d738e1 100644
--- a/spec/features/projects/files/user_creates_directory_spec.rb
+++ b/spec/features/projects/files/user_creates_directory_spec.rb
@@ -77,7 +77,7 @@ RSpec.describe 'Projects > Files > User creates a directory', :js do
it 'creates the directory in the new branch and redirect to the merge request' do
expect(page).to have_content('new-feature')
expect(page).to have_content('The directory has been successfully created')
- expect(page).to have_content('New Merge Request')
+ expect(page).to have_content('New merge request')
expect(page).to have_content('From new-feature into master')
expect(page).to have_content('Add new directory')
diff --git a/spec/features/projects/files/user_uploads_files_spec.rb b/spec/features/projects/files/user_uploads_files_spec.rb
index 944d08df3f3..54e816d3d13 100644
--- a/spec/features/projects/files/user_uploads_files_spec.rb
+++ b/spec/features/projects/files/user_uploads_files_spec.rb
@@ -3,8 +3,6 @@
require 'spec_helper'
RSpec.describe 'Projects > Files > User uploads files' do
- include DropzoneHelper
-
let(:user) { create(:user) }
let(:project) { create(:project, :repository, name: 'Shop', creator: user) }
let(:project2) { create(:project, :repository, name: 'Another Project', path: 'another-project') }
@@ -17,36 +15,17 @@ RSpec.describe 'Projects > Files > User uploads files' do
context 'when a user has write access' do
before do
visit(project_tree_path(project))
- end
-
- include_examples 'it uploads and commit a new text file'
-
- include_examples 'it uploads and commit a new image file'
- it 'uploads a file to a sub-directory', :js do
- click_link 'files'
-
- page.within('.repo-breadcrumb') do
- expect(page).to have_content('files')
- end
-
- find('.add-to-tree').click
- click_link('Upload file')
- drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
+ wait_for_requests
+ end
- page.within('#modal-upload-blob') do
- fill_in(:commit_message, with: 'New commit message')
- end
+ include_examples 'it uploads and commits a new text file'
- click_button('Upload file')
+ include_examples 'it uploads and commits a new image file'
- expect(page).to have_content('New commit message')
+ include_examples 'it uploads and commits a new pdf file'
- page.within('.repo-breadcrumb') do
- expect(page).to have_content('files')
- expect(page).to have_content('doc_sample.txt')
- end
- end
+ include_examples 'it uploads a file to a sub-directory'
end
context 'when a user does not have write access' do
@@ -56,6 +35,6 @@ RSpec.describe 'Projects > Files > User uploads files' do
visit(project_tree_path(project2))
end
- include_examples 'it uploads and commit a new file to a forked project'
+ include_examples 'it uploads and commits a new file to a forked project'
end
end
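The upload spec above replaces inline scenarios with shared examples whose titles now read "uploads and commits". A minimal sketch of pulling one in (shared example title as in the diff):

  context 'when a user has write access' do
    before do
      visit(project_tree_path(project))
      wait_for_requests
    end

    include_examples 'it uploads and commits a new text file'
  end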
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
index 7abbd207b24..2b7ea70fe5a 100644
--- a/spec/features/projects/fork_spec.rb
+++ b/spec/features/projects/fork_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe 'Project fork' do
let(:forking_access_level) { ProjectFeature::PRIVATE }
before do
- project.update(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
end
context 'user is not a team member' do
@@ -118,6 +118,50 @@ RSpec.describe 'Project fork' do
it_behaves_like 'fork button on project page'
it_behaves_like 'create fork page', 'Fork project'
+ context 'fork form', :js do
+ let(:group) { create(:group) }
+ let(:user) { create(:group_member, :maintainer, user: create(:user), group: group ).user }
+
+ def submit_form
+ select(group.name)
+ click_button 'Fork project'
+ end
+
+ it 'forks the project', :sidekiq_might_not_need_inline do
+ visit new_project_fork_path(project)
+ submit_form
+
+ expect(page).to have_content 'Forked from'
+ end
+
+ it 'shows the new forked project on the forks page' do
+ visit new_project_fork_path(project)
+ submit_form
+ wait_for_requests
+
+ visit project_forks_path(project)
+
+ page.within('.js-projects-list-holder') do
+ expect(page).to have_content("#{group.name} / #{project.name}")
+ end
+ end
+
+ it 'shows the filled in info forked project on the forks page' do
+ fork_name = 'some-name'
+ visit new_project_fork_path(project)
+ fill_in('fork-name', with: fork_name, fill_options: { clear: :backspace })
+ fill_in('fork-slug', with: fork_name, fill_options: { clear: :backspace })
+ submit_form
+ wait_for_requests
+
+ visit project_forks_path(project)
+
+ page.within('.js-projects-list-holder') do
+ expect(page).to have_content("#{group.name} / #{fork_name}")
+ end
+ end
+ end
+
context 'with fork_project_form feature flag disabled' do
before do
stub_feature_flags(fork_project_form: false)
@@ -164,7 +208,7 @@ RSpec.describe 'Project fork' do
expect(page).to have_content(/new merge request/i)
page.within '.nav-sidebar' do
- first(:link, 'Merge Requests').click
+ first(:link, 'Merge requests').click
end
expect(page).to have_content(/new merge request/i)
diff --git a/spec/features/projects/jobs/permissions_spec.rb b/spec/features/projects/jobs/permissions_spec.rb
index e87880d74b1..140d5dee270 100644
--- a/spec/features/projects/jobs/permissions_spec.rb
+++ b/spec/features/projects/jobs/permissions_spec.rb
@@ -12,6 +12,8 @@ RSpec.describe 'Project Jobs Permissions' do
let_it_be(:job) { create(:ci_build, :running, :coverage, :trace_artifact, pipeline: pipeline) }
before do
+ stub_feature_flags(jobs_table_vue: false)
+
sign_in(user)
project.enable_ci
diff --git a/spec/features/projects/jobs/user_browses_jobs_spec.rb b/spec/features/projects/jobs/user_browses_jobs_spec.rb
index 5abebf2320e..dbcd7b5caf5 100644
--- a/spec/features/projects/jobs/user_browses_jobs_spec.rb
+++ b/spec/features/projects/jobs/user_browses_jobs_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe 'User browses jobs' do
let(:user) { create(:user) }
before do
+ stub_feature_flags(jobs_table_vue: false)
project.add_maintainer(user)
project.enable_ci
project.update_attribute(:build_coverage_regex, /Coverage (\d+)%/)
@@ -24,14 +25,6 @@ RSpec.describe 'User browses jobs' do
end
end
- it 'shows the "CI Lint" button' do
- page.within('.nav-controls') do
- ci_lint_tool_link = page.find_link('CI Lint')
-
- expect(ci_lint_tool_link[:href]).to end_with(project_ci_lint_path(project))
- end
- end
-
context 'with a failed job' do
let!(:build) { create(:ci_build, :coverage, :failed, pipeline: pipeline) }
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index 7811394b541..18a6ad12240 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
end
before do
+ stub_feature_flags(jobs_table_vue: false)
project.add_role(user, user_access_level)
sign_in(user)
end
@@ -32,7 +33,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
it 'shows the empty state page' do
expect(page).to have_content('Use jobs to automate your tasks')
- expect(page).to have_link('Create CI/CD configuration file', href: project.present(current_user: user).add_ci_yml_path)
+ expect(page).to have_link('Create CI/CD configuration file', href: project_ci_pipeline_editor_path(project))
end
end
@@ -1057,7 +1058,7 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state do
before do
job.run!
job.cancel!
- project.update(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
sign_out(:user)
sign_in(create(:user))
diff --git a/spec/features/projects/labels/user_removes_labels_spec.rb b/spec/features/projects/labels/user_removes_labels_spec.rb
index 217f86b92cf..11d73a56965 100644
--- a/spec/features/projects/labels/user_removes_labels_spec.rb
+++ b/spec/features/projects/labels/user_removes_labels_spec.rb
@@ -18,17 +18,17 @@ RSpec.describe "User removes labels" do
visit(project_labels_path(project))
end
- it "removes label" do
+ it "removes label", :js do
page.within(".other-labels") do
page.first(".label-list-item") do
first('.js-label-options-dropdown').click
- first(".remove-row").click
+ first('.js-delete-label-modal-button').click
end
+ end
- expect(page).to have_content("#{label.title} will be permanently deleted from #{project.name}. This cannot be undone.")
+ expect(page).to have_content("#{label.title} will be permanently deleted from #{project.name}. This cannot be undone.")
- first(:link, "Delete label").click
- end
+ first(:link, "Delete label").click
expect(page).to have_content("Label was removed").and have_no_content(label.title)
end
diff --git a/spec/features/projects/members/list_spec.rb b/spec/features/projects/members/list_spec.rb
index 0830585da9b..384b8ae9929 100644
--- a/spec/features/projects/members/list_spec.rb
+++ b/spec/features/projects/members/list_spec.rb
@@ -175,7 +175,7 @@ RSpec.describe 'Project members list', :js do
click_on 'Invite members'
page.within '#invite-members-modal' do
- fill_in 'Search for members to invite', with: id
+ fill_in 'Select members or type email addresses', with: id
wait_for_requests
click_button id
diff --git a/spec/features/projects/members/user_requests_access_spec.rb b/spec/features/projects/members/user_requests_access_spec.rb
index a339130ee3c..7073741a92d 100644
--- a/spec/features/projects/members/user_requests_access_spec.rb
+++ b/spec/features/projects/members/user_requests_access_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'Projects > Members > User requests access', :js do
end
it 'request access feature is disabled' do
- project.update(request_access_enabled: false)
+ project.update!(request_access_enabled: false)
visit project_path(project)
expect(page).not_to have_content 'Request Access'
diff --git a/spec/features/projects/merge_request_button_spec.rb b/spec/features/projects/merge_request_button_spec.rb
index 9547ba8a390..93bbabcc3f8 100644
--- a/spec/features/projects/merge_request_button_spec.rb
+++ b/spec/features/projects/merge_request_button_spec.rb
@@ -14,7 +14,9 @@ RSpec.describe 'Merge Request button' do
it 'does not show Create merge request button' do
visit url
- expect(page).not_to have_link(label)
+ within '.content-wrapper' do
+ expect(page).not_to have_link(label)
+ end
end
end
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index 4ff3827b240..7dc3ee63669 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe 'Project navbar' do
before do
insert_package_nav(_('Operations'))
+ insert_infrastructure_registry_nav
+ stub_config(registry: { enabled: false })
project.add_maintainer(user)
sign_in(user)
@@ -60,7 +62,7 @@ RSpec.describe 'Project navbar' do
before do
stub_config(registry: { enabled: true })
- insert_container_nav(_('Operations'))
+ insert_container_nav
visit project_path(project)
end
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index ec34640bd00..7119039d5ff 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -12,6 +12,72 @@ RSpec.describe 'New project', :js do
sign_in(user)
end
+ context 'new repo experiment', :experiment do
+ it 'when in control renders "project"' do
+ stub_experiments(new_repo: :control)
+
+ visit new_project_path
+
+ find('li.header-new.dropdown').click
+
+ page.within('li.header-new.dropdown') do
+ expect(page).to have_selector('a', text: 'New project')
+ expect(page).to have_no_selector('a', text: 'New project/repository')
+ end
+
+ expect(page).to have_selector('.blank-state-title', text: 'Create blank project')
+ expect(page).to have_no_selector('.blank-state-title', text: 'Create blank project/repository')
+ end
+
+ it 'when in candidate renders "project/repository"' do
+ stub_experiments(new_repo: :candidate)
+
+ visit new_project_path
+
+ find('li.header-new.dropdown').click
+
+ page.within('li.header-new.dropdown') do
+ expect(page).to have_selector('a', text: 'New project/repository')
+ end
+
+ expect(page).to have_selector('.blank-state-title', text: 'Create blank project/repository')
+ end
+
+ context 'with combined_menu feature disabled' do
+ before do
+ stub_feature_flags(combined_menu: false)
+ end
+
+ it 'when in control it renders "project" in the new projects dropdown' do
+ stub_experiments(new_repo: :control)
+
+ visit new_project_path
+
+ find('#nav-projects-dropdown').click
+
+ page.within('#nav-projects-dropdown') do
+ expect(page).to have_selector('a', text: 'Create blank project')
+ expect(page).to have_selector('a', text: 'Import project')
+ expect(page).to have_no_selector('a', text: 'Create blank project/repository')
+ expect(page).to have_no_selector('a', text: 'Import project/repository')
+ end
+ end
+
+ it 'when in candidate it renders "project/repository" in the new projects dropdown' do
+ stub_experiments(new_repo: :candidate)
+
+ visit new_project_path
+
+ find('#nav-projects-dropdown').click
+
+ page.within('#nav-projects-dropdown') do
+ expect(page).to have_selector('a', text: 'Create blank project/repository')
+ expect(page).to have_selector('a', text: 'Import project/repository')
+ end
+ end
+ end
+ end
+
it 'shows a message if multiple levels are restricted' do
Gitlab::CurrentSettings.update!(
restricted_visibility_levels: [Gitlab::VisibilityLevel::PRIVATE, Gitlab::VisibilityLevel::INTERNAL]
diff --git a/spec/features/projects/pages/user_edits_settings_spec.rb b/spec/features/projects/pages/user_edits_settings_spec.rb
index 6156b5243de..412ba17cf20 100644
--- a/spec/features/projects/pages/user_edits_settings_spec.rb
+++ b/spec/features/projects/pages/user_edits_settings_spec.rb
@@ -175,7 +175,6 @@ RSpec.describe 'Pages edits pages settings', :js do
expect(page).not_to have_field(:project_pages_https_only)
expect(page).not_to have_content('Force HTTPS (requires valid certificates)')
- expect(page).to have_button('Save')
end
end
end
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index 94800717677..4a0581bb5cf 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -566,7 +566,7 @@ RSpec.describe 'Pipeline', :js do
end
before do
- pipeline.update(user: user)
+ pipeline.update!(user: user)
end
it 'shows the pipeline information' do
@@ -628,7 +628,7 @@ RSpec.describe 'Pipeline', :js do
context 'when user does not have access to read jobs' do
before do
- project.update(public_builds: false)
+ project.update!(public_builds: false)
end
describe 'GET /:project/-/pipelines/:id' do
@@ -709,9 +709,9 @@ RSpec.describe 'Pipeline', :js do
end
end
- it 'displays the PipelineSchedule in an active state' do
+ it 'displays the PipelineSchedule in an inactive state' do
visit project_pipeline_schedules_path(project)
- page.click_link('Active')
+ page.click_link('Inactive')
expect(page).to have_selector('table.ci-table > tbody > tr > td', text: 'blocked user schedule')
end
@@ -1185,7 +1185,7 @@ RSpec.describe 'Pipeline', :js do
let(:role) { :guest }
before do
- project.update(public_builds: false)
+ project.update!(public_builds: false)
end
context 'when accessing failed jobs page' do
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 9037aa5c9a8..e375bc10dbf 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -14,7 +14,6 @@ RSpec.describe 'Pipelines', :js do
sign_in(user)
stub_feature_flags(graphql_pipeline_details: false)
stub_feature_flags(graphql_pipeline_details_users: false)
- stub_feature_flags(new_pipelines_table: false)
project.add_developer(user)
project.update!(auto_devops_attributes: { enabled: false })
@@ -94,12 +93,12 @@ RSpec.describe 'Pipelines', :js do
wait_for_requests
end
- it 'renders run pipeline link' do
- expect(page).to have_link('Run Pipeline')
+ it 'renders "CI lint" link' do
+ expect(page).to have_link('CI lint')
end
- it 'renders ci lint link' do
- expect(page).to have_link('CI Lint')
+ it 'renders "Run pipeline" link' do
+ expect(page).to have_link('Run pipeline')
end
end
@@ -534,7 +533,7 @@ RSpec.describe 'Pipelines', :js do
end
it 'renders a mini pipeline graph' do
- expect(page).to have_selector('[data-testid="widget-mini-pipeline-graph"]')
+ expect(page).to have_selector('[data-testid="pipeline-mini-graph"]')
expect(page).to have_selector(dropdown_selector)
end
@@ -677,7 +676,7 @@ RSpec.describe 'Pipelines', :js do
end
it 'creates a new pipeline' do
- expect { click_on 'Run Pipeline' }
+ expect { click_on 'Run pipeline' }
.to change { Ci::Pipeline.count }.by(1)
expect(Ci::Pipeline.last).to be_web
@@ -690,7 +689,7 @@ RSpec.describe 'Pipelines', :js do
fill_in "Input variable value", with: "value"
end
- expect { click_on 'Run Pipeline' }
+ expect { click_on 'Run pipeline' }
.to change { Ci::Pipeline.count }.by(1)
expect(Ci::Pipeline.last.variables.map { |var| var.slice(:key, :secret_value) })
@@ -701,7 +700,7 @@ RSpec.describe 'Pipelines', :js do
context 'without gitlab-ci.yml' do
before do
- click_on 'Run Pipeline'
+ click_on 'Run pipeline'
end
it { expect(page).to have_content('Missing CI config file') }
@@ -714,44 +713,13 @@ RSpec.describe 'Pipelines', :js do
click_link 'master'
end
- expect { click_on 'Run Pipeline' }
+ expect { click_on 'Run pipeline' }
.to change { Ci::Pipeline.count }.by(1)
end
end
end
end
- describe 'Run Pipelines' do
- let(:project) { create(:project, :repository) }
-
- before do
- stub_feature_flags(new_pipeline_form: false)
- visit new_project_pipeline_path(project)
- end
-
- describe 'new pipeline page' do
- it 'has field to add a new pipeline' do
- expect(page).to have_selector('.js-branch-select')
- expect(find('.js-branch-select')).to have_content project.default_branch
- expect(page).to have_content('Run for')
- end
- end
-
- describe 'find pipelines' do
- it 'shows filtered pipelines', :js do
- click_button project.default_branch
-
- page.within '.dropdown-menu' do
- find('.dropdown-input-field').native.send_keys('fix')
-
- page.within '.dropdown-content' do
- expect(page).to have_content('fix')
- end
- end
- end
- end
- end
-
describe 'Reset runner caches' do
let(:project) { create(:project, :repository) }
@@ -762,17 +730,17 @@ RSpec.describe 'Pipelines', :js do
end
it 'has a clear caches button' do
- expect(page).to have_button 'Clear Runner Caches'
+ expect(page).to have_button 'Clear runner caches'
end
describe 'user clicks the button' do
context 'when project already has jobs_cache_index' do
before do
- project.update(jobs_cache_index: 1)
+ project.update!(jobs_cache_index: 1)
end
it 'increments jobs_cache_index' do
- click_button 'Clear Runner Caches'
+ click_button 'Clear runner caches'
wait_for_requests
expect(page.find('.flash-notice')).to have_content 'Project cache successfully reset.'
end
@@ -780,7 +748,7 @@ RSpec.describe 'Pipelines', :js do
context 'when project does not have jobs_cache_index' do
it 'sets jobs_cache_index to 1' do
- click_button 'Clear Runner Caches'
+ click_button 'Clear runner caches'
wait_for_requests
expect(page.find('.flash-notice')).to have_content 'Project cache successfully reset.'
end
@@ -788,6 +756,37 @@ RSpec.describe 'Pipelines', :js do
end
end
+ describe 'Run Pipelines' do
+ let(:project) { create(:project, :repository) }
+
+ before do
+ stub_feature_flags(new_pipeline_form: false)
+ visit new_project_pipeline_path(project)
+ end
+
+ describe 'new pipeline page' do
+ it 'has field to add a new pipeline' do
+ expect(page).to have_selector('.js-branch-select')
+ expect(find('.js-branch-select')).to have_content project.default_branch
+ expect(page).to have_content('Run for')
+ end
+ end
+
+ describe 'find pipelines' do
+ it 'shows filtered pipelines', :js do
+ click_button project.default_branch
+
+ page.within '.dropdown-menu' do
+ find('.dropdown-input-field').native.send_keys('fix')
+
+ page.within '.dropdown-content' do
+ expect(page).to have_content('fix')
+ end
+ end
+ end
+ end
+ end
+
describe 'Empty State' do
let(:project) { create(:project, :repository) }
diff --git a/spec/features/projects/releases/user_views_edit_release_spec.rb b/spec/features/projects/releases/user_views_edit_release_spec.rb
index bb54b6be9c4..024c0a227c5 100644
--- a/spec/features/projects/releases/user_views_edit_release_spec.rb
+++ b/spec/features/projects/releases/user_views_edit_release_spec.rb
@@ -37,7 +37,7 @@ RSpec.describe 'User edits Release', :js do
end
it 'renders the edit Release form' do
- expect(page).to have_content('Releases are based on Git tags. We recommend tags that use semantic versioning, for example v1.0, v2.0-pre.')
+ expect(page).to have_content('Releases are based on Git tags. We recommend tags that use semantic versioning, for example v1.0.0, v2.1.0-pre.')
expect(find_field('Tag name', disabled: true).value).to eq(release.tag)
expect(find_field('Release title').value).to eq(release.name)
diff --git a/spec/features/projects/releases/user_views_release_spec.rb b/spec/features/projects/releases/user_views_release_spec.rb
index 186122536ce..4410f345e56 100644
--- a/spec/features/projects/releases/user_views_release_spec.rb
+++ b/spec/features/projects/releases/user_views_release_spec.rb
@@ -5,7 +5,6 @@ require 'spec_helper'
RSpec.describe 'User views Release', :js do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
- let(:graphql_feature_flag) { true }
let(:release) do
create(:release,
@@ -15,8 +14,6 @@ RSpec.describe 'User views Release', :js do
end
before do
- stub_feature_flags(graphql_individual_release_page: graphql_feature_flag)
-
project.add_developer(user)
sign_in(user)
@@ -26,35 +23,23 @@ RSpec.describe 'User views Release', :js do
it_behaves_like 'page meta description', 'Lorem ipsum dolor sit amet'
- shared_examples 'release page' do
- it 'renders the breadcrumbs' do
- within('.breadcrumbs') do
- expect(page).to have_content("#{project.creator.name} #{project.name} Releases #{release.name}")
-
- expect(page).to have_link(project.creator.name, href: user_path(project.creator))
- expect(page).to have_link(project.name, href: project_path(project))
- expect(page).to have_link('Releases', href: project_releases_path(project))
- expect(page).to have_link(release.name, href: project_release_path(project, release))
- end
- end
+ it 'renders the breadcrumbs' do
+ within('.breadcrumbs') do
+ expect(page).to have_content("#{project.creator.name} #{project.name} Releases #{release.name}")
- it 'renders the release details' do
- within('.release-block') do
- expect(page).to have_content(release.name)
- expect(page).to have_content(release.tag)
- expect(page).to have_content(release.commit.short_id)
- expect(page).to have_content('Lorem ipsum dolor sit amet')
- end
+ expect(page).to have_link(project.creator.name, href: user_path(project.creator))
+ expect(page).to have_link(project.name, href: project_path(project))
+ expect(page).to have_link('Releases', href: project_releases_path(project))
+ expect(page).to have_link(release.name, href: project_release_path(project, release))
end
end
- describe 'when the graphql_individual_release_page feature flag is enabled' do
- it_behaves_like 'release page'
- end
-
- describe 'when the graphql_individual_release_page feature flag is disabled' do
- let(:graphql_feature_flag) { false }
-
- it_behaves_like 'release page'
+ it 'renders the release details' do
+ within('.release-block') do
+ expect(page).to have_content(release.name)
+ expect(page).to have_content(release.tag)
+ expect(page).to have_content(release.commit.short_id)
+ expect(page).to have_content('Lorem ipsum dolor sit amet')
+ end
end
end
diff --git a/spec/features/projects/remote_mirror_spec.rb b/spec/features/projects/remote_mirror_spec.rb
index 26d27c914cc..7bbffe627f6 100644
--- a/spec/features/projects/remote_mirror_spec.rb
+++ b/spec/features/projects/remote_mirror_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Project remote mirror', :feature do
context 'when last_error is present but last_update_at is not' do
it 'renders error message without timestamp' do
- remote_mirror.update(last_error: 'Some new error', last_update_at: nil)
+ remote_mirror.update!(last_error: 'Some new error', last_update_at: nil)
visit project_mirror_path(project)
@@ -25,7 +25,7 @@ RSpec.describe 'Project remote mirror', :feature do
context 'when last_error and last_update_at are present' do
it 'renders error message with timestamp' do
- remote_mirror.update(last_error: 'Some new error', last_update_at: Time.now - 5.minutes)
+ remote_mirror.update!(last_error: 'Some new error', last_update_at: Time.now - 5.minutes)
visit project_mirror_path(project)
diff --git a/spec/features/projects/services/disable_triggers_spec.rb b/spec/features/projects/services/disable_triggers_spec.rb
index b3a3d7f0622..d9e200cf563 100644
--- a/spec/features/projects/services/disable_triggers_spec.rb
+++ b/spec/features/projects/services/disable_triggers_spec.rb
@@ -12,10 +12,10 @@ RSpec.describe 'Disable individual triggers', :js do
end
context 'service has multiple supported events' do
- let(:service_name) { 'HipChat' }
+ let(:service_name) { 'Jenkins' }
it 'shows trigger checkboxes' do
- event_count = HipchatService.supported_events.count
+ event_count = JenkinsService.supported_events.count
expect(page).to have_content "Trigger"
expect(page).to have_css(checkbox_selector, visible: :all, count: event_count)
diff --git a/spec/features/projects/services/user_activates_asana_spec.rb b/spec/features/projects/services/user_activates_asana_spec.rb
index e95e7e89fc2..cf2290383e8 100644
--- a/spec/features/projects/services/user_activates_asana_spec.rb
+++ b/spec/features/projects/services/user_activates_asana_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe 'User activates Asana' do
it 'activates service', :js do
visit_project_integration('Asana')
- fill_in('Api key', with: 'verySecret')
+ fill_in('API key', with: 'verySecret')
fill_in('Restrict to branch', with: 'verySecret')
click_test_then_save_integration
diff --git a/spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb b/spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb
index a9d91454670..91db375be3a 100644
--- a/spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb
+++ b/spec/features/projects/services/user_activates_atlassian_bamboo_ci_spec.rb
@@ -10,19 +10,20 @@ RSpec.describe 'User activates Atlassian Bamboo CI' do
end
it 'activates service', :js do
- visit_project_integration('Atlassian Bamboo CI')
- fill_in('Bamboo url', with: 'http://bamboo.example.com')
+ visit_project_integration('Atlassian Bamboo')
+ fill_in('Bamboo URL', with: 'http://bamboo.example.com')
fill_in('Build key', with: 'KEY')
fill_in('Username', with: 'user')
fill_in('Password', with: 'verySecret')
click_test_then_save_integration(expect_test_to_fail: false)
- expect(page).to have_content('Atlassian Bamboo CI settings saved and active.')
+ expect(page).to have_content('Atlassian Bamboo settings saved and active.')
# Password field should not be filled in.
- click_link('Atlassian Bamboo CI')
+ click_link('Atlassian Bamboo')
- expect(find_field('Enter new Password').value).to be_blank
+ expect(find_field('Enter new password').value).to be_blank
+ expect(page).to have_content('Leave blank to use your current password')
end
end
diff --git a/spec/features/projects/services/user_activates_hipchat_spec.rb b/spec/features/projects/services/user_activates_hipchat_spec.rb
deleted file mode 100644
index cffb780e05d..00000000000
--- a/spec/features/projects/services/user_activates_hipchat_spec.rb
+++ /dev/null
@@ -1,40 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'User activates HipChat', :js do
- include_context 'project service activation'
-
- context 'with standard settings' do
- before do
- stub_request(:post, /.*api.hipchat.com.*/)
- end
-
- it 'activates service' do
- visit_project_integration('HipChat')
- fill_in('Room', with: 'gitlab')
- fill_in('Token', with: 'verySecret')
-
- click_test_then_save_integration(expect_test_to_fail: false)
-
- expect(page).to have_content('HipChat settings saved and active.')
- end
- end
-
- context 'with custom settings' do
- before do
- stub_request(:post, /.*chat.example.com.*/)
- end
-
- it 'activates service' do
- visit_project_integration('HipChat')
- fill_in('Room', with: 'gitlab_custom')
- fill_in('Token', with: 'secretCustom')
- fill_in('Server', with: 'https://chat.example.com')
-
- click_test_then_save_integration(expect_test_to_fail: false)
-
- expect(page).to have_content('HipChat settings saved and active.')
- end
- end
-end
diff --git a/spec/features/projects/services/user_activates_jetbrains_teamcity_ci_spec.rb b/spec/features/projects/services/user_activates_jetbrains_teamcity_ci_spec.rb
index 72881054c6c..17bfe8fc1e2 100644
--- a/spec/features/projects/services/user_activates_jetbrains_teamcity_ci_spec.rb
+++ b/spec/features/projects/services/user_activates_jetbrains_teamcity_ci_spec.rb
@@ -10,16 +10,16 @@ RSpec.describe 'User activates JetBrains TeamCity CI' do
end
it 'activates service', :js do
- visit_project_integration('JetBrains TeamCity CI')
+ visit_project_integration('JetBrains TeamCity')
check('Push')
check('Merge Request')
- fill_in('Teamcity url', with: 'http://teamcity.example.com')
+ fill_in('TeamCity server URL', with: 'http://teamcity.example.com')
fill_in('Build type', with: 'GitlabTest_Build')
fill_in('Username', with: 'user')
fill_in('Password', with: 'verySecret')
click_test_then_save_integration(expect_test_to_fail: false)
- expect(page).to have_content('JetBrains TeamCity CI settings saved and active.')
+ expect(page).to have_content('JetBrains TeamCity settings saved and active.')
end
end
diff --git a/spec/features/projects/services/user_activates_jira_spec.rb b/spec/features/projects/services/user_activates_jira_spec.rb
index 85afc54be48..10f84aae93f 100644
--- a/spec/features/projects/services/user_activates_jira_spec.rb
+++ b/spec/features/projects/services/user_activates_jira_spec.rb
@@ -6,12 +6,13 @@ RSpec.describe 'User activates Jira', :js do
include_context 'project service activation'
include_context 'project service Jira context'
+ before do
+ stub_request(:get, test_url).to_return(body: { key: 'value' }.to_json)
+ end
+
describe 'user tests Jira Service' do
context 'when Jira connection test succeeds' do
before do
- server_info = { key: 'value' }.to_json
- stub_request(:get, test_url).with(basic_auth: %w(username password)).to_return(body: server_info)
-
visit_project_integration('Jira')
fill_form
click_test_then_save_integration(expect_test_to_fail: false)
@@ -81,4 +82,68 @@ RSpec.describe 'User activates Jira', :js do
end
end
end
+
+ describe 'issue transition settings' do
+ it 'using custom transitions' do
+ visit_project_integration('Jira')
+
+ expect(page).to have_field('Enable Jira transitions', checked: false)
+
+ check 'Enable Jira transitions'
+
+ expect(page).to have_field('Move to Done', checked: true)
+
+ fill_form
+ choose 'Use custom transitions'
+ click_save_integration
+
+ within '[data-testid="issue-transition-mode"]' do
+ expect(page).to have_content('This field is required.')
+ end
+
+ fill_in 'service[jira_issue_transition_id]', with: '1, 2, 3'
+ click_save_integration
+
+ expect(page).to have_content('Jira settings saved and active.')
+ expect(project.reload.jira_service.data_fields).to have_attributes(
+ jira_issue_transition_automatic: false,
+ jira_issue_transition_id: '1, 2, 3'
+ )
+ end
+
+ it 'using automatic transitions' do
+ create(:jira_service, project: project, jira_issue_transition_automatic: false, jira_issue_transition_id: '1, 2, 3')
+ visit_project_integration('Jira')
+
+ expect(page).to have_field('Enable Jira transitions', checked: true)
+ expect(page).to have_field('Use custom transitions', checked: true)
+ expect(page).to have_field('service[jira_issue_transition_id]', with: '1, 2, 3')
+
+ choose 'Move to Done'
+ click_save_integration
+
+ expect(page).to have_content('Jira settings saved and active.')
+ expect(project.reload.jira_service.data_fields).to have_attributes(
+ jira_issue_transition_automatic: true,
+ jira_issue_transition_id: ''
+ )
+ end
+
+ it 'disabling issue transitions' do
+ create(:jira_service, project: project, jira_issue_transition_automatic: true, jira_issue_transition_id: '1, 2, 3')
+ visit_project_integration('Jira')
+
+ expect(page).to have_field('Enable Jira transitions', checked: true)
+ expect(page).to have_field('Move to Done', checked: true)
+
+ uncheck 'Enable Jira transitions'
+ click_save_integration
+
+ expect(page).to have_content('Jira settings saved and active.')
+ expect(project.reload.jira_service.data_fields).to have_attributes(
+ jira_issue_transition_automatic: false,
+ jira_issue_transition_id: ''
+ )
+ end
+ end
end
diff --git a/spec/features/projects/services/user_activates_mattermost_slash_command_spec.rb b/spec/features/projects/services/user_activates_mattermost_slash_command_spec.rb
index 88812fc188b..54a501e89a2 100644
--- a/spec/features/projects/services/user_activates_mattermost_slash_command_spec.rb
+++ b/spec/features/projects/services/user_activates_mattermost_slash_command_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Set up Mattermost slash commands', :js do
let(:mattermost_enabled) { true }
it 'shows a help message' do
- expect(page).to have_content("This service allows users to perform common")
+ expect(page).to have_content("Use this service to perform common")
end
it 'shows a token placeholder' do
diff --git a/spec/features/projects/services/user_activates_pushover_spec.rb b/spec/features/projects/services/user_activates_pushover_spec.rb
index 3cfd069032a..97003ab7c2a 100644
--- a/spec/features/projects/services/user_activates_pushover_spec.rb
+++ b/spec/features/projects/services/user_activates_pushover_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'User activates Pushover' do
it 'activates service', :js do
visit_project_integration('Pushover')
- fill_in('Api key', with: 'verySecret')
+ fill_in('API key', with: 'verySecret')
fill_in('User key', with: 'verySecret')
fill_in('Device', with: 'myDevice')
select('High Priority', from: 'Priority')
diff --git a/spec/features/projects/services/user_activates_slack_notifications_spec.rb b/spec/features/projects/services/user_activates_slack_notifications_spec.rb
index 2a880e05e0f..0cba1ee1c4c 100644
--- a/spec/features/projects/services/user_activates_slack_notifications_spec.rb
+++ b/spec/features/projects/services/user_activates_slack_notifications_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'User activates Slack notifications', :js do
before do
service.fields
- service.update(
+ service.update!(
push_channel: 1,
issue_channel: 2,
merge_request_channel: 3,
diff --git a/spec/features/projects/services/user_views_services_spec.rb b/spec/features/projects/services/user_views_services_spec.rb
index fef6b7bd991..b936a7f38f6 100644
--- a/spec/features/projects/services/user_views_services_spec.rb
+++ b/spec/features/projects/services/user_views_services_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User views services' do
+RSpec.describe 'User views services', :js do
include_context 'project service activation'
it 'shows the list of available services' do
@@ -10,7 +10,7 @@ RSpec.describe 'User views services' do
expect(page).to have_content('Integrations')
expect(page).to have_content('Campfire')
- expect(page).to have_content('HipChat')
+ expect(page).to have_content('Jira')
expect(page).to have_content('Assembla')
expect(page).to have_content('Pushover')
expect(page).to have_content('Atlassian Bamboo')
diff --git a/spec/features/projects/settings/access_tokens_spec.rb b/spec/features/projects/settings/access_tokens_spec.rb
index 45fe19deb8e..8083c851bb7 100644
--- a/spec/features/projects/settings/access_tokens_spec.rb
+++ b/spec/features/projects/settings/access_tokens_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe 'Project > Settings > Access Tokens', :js do
let_it_be(:user) { create(:user) }
let_it_be(:bot_user) { create(:user, :project_bot) }
- let_it_be(:project) { create(:project) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
before_all do
project.add_maintainer(user)
@@ -33,6 +34,18 @@ RSpec.describe 'Project > Settings > Access Tokens', :js do
find('#created-personal-access-token').value
end
+ context 'when user is not a project maintainer' do
+ before do
+ project.add_developer(user)
+ end
+
+ it 'does not show project access token page' do
+ visit project_settings_access_tokens_path(project)
+
+ expect(page).to have_content("Page Not Found")
+ end
+ end
+
describe 'token creation' do
it 'allows creation of a project access token' do
name = 'My project access token'
@@ -57,6 +70,81 @@ RSpec.describe 'Project > Settings > Access Tokens', :js do
expect(active_project_access_tokens).to have_text('read_api')
expect(created_project_access_token).not_to be_empty
end
+
+ context 'when token creation is not allowed' do
+ before do
+ group.namespace_settings.update_column(:resource_access_token_creation_allowed, false)
+ end
+
+ it 'does not show project access token creation form' do
+ visit project_settings_access_tokens_path(project)
+
+ expect(page).not_to have_selector('#new_project_access_token')
+ end
+
+ it 'shows project access token creation disabled text' do
+ visit project_settings_access_tokens_path(project)
+
+ expect(page).to have_text('Project access token creation is disabled in this group. You can still use and manage existing tokens.')
+ end
+
+ context 'with a project in a personal namespace' do
+ let(:personal_project) { create(:project) }
+
+ before do
+ personal_project.add_maintainer(user)
+ end
+
+ it 'shows project access token creation form and text' do
+ visit project_settings_access_tokens_path(personal_project)
+
+ expect(page).to have_selector('#new_project_access_token')
+ expect(page).to have_text('You can generate an access token scoped to this project for each application to use the GitLab API.')
+ end
+ end
+
+ context 'group settings link' do
+ context 'when user is not a group owner' do
+ before do
+ group.add_developer(user)
+ end
+
+ it 'does not show group settings link' do
+ visit project_settings_access_tokens_path(project)
+
+ expect(page).not_to have_link('group settings', href: edit_group_path(group))
+ end
+ end
+
+ context 'with nested groups' do
+ let(:subgroup) { create(:group, parent: group) }
+
+ context 'when user is not a top level group owner' do
+ before do
+ subgroup.add_owner(user)
+ end
+
+ it 'does not show group settings link' do
+ visit project_settings_access_tokens_path(project)
+
+ expect(page).not_to have_link('group settings', href: edit_group_path(group))
+ end
+ end
+ end
+
+ context 'when user is a group owner' do
+ before do
+ group.add_owner(user)
+ end
+
+ it 'shows group settings link' do
+ visit project_settings_access_tokens_path(project)
+
+ expect(page).to have_link('group settings', href: edit_group_path(group))
+ end
+ end
+ end
+ end
end
describe 'active tokens' do
@@ -83,11 +171,25 @@ RSpec.describe 'Project > Settings > Access Tokens', :js do
end
it 'removes expired tokens from active section' do
- project_access_token.update(expires_at: 5.days.ago)
+ project_access_token.update!(expires_at: 5.days.ago)
visit project_settings_access_tokens_path(project)
expect(page).to have_selector('.settings-message')
expect(no_project_access_tokens_message).to have_text(no_active_tokens_text)
end
+
+ context 'when resource access token creation is not allowed' do
+ before do
+ group.namespace_settings.update_column(:resource_access_token_creation_allowed, false)
+ end
+
+ it 'allows revocation of an active token' do
+ visit project_settings_access_tokens_path(project)
+ accept_confirm { click_on 'Revoke' }
+
+ expect(page).to have_selector('.settings-message')
+ expect(no_project_access_tokens_message).to have_text(no_active_tokens_text)
+ end
+ end
end
end
diff --git a/spec/features/projects/settings/forked_project_settings_spec.rb b/spec/features/projects/settings/forked_project_settings_spec.rb
index f6c25d483ad..a84516e19f9 100644
--- a/spec/features/projects/settings/forked_project_settings_spec.rb
+++ b/spec/features/projects/settings/forked_project_settings_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
end
shared_examples 'project settings for a forked project' do
- it 'allows deleting the link to the forked project' do
+ it 'allows deleting the link to the forked project', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/327817' do
visit edit_project_path(forked_project)
click_button 'Remove fork relationship'
@@ -25,7 +25,8 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
fill_in('confirm_name_input', with: forked_project.name)
click_button('Confirm')
- expect(page).to have_content('The fork relationship has been removed.')
+ wait_for_requests
+
expect(forked_project.reload.forked?).to be_falsy
end
end
diff --git a/spec/features/projects/settings/operations_settings_spec.rb b/spec/features/projects/settings/operations_settings_spec.rb
index fe0ee52e4fa..ca976997142 100644
--- a/spec/features/projects/settings/operations_settings_spec.rb
+++ b/spec/features/projects/settings/operations_settings_spec.rb
@@ -146,7 +146,7 @@ RSpec.describe 'Projects > Settings > For a forked project', :js do
click_button('Connect')
- assert_text('Connection has failed. Re-check Auth Token and try again.')
+ assert_text('Connection failed. Check Auth Token and try again.')
end
end
end
diff --git a/spec/features/projects/settings/registry_settings_spec.rb b/spec/features/projects/settings/registry_settings_spec.rb
index 6e4082d1391..bc60cdd2f8e 100644
--- a/spec/features/projects/settings/registry_settings_spec.rb
+++ b/spec/features/projects/settings/registry_settings_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration p
select('7 days', from: 'Remove tags older than:')
fill_in('Remove tags matching:', with: '.*-production')
- submit_button = find('.btn.gl-button.btn-success')
+ submit_button = find('[data-testid="save-button"]')
expect(submit_button).not_to be_disabled
submit_button.click
end
@@ -53,7 +53,7 @@ RSpec.describe 'Project > Settings > CI/CD > Container registry tag expiration p
within '#js-registry-policies' do
fill_in('Remove tags matching:', with: '*-production')
- submit_button = find('.btn.gl-button.btn-success')
+ submit_button = find('[data-testid="save-button"]')
expect(submit_button).not_to be_disabled
submit_button.click
end
diff --git a/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb b/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
index 397c334a2b8..ebda5c9ff59 100644
--- a/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
+++ b/spec/features/projects/settings/user_manages_merge_requests_settings_spec.rb
@@ -2,6 +2,8 @@
require 'spec_helper'
RSpec.describe 'Projects > Settings > User manages merge request settings' do
+ include ProjectForksHelper
+
let(:user) { create(:user) }
let(:project) { create(:project, :public, namespace: user.namespace, path: 'gitlab', name: 'sample') }
@@ -198,4 +200,36 @@ RSpec.describe 'Projects > Settings > User manages merge request settings' do
expect(project.reload.project_setting.squash_option).to eq('never')
end
end
+
+ describe 'target project settings' do
+ context 'when project is a fork' do
+ let_it_be(:upstream) { create(:project, :public) }
+
+ let(:project) { fork_project(upstream, user) }
+
+ it 'allows changing the merge request target project behavior' do
+ expect(page).to have_content 'The default target project for merge requests'
+
+ radio = find_field('project_project_setting_attributes_mr_default_target_self_false')
+ expect(radio).to be_checked
+
+ choose('project_project_setting_attributes_mr_default_target_self_true')
+
+ within('.merge-request-settings-form') do
+ find('.rspec-save-merge-request-changes')
+ click_on('Save changes')
+ end
+
+ find('.flash-notice')
+ radio = find_field('project_project_setting_attributes_mr_default_target_self_true')
+
+ expect(radio).to be_checked
+ expect(project.reload.project_setting.mr_default_target_self).to be_truthy
+ end
+ end
+
+ it 'does not show target project section' do
+ expect(page).not_to have_content 'The default target project for merge requests'
+ end
+ end
end
diff --git a/spec/features/projects/settings/user_searches_in_settings_spec.rb b/spec/features/projects/settings/user_searches_in_settings_spec.rb
index 4c5b39d5282..9b09958bae5 100644
--- a/spec/features/projects/settings/user_searches_in_settings_spec.rb
+++ b/spec/features/projects/settings/user_searches_in_settings_spec.rb
@@ -4,16 +4,42 @@ require 'spec_helper'
RSpec.describe 'User searches project settings', :js do
let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
+ let_it_be(:project) { create(:project, :repository, namespace: user.namespace, pages_https_only: false) }
before do
sign_in(user)
end
context 'in general settings page' do
- let(:visit_path) { edit_project_path(project) }
+ before do
+ visit edit_project_path(project)
+ end
+
+ it_behaves_like 'can search settings', 'Naming', 'Visibility'
+ end
+
+ context 'in Integrations page' do
+ before do
+ visit project_settings_integrations_path(project)
+ end
+
+ it_behaves_like 'can highlight results', 'third-party applications'
+ end
+
+ context 'in Webhooks page' do
+ before do
+ visit project_hooks_path(project)
+ end
- it_behaves_like 'can search settings with feature flag check', 'Naming', 'Visibility'
+ it_behaves_like 'can highlight results', 'Secret token'
+ end
+
+ context 'in Access Tokens page' do
+ before do
+ visit project_settings_access_tokens_path(project)
+ end
+
+ it_behaves_like 'can highlight results', 'Expires at'
end
context 'in Repository page' do
@@ -37,6 +63,16 @@ RSpec.describe 'User searches project settings', :js do
visit project_settings_operations_path(project)
end
- it_behaves_like 'can search settings', 'Alerts', 'Error tracking'
+ it_behaves_like 'can search settings', 'Alert integrations', 'Error tracking'
+ end
+
+ context 'in Pages page' do
+ before do
+ allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
+
+ visit project_pages_path(project)
+ end
+
+ it_behaves_like 'can highlight results', 'static website'
end
end
diff --git a/spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb b/spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb
index d0f297d2067..eed3494ef5b 100644
--- a/spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb
+++ b/spec/features/projects/settings/user_sees_revoke_deploy_token_modal_spec.rb
@@ -13,16 +13,10 @@ RSpec.describe 'Repository Settings > User sees revoke deploy token modal', :js
sign_in(user)
stub_feature_flags(ajax_new_deploy_token: project)
visit(project_settings_repository_path(project))
- click_link('Revoke')
+ click_button('Revoke')
end
it 'shows the revoke deploy token modal' do
expect(page).to have_content('You are about to revoke')
end
-
- it 'closes the revoke deploy token modal with escape keypress' do
- find('.modal.show').send_keys(:escape)
-
- expect(page).not_to have_content('You are about to revoke')
- end
end
diff --git a/spec/features/projects/show/user_sees_deletion_failure_message_spec.rb b/spec/features/projects/show/user_sees_deletion_failure_message_spec.rb
index 5e878411f6a..b7af0c29b33 100644
--- a/spec/features/projects/show/user_sees_deletion_failure_message_spec.rb
+++ b/spec/features/projects/show/user_sees_deletion_failure_message_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'Projects > Show > User sees a deletion failure message' do
end
it 'shows error message if deletion for project fails' do
- project.update(delete_error: "Something went wrong", pending_delete: false)
+ project.update!(delete_error: "Something went wrong", pending_delete: false)
visit project_path(project)
diff --git a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
index 9b51e867156..dc551158895 100644
--- a/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
+++ b/spec/features/projects/show/user_sees_setup_shortcut_buttons_spec.rb
@@ -226,11 +226,11 @@ RSpec.describe 'Projects > Show > User sees setup shortcut buttons' do
expect(project.repository.gitlab_ci_yml).to be_nil
page.within('.project-buttons') do
- expect(page).to have_link('Set up CI/CD', href: presenter.add_ci_yml_path)
+ expect(page).to have_link('Set up CI/CD', href: project_ci_pipeline_editor_path(project))
end
end
- it 'no "Set up CI/CD" button if the project already has a .gitlab-ci.yml' do
+ it '"Set up CI/CD" button is renamed if the project already has a .gitlab-ci.yml' do
Files::CreateService.new(
project,
project.creator,
@@ -247,6 +247,7 @@ RSpec.describe 'Projects > Show > User sees setup shortcut buttons' do
page.within('.project-buttons') do
expect(page).not_to have_link('Set up CI/CD')
+ expect(page).to have_link('CI/CD configuration')
end
end
end
diff --git a/spec/features/projects/show/user_uploads_files_spec.rb b/spec/features/projects/show/user_uploads_files_spec.rb
index 2030c4d998a..eb230082bfa 100644
--- a/spec/features/projects/show/user_uploads_files_spec.rb
+++ b/spec/features/projects/show/user_uploads_files_spec.rb
@@ -17,11 +17,17 @@ RSpec.describe 'Projects > Show > User uploads files' do
context 'when a user has write access' do
before do
visit(project_path(project))
+
+ wait_for_requests
end
- include_examples 'it uploads and commit a new text file'
+ include_examples 'it uploads and commits a new text file'
+
+ include_examples 'it uploads and commits a new image file'
+
+ include_examples 'it uploads and commits a new pdf file'
- include_examples 'it uploads and commit a new image file'
+ include_examples 'it uploads a file to a sub-directory'
end
context 'when a user does not have write access' do
@@ -31,7 +37,7 @@ RSpec.describe 'Projects > Show > User uploads files' do
visit(project_path(project2))
end
- include_examples 'it uploads and commit a new file to a forked project'
+ include_examples 'it uploads and commits a new file to a forked project'
end
context 'when in the empty_repo_upload experiment' do
diff --git a/spec/features/projects/snippets/user_comments_on_snippet_spec.rb b/spec/features/projects/snippets/user_comments_on_snippet_spec.rb
index b37d40c0eed..3ccb73c88ef 100644
--- a/spec/features/projects/snippets/user_comments_on_snippet_spec.rb
+++ b/spec/features/projects/snippets/user_comments_on_snippet_spec.rb
@@ -29,7 +29,6 @@ RSpec.describe 'Projects > Snippets > User comments on a snippet', :js do
end
it 'has autocomplete' do
- find('#note_note').native.send_keys('')
fill_in 'note[note]', with: '@'
expect(page).to have_selector('.atwho-view')
diff --git a/spec/features/projects/sub_group_issuables_spec.rb b/spec/features/projects/sub_group_issuables_spec.rb
index 8c1d88276df..d7614201740 100644
--- a/spec/features/projects/sub_group_issuables_spec.rb
+++ b/spec/features/projects/sub_group_issuables_spec.rb
@@ -16,18 +16,18 @@ RSpec.describe 'Subgroup Issuables', :js do
it 'shows the full subgroup title when issues index page is empty' do
visit project_issues_path(project)
- expect_to_have_full_subgroup_title
+ expect_to_have_breadcrumb_links
end
it 'shows the full subgroup title when merge requests index page is empty' do
visit project_merge_requests_path(project)
- expect_to_have_full_subgroup_title
+ expect_to_have_breadcrumb_links
end
- def expect_to_have_full_subgroup_title
- title = find('.breadcrumbs-links')
+ def expect_to_have_breadcrumb_links
+ links = find('[data-testid="breadcrumb-links"]')
- expect(title).to have_content 'group subgroup project'
+ expect(links).to have_content 'group subgroup project'
end
end
diff --git a/spec/features/projects/user_sees_sidebar_spec.rb b/spec/features/projects/user_sees_sidebar_spec.rb
index e5ba6b503cc..ff6217d02a7 100644
--- a/spec/features/projects/user_sees_sidebar_spec.rb
+++ b/spec/features/projects/user_sees_sidebar_spec.rb
@@ -208,7 +208,7 @@ RSpec.describe 'Projects > User sees sidebar' do
it 'shows build tab if builds are public' do
project.public_builds = true
- project.save
+ project.save!
visit project_path(project)
diff --git a/spec/features/projects/user_sees_user_popover_spec.rb b/spec/features/projects/user_sees_user_popover_spec.rb
index 52e65deae3b..e357824a533 100644
--- a/spec/features/projects/user_sees_user_popover_spec.rb
+++ b/spec/features/projects/user_sees_user_popover_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe 'User sees user popover', :js do
end
end
- it "displays user popover in system note" do
+ it 'displays user popover in system note', :sidekiq_inline do
add_note("/assign @#{user.username}")
find('.system-note-message .js-user-link').hover
diff --git a/spec/features/projects/user_uses_shortcuts_spec.rb b/spec/features/projects/user_uses_shortcuts_spec.rb
index f97c8d820e3..b6fde19e0d4 100644
--- a/spec/features/projects/user_uses_shortcuts_spec.rb
+++ b/spec/features/projects/user_uses_shortcuts_spec.rb
@@ -151,7 +151,7 @@ RSpec.describe 'User uses shortcuts', :js do
find('body').native.send_key('g')
find('body').native.send_key('m')
- expect(page).to have_active_navigation('Merge Requests')
+ expect(page).to have_active_navigation('Merge requests')
end
end
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index 4730679feb8..c18b0f2688b 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -174,26 +174,6 @@ RSpec.describe 'Project' do
end
end
- describe 'remove forked relationship', :js do
- let(:user) { create(:user) }
- let(:project) { fork_project(create(:project, :public), user, namespace: user.namespace) }
-
- before do
- sign_in user
- visit edit_project_path(project)
- end
-
- it 'removes fork' do
- expect(page).to have_content 'Remove fork relationship'
-
- remove_with_confirm('Remove fork relationship', project.path)
-
- expect(page).to have_content 'The fork relationship has been removed.'
- expect(project.reload.forked?).to be_falsey
- expect(page).not_to have_content 'Remove fork relationship'
- end
- end
-
describe 'showing information about source of a project fork' do
let(:user) { create(:user) }
let(:base_project) { create(:project, :public, :repository) }
diff --git a/spec/features/protected_branches_spec.rb b/spec/features/protected_branches_spec.rb
index eb099359df9..207b74c990a 100644
--- a/spec/features/protected_branches_spec.rb
+++ b/spec/features/protected_branches_spec.rb
@@ -24,8 +24,8 @@ RSpec.describe 'Protected Branches', :js do
it 'does not allow developer to remove protected branch' do
visit project_branches_path(project)
- fill_in 'branch-search', with: 'fix'
- find('#branch-search').native.send_keys(:enter)
+ find('input[data-testid="branch-search"]').set('fix')
+ find('input[data-testid="branch-search"]').native.send_keys(:enter)
expect(page).to have_css('.btn-danger.disabled')
end
@@ -47,8 +47,8 @@ RSpec.describe 'Protected Branches', :js do
it 'removes branch after modal confirmation' do
visit project_branches_path(project)
- fill_in 'branch-search', with: 'fix'
- find('#branch-search').native.send_keys(:enter)
+ find('input[data-testid="branch-search"]').set('fix')
+ find('input[data-testid="branch-search"]').native.send_keys(:enter)
expect(page).to have_content('fix')
expect(find('.all-branches')).to have_selector('li', count: 1)
@@ -58,8 +58,8 @@ RSpec.describe 'Protected Branches', :js do
fill_in 'delete_branch_input', with: 'fix'
click_link 'Delete protected branch'
- fill_in 'branch-search', with: 'fix'
- find('#branch-search').native.send_keys(:enter)
+ find('input[data-testid="branch-search"]').set('fix')
+ find('input[data-testid="branch-search"]').native.send_keys(:enter)
expect(page).to have_content('No branches to show')
end
diff --git a/spec/features/registrations/welcome_spec.rb b/spec/features/registrations/welcome_spec.rb
new file mode 100644
index 00000000000..74320b69f19
--- /dev/null
+++ b/spec/features/registrations/welcome_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Welcome screen' do
+ let(:user) { create(:user) }
+
+ before do
+ gitlab_sign_in(user)
+
+ visit users_sign_up_welcome_path
+ end
+
+ it 'shows the email opt in' do
+ select 'Software Developer', from: 'user_role'
+ check 'user_email_opted_in'
+ click_button 'Get started!'
+
+ expect(user.reload.email_opted_in).to eq(true)
+ end
+end
diff --git a/spec/features/runners_spec.rb b/spec/features/runners_spec.rb
index cc024ab8f35..acfb7c2602a 100644
--- a/spec/features/runners_spec.rb
+++ b/spec/features/runners_spec.rb
@@ -49,19 +49,19 @@ RSpec.describe 'Runners' do
visit project_runners_path(project)
within '.activated-specific-runners' do
- expect(page).to have_content('Pause')
+ expect(page).to have_link('Pause')
end
click_on 'Pause'
within '.activated-specific-runners' do
- expect(page).to have_content('Resume')
+ expect(page).to have_link('Resume')
end
click_on 'Resume'
within '.activated-specific-runners' do
- expect(page).to have_content('Pause')
+ expect(page).to have_link('Pause')
end
end
@@ -79,7 +79,7 @@ RSpec.describe 'Runners' do
visit project_runners_path(project)
within '.activated-specific-runners' do
- first('.edit-runner > a').click
+ first('[data-testid="edit-runner-link"]').click
end
expect(page.find_field('runner[access_level]')).not_to be_checked
@@ -92,14 +92,14 @@ RSpec.describe 'Runners' do
context 'when a runner has a tag' do
before do
- specific_runner.update(tag_list: ['tag'])
+ specific_runner.update!(tag_list: ['tag'])
end
it 'user edits runner not to run untagged jobs' do
visit project_runners_path(project)
within '.activated-specific-runners' do
- first('.edit-runner > a').click
+ first('[data-testid="edit-runner-link"]').click
end
expect(page.find_field('runner[run_untagged]')).to be_checked
@@ -370,7 +370,7 @@ RSpec.describe 'Runners' do
context 'when a runner has a tag' do
before do
- runner.update(tag_list: ['tag'])
+ runner.update!(tag_list: ['tag'])
end
it 'user edits runner not to run untagged jobs' do
@@ -450,7 +450,7 @@ RSpec.describe 'Runners' do
context 'when a runner has a tag' do
before do
- runner.update(tag_list: ['tag'])
+ runner.update!(tag_list: ['tag'])
end
it 'user edits runner not to run untagged jobs' do
diff --git a/spec/features/search/user_uses_header_search_field_spec.rb b/spec/features/search/user_uses_header_search_field_spec.rb
index 9296a3f33d4..4c42800cf05 100644
--- a/spec/features/search/user_uses_header_search_field_spec.rb
+++ b/spec/features/search/user_uses_header_search_field_spec.rb
@@ -175,7 +175,7 @@ RSpec.describe 'User uses header search field', :js do
fill_in_search('Merge')
within(dashboard_search_options_popup_menu) do
- expect(page).to have_text('Merge Requests')
+ expect(page).to have_text('Merge requests')
end
end
diff --git a/spec/features/security/project/internal_access_spec.rb b/spec/features/security/project/internal_access_spec.rb
index 2440b738db3..9dcef13757a 100644
--- a/spec/features/security/project/internal_access_spec.rb
+++ b/spec/features/security/project/internal_access_spec.rb
@@ -356,7 +356,7 @@ RSpec.describe "Internal Project Access" do
context "when allowed for public and internal" do
before do
- project.update(public_builds: true)
+ project.update!(public_builds: true)
end
it { is_expected.to be_allowed_for(:admin) }
@@ -372,7 +372,7 @@ RSpec.describe "Internal Project Access" do
context "when disallowed for public and internal" do
before do
- project.update(public_builds: false)
+ project.update!(public_builds: false)
end
it('is allowed for admin when admin mode is enabled', :enable_admin_mode) { is_expected.to be_allowed_for(:admin) }
@@ -396,7 +396,7 @@ RSpec.describe "Internal Project Access" do
context "when allowed for public and internal" do
before do
- project.update(public_builds: true)
+ project.update!(public_builds: true)
end
it { is_expected.to be_allowed_for(:admin) }
@@ -412,7 +412,7 @@ RSpec.describe "Internal Project Access" do
context "when disallowed for public and internal" do
before do
- project.update(public_builds: false)
+ project.update!(public_builds: false)
end
it('is allowed for admin when admin mode is enabled', :enable_admin_mode) { is_expected.to be_allowed_for(:admin) }
@@ -436,7 +436,7 @@ RSpec.describe "Internal Project Access" do
context 'when allowed for public and internal' do
before do
- project.update(public_builds: true)
+ project.update!(public_builds: true)
end
it { is_expected.to be_allowed_for(:admin) }
@@ -452,7 +452,7 @@ RSpec.describe "Internal Project Access" do
context 'when disallowed for public and internal' do
before do
- project.update(public_builds: false)
+ project.update!(public_builds: false)
end
it('is allowed for admin when admin mode is enabled', :enable_admin_mode) { is_expected.to be_allowed_for(:admin) }
diff --git a/spec/features/security/project/private_access_spec.rb b/spec/features/security/project/private_access_spec.rb
index 9d3109b92e6..5a200bea80a 100644
--- a/spec/features/security/project/private_access_spec.rb
+++ b/spec/features/security/project/private_access_spec.rb
@@ -319,7 +319,7 @@ RSpec.describe "Private Project Access" do
context 'when public builds is enabled' do
before do
- project.update(public_builds: true)
+ project.update!(public_builds: true)
end
it { is_expected.to be_allowed_for(:guest).of(project) }
@@ -348,7 +348,7 @@ RSpec.describe "Private Project Access" do
context 'when public builds is enabled' do
before do
- project.update(public_builds: true)
+ project.update!(public_builds: true)
end
it { is_expected.to be_allowed_for(:guest).of(project) }
@@ -375,7 +375,7 @@ RSpec.describe "Private Project Access" do
context 'when public builds is enabled' do
before do
- project.update(public_builds: true)
+ project.update!(public_builds: true)
end
it { is_expected.to be_allowed_for(:guest).of(project) }
@@ -405,7 +405,7 @@ RSpec.describe "Private Project Access" do
context 'when public builds is enabled' do
before do
- project.update(public_builds: true)
+ project.update!(public_builds: true)
end
it { is_expected.to be_allowed_for(:guest).of(project) }
@@ -414,7 +414,7 @@ RSpec.describe "Private Project Access" do
context 'when public builds are disabled' do
before do
project.public_builds = false
- project.save
+ project.save!
end
it { is_expected.to be_denied_for(:guest).of(project) }
@@ -440,7 +440,7 @@ RSpec.describe "Private Project Access" do
context 'when public builds is enabled' do
before do
- project.update(public_builds: true)
+ project.update!(public_builds: true)
end
it { is_expected.to be_allowed_for(:guest).of(project) }
@@ -448,7 +448,7 @@ RSpec.describe "Private Project Access" do
context 'when public builds is disabled' do
before do
- project.update(public_builds: false)
+ project.update!(public_builds: false)
end
it { is_expected.to be_denied_for(:guest).of(project) }
diff --git a/spec/features/security/project/public_access_spec.rb b/spec/features/security/project/public_access_spec.rb
index 28a1f1cda7f..8ceb6920e77 100644
--- a/spec/features/security/project/public_access_spec.rb
+++ b/spec/features/security/project/public_access_spec.rb
@@ -164,7 +164,7 @@ RSpec.describe "Public Project Access" do
context "when allowed for public" do
before do
- project.update(public_builds: true)
+ project.update!(public_builds: true)
end
it { is_expected.to be_allowed_for(:admin) }
@@ -180,7 +180,7 @@ RSpec.describe "Public Project Access" do
context "when disallowed for public" do
before do
- project.update(public_builds: false)
+ project.update!(public_builds: false)
end
it('is allowed for admin when admin mode is enabled', :enable_admin_mode) { is_expected.to be_allowed_for(:admin) }
@@ -204,7 +204,7 @@ RSpec.describe "Public Project Access" do
context "when allowed for public" do
before do
- project.update(public_builds: true)
+ project.update!(public_builds: true)
end
it { is_expected.to be_allowed_for(:admin) }
@@ -220,7 +220,7 @@ RSpec.describe "Public Project Access" do
context "when disallowed for public" do
before do
- project.update(public_builds: false)
+ project.update!(public_builds: false)
end
it('is allowed for admin when admin mode is enabled', :enable_admin_mode) { is_expected.to be_allowed_for(:admin) }
@@ -244,7 +244,7 @@ RSpec.describe "Public Project Access" do
context 'when allowed for public' do
before do
- project.update(public_builds: true)
+ project.update!(public_builds: true)
end
it { is_expected.to be_allowed_for(:admin) }
@@ -260,7 +260,7 @@ RSpec.describe "Public Project Access" do
context 'when disallowed for public' do
before do
- project.update(public_builds: false)
+ project.update!(public_builds: false)
end
it('is allowed for admin when admin mode is enabled', :enable_admin_mode) { is_expected.to be_allowed_for(:admin) }
diff --git a/spec/features/snippets/notes_on_personal_snippets_spec.rb b/spec/features/snippets/notes_on_personal_snippets_spec.rb
index ce9a2d1461e..47dad9bd88e 100644
--- a/spec/features/snippets/notes_on_personal_snippets_spec.rb
+++ b/spec/features/snippets/notes_on_personal_snippets_spec.rb
@@ -108,9 +108,6 @@ RSpec.describe 'Comments on personal snippets', :js do
end
it 'does not have autocomplete' do
- wait_for_requests
-
- find('#note_note').native.send_keys('')
fill_in 'note[note]', with: '@'
wait_for_requests
diff --git a/spec/features/users/anonymous_sessions_spec.rb b/spec/features/users/anonymous_sessions_spec.rb
index 420fb225f94..273d3aa346f 100644
--- a/spec/features/users/anonymous_sessions_spec.rb
+++ b/spec/features/users/anonymous_sessions_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe 'Session TTLs', :clean_gitlab_redis_shared_state do
fill_in 'user_password', with: '12345678'
click_button 'Sign in'
- expect(page).to have_content('Invalid Login or password')
+ expect(page).to have_content('Invalid login or password')
expect_single_session_with_expiration(Settings.gitlab['unauthenticated_session_expire_delay'])
end
diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb
index 1d1120709b5..e60d9d6ab69 100644
--- a/spec/features/users/login_spec.rb
+++ b/spec/features/users/login_spec.rb
@@ -138,7 +138,7 @@ RSpec.describe 'Login' do
gitlab_sign_in(User.ghost)
- expect(page).to have_content('Invalid Login or password.')
+ expect(page).to have_content('Invalid login or password.')
end
it 'does not update Devise trackable attributes', :clean_gitlab_redis_shared_state do
@@ -239,7 +239,7 @@ RSpec.describe 'Login' do
expect(codes.size).to eq 10
# Ensure the generated codes get saved
- user.save(touch: false)
+ user.save!(touch: false)
end
context 'with valid code' do
@@ -406,7 +406,7 @@ RSpec.describe 'Login' do
gitlab_sign_in(user)
- expect(page).to have_content('Invalid Login or password.')
+ expect(page).to have_content('Invalid login or password.')
end
end
end
diff --git a/spec/features/users/show_spec.rb b/spec/features/users/show_spec.rb
index a8372800700..56d2aaea203 100644
--- a/spec/features/users/show_spec.rb
+++ b/spec/features/users/show_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe 'User page' do
context 'work information' do
it 'shows job title and organization details' do
- user.update(organization: 'GitLab - work info test', job_title: 'Frontend Engineer')
+ user.update!(organization: 'GitLab - work info test', job_title: 'Frontend Engineer')
subject
@@ -41,7 +41,7 @@ RSpec.describe 'User page' do
end
it 'shows job title' do
- user.update(organization: nil, job_title: 'Frontend Engineer - work info test')
+ user.update!(organization: nil, job_title: 'Frontend Engineer - work info test')
subject
@@ -49,7 +49,7 @@ RSpec.describe 'User page' do
end
it 'shows organization details' do
- user.update(organization: 'GitLab - work info test', job_title: '')
+ user.update!(organization: 'GitLab - work info test', job_title: '')
subject
diff --git a/spec/features/users/terms_spec.rb b/spec/features/users/terms_spec.rb
index 7500f2fe59a..8ba79d77c22 100644
--- a/spec/features/users/terms_spec.rb
+++ b/spec/features/users/terms_spec.rb
@@ -121,7 +121,7 @@ RSpec.describe 'Users > Terms' do
enforce_terms
- click_button 'Submit issue'
+ click_button 'Create issue'
expect(current_path).to eq(terms_path)
diff --git a/spec/features/whats_new_spec.rb b/spec/features/whats_new_spec.rb
index 7c5625486f5..55b96361f03 100644
--- a/spec/features/whats_new_spec.rb
+++ b/spec/features/whats_new_spec.rb
@@ -2,34 +2,60 @@
require "spec_helper"
-RSpec.describe "renders a `whats new` dropdown item", :js do
+RSpec.describe "renders a `whats new` dropdown item" do
let_it_be(:user) { create(:user) }
- before do
- sign_in(user)
- end
+ context 'when not logged in' do
+ it 'and on .com it renders' do
+ allow(Gitlab).to receive(:com?).and_return(true)
- it 'shows notification dot and count and removes it once viewed' do
- visit root_dashboard_path
+ visit user_path(user)
- page.within '.header-help' do
- expect(page).to have_selector('.notification-dot', visible: true)
+ page.within '.header-help' do
+ find('.header-help-dropdown-toggle').click
- find('.header-help-dropdown-toggle').click
+ expect(page).to have_button(text: "What's new")
+ end
+ end
+
+ it "doesn't render what's new" do
+ visit user_path(user)
- expect(page).to have_button(text: "What's new")
- expect(page).to have_selector('.js-whats-new-notification-count')
+ page.within '.header-help' do
+ find('.header-help-dropdown-toggle').click
+
+ expect(page).not_to have_button(text: "What's new")
+ end
+ end
+ end
- find('button', text: "What's new").click
+ context 'when logged in', :js do
+ before do
+ sign_in(user)
end
- find('.whats-new-drawer .gl-drawer-close-button').click
- find('.header-help-dropdown-toggle').click
+ it 'shows notification dot and count and removes it once viewed' do
+ visit root_dashboard_path
+
+ page.within '.header-help' do
+ expect(page).to have_selector('.notification-dot', visible: true)
+
+ find('.header-help-dropdown-toggle').click
+
+ expect(page).to have_button(text: "What's new")
+ expect(page).to have_selector('.js-whats-new-notification-count')
+
+ find('button', text: "What's new").click
+ end
+
+ find('.whats-new-drawer .gl-drawer-close-button').click
+ find('.header-help-dropdown-toggle').click
- page.within '.header-help' do
- expect(page).not_to have_selector('.notification-dot', visible: true)
- expect(page).to have_button(text: "What's new")
- expect(page).not_to have_selector('.js-whats-new-notification-count')
+ page.within '.header-help' do
+ expect(page).not_to have_selector('.notification-dot', visible: true)
+ expect(page).to have_button(text: "What's new")
+ expect(page).not_to have_selector('.js-whats-new-notification-count')
+ end
end
end
end
diff --git a/spec/finders/applications_finder_spec.rb b/spec/finders/applications_finder_spec.rb
index dc615144b88..b6c48d8cdae 100644
--- a/spec/finders/applications_finder_spec.rb
+++ b/spec/finders/applications_finder_spec.rb
@@ -5,18 +5,48 @@ require 'spec_helper'
RSpec.describe ApplicationsFinder do
let(:application1) { create(:application, name: 'some_application', owner: nil, redirect_uri: 'http://some_application.url', scopes: '') }
let(:application2) { create(:application, name: 'another_application', owner: nil, redirect_uri: 'http://other_application.url', scopes: '') }
+ let(:user_application) { create(:application, name: 'user_application', owner: create(:user), redirect_uri: 'http://user_application.url', scopes: '') }
+ let(:group_application) { create(:application, name: 'group_application', owner: create(:group), redirect_uri: 'http://group_application.url', scopes: '') }
describe '#execute' do
- it 'returns an array of applications' do
+ it 'returns an array of instance applications' do
found = described_class.new.execute
expect(found).to match_array([application1, application2])
end
- it 'returns the application by id' do
- params = { id: application1.id }
- found = described_class.new(params).execute
- expect(found).to match(application1)
+ context 'by_id' do
+ context 'with existing id' do
+ it 'returns the application' do
+ params = { id: application1.id }
+ found = described_class.new(params).execute
+
+ expect(found).to match(application1)
+ end
+ end
+
+ context 'with invalid id' do
+ it 'returns nil for user application' do
+ params = { id: user_application.id }
+ found = described_class.new(params).execute
+
+ expect(found).to be_nil
+ end
+
+ it 'returns nil for group application' do
+ params = { id: group_application.id }
+ found = described_class.new(params).execute
+
+ expect(found).to be_nil
+ end
+
+ it 'returns nil for non-existing application' do
+ params = { id: non_existing_record_id }
+ found = described_class.new(params).execute
+
+ expect(found).to be_nil
+ end
+ end
end
end
end
diff --git a/spec/finders/ci/variables_finder_spec.rb b/spec/finders/ci/variables_finder_spec.rb
index cd5f950ca8e..683788452cc 100644
--- a/spec/finders/ci/variables_finder_spec.rb
+++ b/spec/finders/ci/variables_finder_spec.rb
@@ -3,42 +3,57 @@
require 'spec_helper'
RSpec.describe Ci::VariablesFinder do
- let!(:project) { create(:project) }
- let!(:params) { {} }
+ shared_examples 'scoped variables' do
+ describe '#initialize' do
+ subject { described_class.new(owner, params) }
- let!(:var1) { create(:ci_variable, project: project, key: 'key1', environment_scope: 'staging') }
- let!(:var2) { create(:ci_variable, project: project, key: 'key2', environment_scope: 'staging') }
- let!(:var3) { create(:ci_variable, project: project, key: 'key2', environment_scope: 'production') }
+ context 'without key filter' do
+ let!(:params) { {} }
- describe '#initialize' do
- subject { described_class.new(project, params) }
-
- context 'without key filter' do
- let!(:params) { {} }
-
- it 'raises an error' do
- expect { subject }.to raise_error(ArgumentError, 'Please provide params[:key]')
+ it 'raises an error' do
+ expect { subject }.to raise_error(ArgumentError, 'Please provide params[:key]')
+ end
end
end
- end
- describe '#execute' do
- subject { described_class.new(project.reload, params).execute }
+ describe '#execute' do
+ subject { described_class.new(owner.reload, params).execute }
- context 'with key filter' do
- let!(:params) { { key: 'key1' } }
+ context 'with key filter' do
+ let!(:params) { { key: 'key1' } }
- it 'returns var1' do
- expect(subject).to contain_exactly(var1)
+ it 'returns var1' do
+ expect(subject).to contain_exactly(var1)
+ end
end
- end
- context 'with key and environment_scope filter' do
- let!(:params) { { key: 'key2', filter: { environment_scope: 'staging' } } }
+ context 'with key and environment_scope filter' do
+ let!(:params) { { key: 'key2', filter: { environment_scope: 'staging' } } }
- it 'returns var2' do
- expect(subject).to contain_exactly(var2)
+ it 'returns var2' do
+ expect(subject).to contain_exactly(var2)
+ end
end
end
end
+
+ context 'for a project' do
+ let(:owner) { create(:project) }
+
+ let!(:var1) { create(:ci_variable, project: owner, key: 'key1', environment_scope: 'staging') }
+ let!(:var2) { create(:ci_variable, project: owner, key: 'key2', environment_scope: 'staging') }
+ let!(:var3) { create(:ci_variable, project: owner, key: 'key2', environment_scope: 'production') }
+
+ include_examples 'scoped variables'
+ end
+
+ context 'for a group' do
+ let(:owner) { create(:group) }
+
+ let!(:var1) { create(:ci_group_variable, group: owner, key: 'key1', environment_scope: 'staging') }
+ let!(:var2) { create(:ci_group_variable, group: owner, key: 'key2', environment_scope: 'staging') }
+ let!(:var3) { create(:ci_group_variable, group: owner, key: 'key2', environment_scope: 'production') }
+
+ include_examples 'scoped variables'
+ end
end
diff --git a/spec/finders/concerns/finder_with_group_hierarchy_spec.rb b/spec/finders/concerns/finder_with_group_hierarchy_spec.rb
new file mode 100644
index 00000000000..8c2026a00a1
--- /dev/null
+++ b/spec/finders/concerns/finder_with_group_hierarchy_spec.rb
@@ -0,0 +1,112 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe FinderWithGroupHierarchy do
+ let(:finder_class) do
+ Class.new do
+ include FinderWithGroupHierarchy
+ include Gitlab::Utils::StrongMemoize
+
+ def initialize(current_user, params = {})
+ @current_user = current_user
+ @params = params
+ end
+
+ def execute(skip_authorization: false)
+ @skip_authorization = skip_authorization
+
+ item_ids
+ end
+
+ # normally an array of item ids would be returned,
+ # however for this spec just return the group ids
+ def item_ids
+ group? ? group_ids_for(group) : []
+ end
+
+ private
+
+ attr_reader :current_user, :params, :skip_authorization
+
+ def read_permission
+ :read_label
+ end
+ end
+ end
+
+ let_it_be(:parent_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent_group) }
+ let_it_be(:private_group) { create(:group, :private) }
+ let_it_be(:private_subgroup) { create(:group, :private, parent: private_group) }
+
+ let(:user) { create(:user) }
+
+ context 'when specifying group' do
+ it 'returns only the group by default' do
+ finder = finder_class.new(user, group: group)
+
+ expect(finder.execute).to match_array([group.id])
+ end
+ end
+
+ context 'when specifying group_id' do
+ it 'returns only the group by default' do
+ finder = finder_class.new(user, group_id: group.id)
+
+ expect(finder.execute).to match_array([group.id])
+ end
+ end
+
+ context 'when including items from group ancestors' do
+ before do
+ private_subgroup.add_developer(user)
+ end
+
+ it 'returns group and its ancestors' do
+ private_group.add_developer(user)
+
+ finder = finder_class.new(user, group: private_subgroup, include_ancestor_groups: true)
+
+ expect(finder.execute).to match_array([private_group.id, private_subgroup.id])
+ end
+
+    it 'ignores groups which the user cannot read' do
+ finder = finder_class.new(user, group: private_subgroup, include_ancestor_groups: true)
+
+ expect(finder.execute).to match_array([private_subgroup.id])
+ end
+
+ it 'returns them all when skip_authorization is true' do
+ finder = finder_class.new(user, group: private_subgroup, include_ancestor_groups: true)
+
+ expect(finder.execute(skip_authorization: true)).to match_array([private_group.id, private_subgroup.id])
+ end
+ end
+
+ context 'when including items from group descendants' do
+ before do
+ private_subgroup.add_developer(user)
+ end
+
+ it 'returns items from group and its descendants' do
+ private_group.add_developer(user)
+
+ finder = finder_class.new(user, group: private_group, include_descendant_groups: true)
+
+ expect(finder.execute).to match_array([private_group.id, private_subgroup.id])
+ end
+
+    it 'ignores items from groups which the user cannot read' do
+ finder = finder_class.new(user, group: private_group, include_descendant_groups: true)
+
+ expect(finder.execute).to match_array([private_subgroup.id])
+ end
+
+ it 'returns them all when skip_authorization is true' do
+ finder = finder_class.new(user, group: private_group, include_descendant_groups: true)
+
+ expect(finder.execute(skip_authorization: true)).to match_array([private_group.id, private_subgroup.id])
+ end
+ end
+end
diff --git a/spec/finders/concerns/packages/finder_helper_spec.rb b/spec/finders/concerns/packages/finder_helper_spec.rb
index 73f77647573..c1740ee1796 100644
--- a/spec/finders/concerns/packages/finder_helper_spec.rb
+++ b/spec/finders/concerns/packages/finder_helper_spec.rb
@@ -6,7 +6,6 @@ RSpec.describe ::Packages::FinderHelper do
describe '#packages_visible_to_user' do
using RSpec::Parameterized::TableSyntax
- let_it_be(:user) { create(:user) }
let_it_be_with_reload(:group) { create(:group) }
let_it_be_with_reload(:project1) { create(:project, namespace: group) }
let_it_be(:package1) { create(:package, project: project1) }
@@ -44,41 +43,87 @@ RSpec.describe ::Packages::FinderHelper do
it { is_expected.to be_empty }
end
- where(:group_visibility, :subgroup_visibility, :project2_visibility, :user_role, :shared_example_name) do
- 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :maintainer | 'returning both packages'
- 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :developer | 'returning both packages'
- 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :guest | 'returning both packages'
- 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :anonymous | 'returning both packages'
- 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :maintainer | 'returning both packages'
- 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :developer | 'returning both packages'
- 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :guest | 'returning package1'
- 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :anonymous | 'returning package1'
- 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :maintainer | 'returning both packages'
- 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :developer | 'returning both packages'
- 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :guest | 'returning package1'
- 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :anonymous | 'returning package1'
- 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :maintainer | 'returning both packages'
- 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :developer | 'returning both packages'
- 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :guest | 'returning no packages'
- 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :anonymous | 'returning no packages'
+ context 'with a user' do
+ let_it_be(:user) { create(:user) }
+
+ where(:group_visibility, :subgroup_visibility, :project2_visibility, :user_role, :shared_example_name) do
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :maintainer | 'returning both packages'
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :developer | 'returning both packages'
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :guest | 'returning both packages'
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :anonymous | 'returning both packages'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :maintainer | 'returning both packages'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :developer | 'returning both packages'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :guest | 'returning package1'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :anonymous | 'returning package1'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :maintainer | 'returning both packages'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :developer | 'returning both packages'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :guest | 'returning package1'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :anonymous | 'returning package1'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :maintainer | 'returning both packages'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :developer | 'returning both packages'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :guest | 'returning no packages'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :anonymous | 'returning no packages'
+ end
+
+ with_them do
+ before do
+ unless user_role == :anonymous
+ group.send("add_#{user_role}", user)
+ subgroup.send("add_#{user_role}", user)
+ project1.send("add_#{user_role}", user)
+ project2.send("add_#{user_role}", user)
+ end
+
+ project2.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project2_visibility, false))
+ subgroup.update!(visibility_level: Gitlab::VisibilityLevel.const_get(subgroup_visibility, false))
+ project1.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
+ group.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
+ end
+
+ it_behaves_like params[:shared_example_name]
+ end
end
- with_them do
- before do
- unless user_role == :anonymous
- group.send("add_#{user_role}", user)
- subgroup.send("add_#{user_role}", user)
- project1.send("add_#{user_role}", user)
- project2.send("add_#{user_role}", user)
+ context 'with a group deploy token' do
+ let_it_be(:user) { create(:deploy_token, :group, read_package_registry: true) }
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: user, group: group) }
+
+ shared_examples 'handling all conditions' do
+ where(:group_visibility, :subgroup_visibility, :project2_visibility, :shared_example_name) do
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | 'returning both packages'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | 'returning both packages'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | 'returning both packages'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | 'returning both packages'
+ end
+
+ with_them do
+ before do
+ project2.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project2_visibility, false))
+ subgroup.update!(visibility_level: Gitlab::VisibilityLevel.const_get(subgroup_visibility, false))
+ project1.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
+ group.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
+ end
+
+ it_behaves_like params[:shared_example_name]
+ end
+ end
+
+ context 'with packages_finder_helper_deploy_token enabled' do
+ before do
+ expect(group).not_to receive(:all_projects)
end
- project2.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project2_visibility, false))
- subgroup.update!(visibility_level: Gitlab::VisibilityLevel.const_get(subgroup_visibility, false))
- project1.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
- group.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
+ it_behaves_like 'handling all conditions'
end
- it_behaves_like params[:shared_example_name]
+ context 'with packages_finder_helper_deploy_token disabled' do
+ before do
+ stub_feature_flags(packages_finder_helper_deploy_token: false)
+ expect(group).to receive(:all_projects).and_call_original
+ end
+
+ it_behaves_like 'handling all conditions'
+ end
end
end
@@ -121,41 +166,87 @@ RSpec.describe ::Packages::FinderHelper do
it { is_expected.to be_empty }
end
- where(:group_visibility, :subgroup_visibility, :project2_visibility, :user_role, :shared_example_name) do
- 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :maintainer | 'returning both projects'
- 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :developer | 'returning both projects'
- 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :guest | 'returning both projects'
- 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :anonymous | 'returning both projects'
- 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :maintainer | 'returning both projects'
- 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :developer | 'returning both projects'
- 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :guest | 'returning project1'
- 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :anonymous | 'returning project1'
- 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :maintainer | 'returning both projects'
- 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :developer | 'returning both projects'
- 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :guest | 'returning project1'
- 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :anonymous | 'returning project1'
- 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :maintainer | 'returning both projects'
- 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :developer | 'returning both projects'
- 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :guest | 'returning no project'
- 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :anonymous | 'returning no project'
+ context 'with a user' do
+ let_it_be(:user) { create(:user) }
+
+ where(:group_visibility, :subgroup_visibility, :project2_visibility, :user_role, :shared_example_name) do
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :maintainer | 'returning both projects'
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :developer | 'returning both projects'
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :guest | 'returning both projects'
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | :anonymous | 'returning both projects'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :maintainer | 'returning both projects'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :developer | 'returning both projects'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :guest | 'returning project1'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | :anonymous | 'returning project1'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :maintainer | 'returning both projects'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :developer | 'returning both projects'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :guest | 'returning project1'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | :anonymous | 'returning project1'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :maintainer | 'returning both projects'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :developer | 'returning both projects'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :guest | 'returning no project'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | :anonymous | 'returning no project'
+ end
+
+ with_them do
+ before do
+ unless user_role == :anonymous
+ group.send("add_#{user_role}", user)
+ subgroup.send("add_#{user_role}", user)
+ project1.send("add_#{user_role}", user)
+ project2.send("add_#{user_role}", user)
+ end
+
+ project2.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project2_visibility, false))
+ subgroup.update!(visibility_level: Gitlab::VisibilityLevel.const_get(subgroup_visibility, false))
+ project1.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
+ group.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
+ end
+
+ it_behaves_like params[:shared_example_name]
+ end
end
- with_them do
- before do
- unless user_role == :anonymous
- group.send("add_#{user_role}", user)
- subgroup.send("add_#{user_role}", user)
- project1.send("add_#{user_role}", user)
- project2.send("add_#{user_role}", user)
+ context 'with a group deploy token' do
+ let_it_be(:user) { create(:deploy_token, :group, read_package_registry: true) }
+ let_it_be(:group_deploy_token) { create(:group_deploy_token, deploy_token: user, group: group) }
+
+ shared_examples 'handling all conditions' do
+ where(:group_visibility, :subgroup_visibility, :project2_visibility, :shared_example_name) do
+ 'PUBLIC' | 'PUBLIC' | 'PUBLIC' | 'returning both projects'
+ 'PUBLIC' | 'PUBLIC' | 'PRIVATE' | 'returning both projects'
+ 'PUBLIC' | 'PRIVATE' | 'PRIVATE' | 'returning both projects'
+ 'PRIVATE' | 'PRIVATE' | 'PRIVATE' | 'returning both projects'
end
- project2.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project2_visibility, false))
- subgroup.update!(visibility_level: Gitlab::VisibilityLevel.const_get(subgroup_visibility, false))
- project1.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
- group.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
+ with_them do
+ before do
+ project2.update!(visibility_level: Gitlab::VisibilityLevel.const_get(project2_visibility, false))
+ subgroup.update!(visibility_level: Gitlab::VisibilityLevel.const_get(subgroup_visibility, false))
+ project1.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
+ group.update!(visibility_level: Gitlab::VisibilityLevel.const_get(group_visibility, false))
+ end
+
+ it_behaves_like params[:shared_example_name]
+ end
end
- it_behaves_like params[:shared_example_name]
+ context 'with packages_finder_helper_deploy_token enabled' do
+ before do
+ expect(group).not_to receive(:all_projects)
+ end
+
+ it_behaves_like 'handling all conditions'
+ end
+
+ context 'with packages_finder_helper_deploy_token disabled' do
+ before do
+ stub_feature_flags(packages_finder_helper_deploy_token: false)
+ expect(group).to receive(:all_projects).and_call_original
+ end
+
+ it_behaves_like 'handling all conditions'
+ end
end
end
end
diff --git a/spec/finders/design_management/designs_finder_spec.rb b/spec/finders/design_management/designs_finder_spec.rb
index feb78a4bc4b..631f23b7312 100644
--- a/spec/finders/design_management/designs_finder_spec.rb
+++ b/spec/finders/design_management/designs_finder_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe DesignManagement::DesignsFinder do
let_it_be(:design1) { create(:design, :with_file, issue: issue, versions_count: 1, relative_position: 3) }
let_it_be(:design2) { create(:design, :with_file, issue: issue, versions_count: 1, relative_position: 2) }
let_it_be(:design3) { create(:design, :with_file, issue: issue, versions_count: 1, relative_position: 1) }
+
let(:params) { {} }
subject(:designs) { described_class.new(issue, user, params).execute }
diff --git a/spec/finders/design_management/versions_finder_spec.rb b/spec/finders/design_management/versions_finder_spec.rb
index 6a56ccb10b8..0d606ef46f1 100644
--- a/spec/finders/design_management/versions_finder_spec.rb
+++ b/spec/finders/design_management/versions_finder_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe DesignManagement::VersionsFinder do
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:design_1) { create(:design, :with_file, issue: issue, versions_count: 1) }
let_it_be(:design_2) { create(:design, :with_file, issue: issue, versions_count: 1) }
+
let(:version_1) { design_1.versions.first }
let(:version_2) { design_2.versions.first }
let(:design_or_collection) { issue.design_collection }
diff --git a/spec/finders/environments_by_deployments_finder_spec.rb b/spec/finders/environments_by_deployments_finder_spec.rb
new file mode 100644
index 00000000000..f5fcc4ef72a
--- /dev/null
+++ b/spec/finders/environments_by_deployments_finder_spec.rb
@@ -0,0 +1,127 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe EnvironmentsByDeploymentsFinder do
+ let(:project) { create(:project, :repository) }
+ let(:user) { project.creator }
+ let(:environment) { create(:environment, :available, project: project) }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ describe '#execute' do
+ context 'tagged deployment' do
+ let(:environment_two) { create(:environment, project: project) }
+ # Environments need to include commits, so rewind two commits to fit
+ let(:commit) { project.commit('HEAD~2') }
+
+ before do
+ create(:deployment, :success, environment: environment, ref: 'v1.0.0', tag: true, sha: project.commit.id)
+ create(:deployment, :success, environment: environment_two, ref: 'v1.1.0', tag: true, sha: project.commit('HEAD~1').id)
+ end
+
+ it 'returns environment when with_tags is set' do
+ expect(described_class.new(project, user, ref: 'master', commit: commit, with_tags: true).execute)
+ .to contain_exactly(environment, environment_two)
+ end
+
+      it 'does not return environment when with_tags is not set' do
+ expect(described_class.new(project, user, ref: 'master', commit: commit).execute)
+ .to be_empty
+ end
+
+ it 'does not return environment when commit is not part of deployment' do
+ expect(described_class.new(project, user, ref: 'master', commit: project.commit('feature')).execute)
+ .to be_empty
+ end
+
+ # We expect two Gitaly calls: FindCommit, CommitIsAncestor
+      # This test ensures we don't call CommitIsAncestor once per environment
+ it 'only calls Gitaly twice when multiple environments are present', :request_store do
+ expect do
+ result = described_class.new(project, user, ref: 'master', commit: commit, with_tags: true, find_latest: true).execute
+
+ expect(result).to contain_exactly(environment_two)
+ end.to change { Gitlab::GitalyClient.get_request_count }.by(2)
+ end
+ end
+
+ context 'branch deployment' do
+ before do
+ create(:deployment, :success, environment: environment, ref: 'master', sha: project.commit.id)
+ end
+
+ it 'returns environment when ref is set' do
+ expect(described_class.new(project, user, ref: 'master', commit: project.commit).execute)
+ .to contain_exactly(environment)
+ end
+
+      it 'does not return environment when ref is different' do
+ expect(described_class.new(project, user, ref: 'feature', commit: project.commit).execute)
+ .to be_empty
+ end
+
+ it 'does not return environment when commit is not part of deployment' do
+ expect(described_class.new(project, user, ref: 'master', commit: project.commit('feature')).execute)
+ .to be_empty
+ end
+
+ it 'returns environment when commit constraint is not set' do
+ expect(described_class.new(project, user, ref: 'master').execute)
+ .to contain_exactly(environment)
+ end
+ end
+
+ context 'commit deployment' do
+ before do
+ create(:deployment, :success, environment: environment, ref: 'master', sha: project.commit.id)
+ end
+
+ it 'returns environment' do
+ expect(described_class.new(project, user, commit: project.commit).execute)
+ .to contain_exactly(environment)
+ end
+ end
+
+ context 'recently updated' do
+ context 'when last deployment to environment is the most recent one' do
+ before do
+ create(:deployment, :success, environment: environment, ref: 'feature')
+ end
+
+ it 'finds recently updated environment' do
+ expect(described_class.new(project, user, ref: 'feature', recently_updated: true).execute)
+ .to contain_exactly(environment)
+ end
+ end
+
+ context 'when last deployment to environment is not the most recent' do
+ before do
+ create(:deployment, :success, environment: environment, ref: 'feature')
+ create(:deployment, :success, environment: environment, ref: 'master')
+ end
+
+ it 'does not find environment' do
+ expect(described_class.new(project, user, ref: 'feature', recently_updated: true).execute)
+ .to be_empty
+ end
+ end
+
+ context 'when there are two environments that deploy to the same branch' do
+ let(:second_environment) { create(:environment, project: project) }
+
+ before do
+ create(:deployment, :success, environment: environment, ref: 'feature')
+ create(:deployment, :success, environment: second_environment, ref: 'feature')
+ end
+
+ it 'finds both environments' do
+ expect(described_class.new(project, user, ref: 'feature', recently_updated: true).execute)
+ .to contain_exactly(environment, second_environment)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/finders/environments_finder_spec.rb b/spec/finders/environments_finder_spec.rb
index fd714ab9a8f..c2022331ad9 100644
--- a/spec/finders/environments_finder_spec.rb
+++ b/spec/finders/environments_finder_spec.rb
@@ -12,150 +12,36 @@ RSpec.describe EnvironmentsFinder do
end
describe '#execute' do
- context 'tagged deployment' do
- let(:environment_two) { create(:environment, project: project) }
- # Environments need to include commits, so rewind two commits to fit
- let(:commit) { project.commit('HEAD~2') }
-
- before do
- create(:deployment, :success, environment: environment, ref: 'v1.0.0', tag: true, sha: project.commit.id)
- create(:deployment, :success, environment: environment_two, ref: 'v1.1.0', tag: true, sha: project.commit('HEAD~1').id)
- end
-
- it 'returns environment when with_tags is set' do
- expect(described_class.new(project, user, ref: 'master', commit: commit, with_tags: true).execute)
- .to contain_exactly(environment, environment_two)
- end
-
- it 'does not return environment when no with_tags is set' do
- expect(described_class.new(project, user, ref: 'master', commit: commit).execute)
- .to be_empty
- end
-
- it 'does not return environment when commit is not part of deployment' do
- expect(described_class.new(project, user, ref: 'master', commit: project.commit('feature')).execute)
- .to be_empty
- end
-
- # We expect two Gitaly calls: FindCommit, CommitIsAncestor
- # This tests to ensure we don't call one CommitIsAncestor per environment
- it 'only calls Gitaly twice when multiple environments are present', :request_store do
- expect do
- result = described_class.new(project, user, ref: 'master', commit: commit, with_tags: true, find_latest: true).execute
-
- expect(result).to contain_exactly(environment_two)
- end.to change { Gitlab::GitalyClient.get_request_count }.by(2)
- end
- end
-
- context 'branch deployment' do
- before do
- create(:deployment, :success, environment: environment, ref: 'master', sha: project.commit.id)
- end
-
- it 'returns environment when ref is set' do
- expect(described_class.new(project, user, ref: 'master', commit: project.commit).execute)
- .to contain_exactly(environment)
- end
-
- it 'does not environment when ref is different' do
- expect(described_class.new(project, user, ref: 'feature', commit: project.commit).execute)
- .to be_empty
- end
-
- it 'does not return environment when commit is not part of deployment' do
- expect(described_class.new(project, user, ref: 'master', commit: project.commit('feature')).execute)
- .to be_empty
- end
-
- it 'returns environment when commit constraint is not set' do
- expect(described_class.new(project, user, ref: 'master').execute)
- .to contain_exactly(environment)
- end
- end
-
- context 'commit deployment' do
- before do
- create(:deployment, :success, environment: environment, ref: 'master', sha: project.commit.id)
- end
-
- it 'returns environment' do
- expect(described_class.new(project, user, commit: project.commit).execute)
- .to contain_exactly(environment)
- end
- end
-
- context 'recently updated' do
- context 'when last deployment to environment is the most recent one' do
- before do
- create(:deployment, :success, environment: environment, ref: 'feature')
- end
-
- it 'finds recently updated environment' do
- expect(described_class.new(project, user, ref: 'feature', recently_updated: true).execute)
- .to contain_exactly(environment)
- end
- end
-
- context 'when last deployment to environment is not the most recent' do
- before do
- create(:deployment, :success, environment: environment, ref: 'feature')
- create(:deployment, :success, environment: environment, ref: 'master')
- end
-
- it 'does not find environment' do
- expect(described_class.new(project, user, ref: 'feature', recently_updated: true).execute)
- .to be_empty
- end
- end
-
- context 'when there are two environments that deploy to the same branch' do
- let(:second_environment) { create(:environment, project: project) }
-
- before do
- create(:deployment, :success, environment: environment, ref: 'feature')
- create(:deployment, :success, environment: second_environment, ref: 'feature')
- end
-
- it 'finds both environments' do
- expect(described_class.new(project, user, ref: 'feature', recently_updated: true).execute)
- .to contain_exactly(environment, second_environment)
- end
- end
- end
- end
-
- describe '#find' do
context 'with states parameter' do
let(:stopped_environment) { create(:environment, :stopped, project: project) }
it 'returns environments with the requested state' do
- result = described_class.new(project, user, states: 'available').find
+ result = described_class.new(project, user, states: 'available').execute
expect(result).to contain_exactly(environment)
end
it 'returns environments with any of the requested states' do
- result = described_class.new(project, user, states: %w(available stopped)).find
+ result = described_class.new(project, user, states: %w(available stopped)).execute
expect(result).to contain_exactly(environment, stopped_environment)
end
it 'raises exception when requested state is invalid' do
- expect { described_class.new(project, user, states: %w(invalid stopped)).find }.to(
+ expect { described_class.new(project, user, states: %w(invalid stopped)).execute }.to(
raise_error(described_class::InvalidStatesError, 'Requested states are invalid')
)
end
context 'works with symbols' do
it 'returns environments with the requested state' do
- result = described_class.new(project, user, states: :available).find
+ result = described_class.new(project, user, states: :available).execute
expect(result).to contain_exactly(environment)
end
it 'returns environments with any of the requested states' do
- result = described_class.new(project, user, states: [:available, :stopped]).find
+ result = described_class.new(project, user, states: [:available, :stopped]).execute
expect(result).to contain_exactly(environment, stopped_environment)
end
@@ -167,7 +53,7 @@ RSpec.describe EnvironmentsFinder do
let(:environment3) { create(:environment, :available, name: 'test3', project: project) }
it 'searches environments by name and state' do
- result = described_class.new(project, user, search: 'test', states: :available).find
+ result = described_class.new(project, user, search: 'test', states: :available).execute
expect(result).to contain_exactly(environment3)
end
diff --git a/spec/finders/group_members_finder_spec.rb b/spec/finders/group_members_finder_spec.rb
index a87a05d4408..3238f6744f7 100644
--- a/spec/finders/group_members_finder_spec.rb
+++ b/spec/finders/group_members_finder_spec.rb
@@ -3,174 +3,180 @@
require 'spec_helper'
RSpec.describe GroupMembersFinder, '#execute' do
- let(:group) { create(:group) }
- let(:nested_group) { create(:group, parent: group) }
- let(:deeper_nested_group) { create(:group, parent: nested_group) }
- let(:user1) { create(:user) }
- let(:user2) { create(:user) }
- let(:user3) { create(:user) }
- let(:user4) { create(:user) }
- let(:user5) { create(:user, :two_factor_via_otp) }
-
- it 'returns members for top-level group' do
- member1 = group.add_maintainer(user1)
- member2 = group.add_maintainer(user2)
- member3 = group.add_maintainer(user3)
- create(:group_member, :minimal_access, user: create(:user), source: group)
-
- result = described_class.new(group).execute
-
- expect(result.to_a).to match_array([member3, member2, member1])
+ let(:group) { create(:group) }
+ let(:sub_group) { create(:group, parent: group) }
+ let(:sub_sub_group) { create(:group, parent: sub_group) }
+ let(:user1) { create(:user) }
+ let(:user2) { create(:user) }
+ let(:user3) { create(:user) }
+ let(:user4) { create(:user) }
+ let(:user5) { create(:user, :two_factor_via_otp) }
+
+ let(:groups) do
+ {
+ group: group,
+ sub_group: sub_group,
+ sub_sub_group: sub_sub_group
+ }
end
- it 'returns members & inherited members for nested group by default' do
- group.add_developer(user2)
- nested_group.request_access(user4)
- member1 = group.add_maintainer(user1)
- member3 = nested_group.add_maintainer(user2)
- member4 = nested_group.add_maintainer(user3)
-
- result = described_class.new(nested_group).execute
-
- expect(result.to_a).to match_array([member1, member3, member4])
+ context 'relations' do
+ let!(:members) do
+ {
+ user1_sub_sub_group: create(:group_member, :maintainer, group: sub_sub_group, user: user1),
+ user1_sub_group: create(:group_member, :developer, group: sub_group, user: user1),
+ user1_group: create(:group_member, :reporter, group: group, user: user1),
+ user2_sub_sub_group: create(:group_member, :reporter, group: sub_sub_group, user: user2),
+ user2_sub_group: create(:group_member, :developer, group: sub_group, user: user2),
+ user2_group: create(:group_member, :maintainer, group: group, user: user2),
+ user3_sub_sub_group: create(:group_member, :developer, group: sub_sub_group, user: user3, expires_at: 1.day.from_now),
+ user3_sub_group: create(:group_member, :developer, group: sub_group, user: user3, expires_at: 2.days.from_now),
+ user3_group: create(:group_member, :reporter, group: group, user: user3),
+ user4_sub_sub_group: create(:group_member, :reporter, group: sub_sub_group, user: user4),
+ user4_sub_group: create(:group_member, :developer, group: sub_group, user: user4, expires_at: 1.day.from_now),
+ user4_group: create(:group_member, :developer, group: group, user: user4, expires_at: 2.days.from_now)
+ }
+ end
+
+ using RSpec::Parameterized::TableSyntax
+
+ where(:subject_relations, :subject_group, :expected_members) do
+ nil | :group | [:user1_group, :user2_group, :user3_group, :user4_group]
+ [:direct] | :group | [:user1_group, :user2_group, :user3_group, :user4_group]
+ [:inherited] | :group | []
+ [:descendants] | :group | [:user1_sub_sub_group, :user2_sub_group, :user3_sub_group, :user4_sub_group]
+ [:direct, :inherited] | :group | [:user1_group, :user2_group, :user3_group, :user4_group]
+ [:direct, :descendants] | :group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_group]
+ [:descendants, :inherited] | :group | [:user1_sub_sub_group, :user2_sub_group, :user3_sub_group, :user4_sub_group]
+ [:direct, :descendants, :inherited] | :group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_group]
+ nil | :sub_group | [:user1_sub_group, :user2_group, :user3_sub_group, :user4_group]
+ [:direct] | :sub_group | [:user1_sub_group, :user2_sub_group, :user3_sub_group, :user4_sub_group]
+ [:inherited] | :sub_group | [:user1_group, :user2_group, :user3_group, :user4_group]
+ [:descendants] | :sub_group | [:user1_sub_sub_group, :user2_sub_sub_group, :user3_sub_sub_group, :user4_sub_sub_group]
+ [:direct, :inherited] | :sub_group | [:user1_sub_group, :user2_group, :user3_sub_group, :user4_group]
+ [:direct, :descendants] | :sub_group | [:user1_sub_sub_group, :user2_sub_group, :user3_sub_group, :user4_sub_group]
+ [:descendants, :inherited] | :sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_sub_group, :user4_group]
+ [:direct, :descendants, :inherited] | :sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_group]
+ nil | :sub_sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_group]
+ [:direct] | :sub_sub_group | [:user1_sub_sub_group, :user2_sub_sub_group, :user3_sub_sub_group, :user4_sub_sub_group]
+ [:inherited] | :sub_sub_group | [:user1_sub_group, :user2_group, :user3_sub_group, :user4_group]
+ [:descendants] | :sub_sub_group | []
+ [:direct, :inherited] | :sub_sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_group]
+ [:direct, :descendants] | :sub_sub_group | [:user1_sub_sub_group, :user2_sub_sub_group, :user3_sub_sub_group, :user4_sub_sub_group]
+ [:descendants, :inherited] | :sub_sub_group | [:user1_sub_group, :user2_group, :user3_sub_group, :user4_group]
+ [:direct, :descendants, :inherited] | :sub_sub_group | [:user1_sub_sub_group, :user2_group, :user3_sub_group, :user4_group]
+ end
+
+ with_them do
+ it 'returns correct members' do
+ result = if subject_relations
+ described_class.new(groups[subject_group]).execute(include_relations: subject_relations)
+ else
+ described_class.new(groups[subject_group]).execute
+ end
+
+ expect(result.to_a).to match_array(expected_members.map { |name| members[name] })
+ end
+ end
end
- it 'does not return inherited members for nested group if requested' do
- group.add_maintainer(user1)
- group.add_developer(user2)
- member2 = nested_group.add_maintainer(user2)
- member3 = nested_group.add_maintainer(user3)
+ context 'search' do
+ it 'returns searched members if requested' do
+ group.add_maintainer(user2)
+ group.add_developer(user3)
+ member = group.add_maintainer(user1)
- result = described_class.new(nested_group).execute(include_relations: [:direct])
+ result = described_class.new(group, params: { search: user1.name }).execute
- expect(result.to_a).to match_array([member2, member3])
- end
+ expect(result.to_a).to match_array([member])
+ end
- it 'returns only inherited members for nested group if requested' do
- group.add_developer(user2)
- nested_group.request_access(user4)
- member1 = group.add_maintainer(user1)
- nested_group.add_maintainer(user2)
- nested_group.add_maintainer(user3)
+ it 'returns nothing if search only in inherited relation' do
+ group.add_maintainer(user2)
+ group.add_developer(user3)
+ group.add_maintainer(user1)
- result = described_class.new(nested_group).execute(include_relations: [:inherited])
+ result = described_class.new(group, params: { search: user1.name }).execute(include_relations: [:inherited])
- expect(result.to_a).to match_array([member1])
- end
+ expect(result.to_a).to match_array([])
+ end
- it 'does not return nil if `inherited only` relation is requested on root group' do
- group.add_developer(user2)
+ it 'returns searched member only from sub_group if search only in inherited relation' do
+ group.add_maintainer(user2)
+ group.add_developer(user3)
+ sub_group.add_maintainer(create(:user, name: user1.name))
+ member = group.add_maintainer(user1)
- result = described_class.new(group).execute(include_relations: [:inherited])
+ result = described_class.new(sub_group, params: { search: member.user.name }).execute(include_relations: [:inherited])
- expect(result).not_to be_nil
+ expect(result.to_a).to contain_exactly(member)
+ end
end
- it 'returns members for descendant groups if requested' do
- member1 = group.add_maintainer(user2)
- member2 = group.add_maintainer(user1)
- nested_group.add_maintainer(user2)
- member3 = nested_group.add_maintainer(user3)
- member4 = nested_group.add_maintainer(user4)
+ context 'filter by two-factor' do
+ it 'returns members with two-factor auth if requested by owner' do
+ group.add_owner(user2)
+ group.add_maintainer(user1)
+ member = group.add_maintainer(user5)
- result = described_class.new(group).execute(include_relations: [:direct, :descendants])
+ result = described_class.new(group, user2, params: { two_factor: 'enabled' }).execute
- expect(result.to_a).to match_array([member1, member2, member3, member4])
- end
+ expect(result.to_a).to contain_exactly(member)
+ end
- it 'returns searched members if requested' do
- group.add_maintainer(user2)
- group.add_developer(user3)
- member = group.add_maintainer(user1)
+ it 'returns members without two-factor auth if requested by owner' do
+ member1 = group.add_owner(user2)
+ member2 = group.add_maintainer(user1)
+ member_with_2fa = group.add_maintainer(user5)
- result = described_class.new(group, params: { search: user1.name }).execute
+ result = described_class.new(group, user2, params: { two_factor: 'disabled' }).execute
- expect(result.to_a).to match_array([member])
- end
+ expect(result.to_a).not_to include(member_with_2fa)
+ expect(result.to_a).to match_array([member1, member2])
+ end
- it 'returns nothing if search only in inherited relation' do
- group.add_maintainer(user2)
- group.add_developer(user3)
- group.add_maintainer(user1)
+ it 'returns direct members with two-factor auth if requested by owner' do
+ group.add_owner(user1)
+ group.add_maintainer(user2)
+ sub_group.add_maintainer(user3)
+ member_with_2fa = sub_group.add_maintainer(user5)
- result = described_class.new(group, params: { search: user1.name }).execute(include_relations: [:inherited])
+ result = described_class.new(sub_group, user1, params: { two_factor: 'enabled' }).execute(include_relations: [:direct])
- expect(result.to_a).to match_array([])
- end
+ expect(result.to_a).to match_array([member_with_2fa])
+ end
- it 'returns searched member only from nested_group if search only in inherited relation' do
- group.add_maintainer(user2)
- group.add_developer(user3)
- nested_group.add_maintainer(create(:user, name: user1.name))
- member = group.add_maintainer(user1)
+ it 'returns inherited members with two-factor auth if requested by owner' do
+ group.add_owner(user1)
+ member_with_2fa = group.add_maintainer(user5)
+ sub_group.add_maintainer(user2)
+ sub_group.add_maintainer(user3)
- result = described_class.new(nested_group, params: { search: member.user.name }).execute(include_relations: [:inherited])
+ result = described_class.new(sub_group, user1, params: { two_factor: 'enabled' }).execute(include_relations: [:inherited])
- expect(result.to_a).to contain_exactly(member)
- end
-
- it 'returns members with two-factor auth if requested by owner' do
- group.add_owner(user2)
- group.add_maintainer(user1)
- member = group.add_maintainer(user5)
-
- result = described_class.new(group, user2, params: { two_factor: 'enabled' }).execute
+ expect(result.to_a).to match_array([member_with_2fa])
+ end
- expect(result.to_a).to contain_exactly(member)
- end
-
- it 'returns members without two-factor auth if requested by owner' do
- member1 = group.add_owner(user2)
- member2 = group.add_maintainer(user1)
- member_with_2fa = group.add_maintainer(user5)
+ it 'returns direct members without two-factor auth if requested by owner' do
+ group.add_owner(user1)
+ group.add_maintainer(user2)
+ member3 = sub_group.add_maintainer(user3)
+ sub_group.add_maintainer(user5)
- result = described_class.new(group, user2, params: { two_factor: 'disabled' }).execute
+ result = described_class.new(sub_group, user1, params: { two_factor: 'disabled' }).execute(include_relations: [:direct])
- expect(result.to_a).not_to include(member_with_2fa)
- expect(result.to_a).to match_array([member1, member2])
- end
-
- it 'returns direct members with two-factor auth if requested by owner' do
- group.add_owner(user1)
- group.add_maintainer(user2)
- nested_group.add_maintainer(user3)
- member_with_2fa = nested_group.add_maintainer(user5)
-
- result = described_class.new(nested_group, user1, params: { two_factor: 'enabled' }).execute(include_relations: [:direct])
-
- expect(result.to_a).to match_array([member_with_2fa])
- end
-
- it 'returns inherited members with two-factor auth if requested by owner' do
- group.add_owner(user1)
- member_with_2fa = group.add_maintainer(user5)
- nested_group.add_maintainer(user2)
- nested_group.add_maintainer(user3)
-
- result = described_class.new(nested_group, user1, params: { two_factor: 'enabled' }).execute(include_relations: [:inherited])
-
- expect(result.to_a).to match_array([member_with_2fa])
- end
-
- it 'returns direct members without two-factor auth if requested by owner' do
- group.add_owner(user1)
- group.add_maintainer(user2)
- member3 = nested_group.add_maintainer(user3)
- nested_group.add_maintainer(user5)
-
- result = described_class.new(nested_group, user1, params: { two_factor: 'disabled' }).execute(include_relations: [:direct])
-
- expect(result.to_a).to match_array([member3])
- end
+ expect(result.to_a).to match_array([member3])
+ end
- it 'returns inherited members without two-factor auth if requested by owner' do
- member1 = group.add_owner(user1)
- group.add_maintainer(user5)
- nested_group.add_maintainer(user2)
- nested_group.add_maintainer(user3)
+ it 'returns inherited members without two-factor auth if requested by owner' do
+ member1 = group.add_owner(user1)
+ group.add_maintainer(user5)
+ sub_group.add_maintainer(user2)
+ sub_group.add_maintainer(user3)
- result = described_class.new(nested_group, user1, params: { two_factor: 'disabled' }).execute(include_relations: [:inherited])
+ result = described_class.new(sub_group, user1, params: { two_factor: 'disabled' }).execute(include_relations: [:inherited])
- expect(result.to_a).to match_array([member1])
+ expect(result.to_a).to match_array([member1])
+ end
end
end
diff --git a/spec/finders/issues_finder_spec.rb b/spec/finders/issues_finder_spec.rb
index b794ab626bf..a2aac857bf5 100644
--- a/spec/finders/issues_finder_spec.rb
+++ b/spec/finders/issues_finder_spec.rb
@@ -49,6 +49,13 @@ RSpec.describe IssuesFinder do
let(:expected_issuables) { [issue3, issue4] }
end
+ context 'when assignee_id does not exist' do
+ it_behaves_like 'assignee NOT ID filter' do
+ let(:params) { { not: { assignee_id: -100 } } }
+ let(:expected_issuables) { [issue1, issue2, issue3, issue4, issue5] }
+ end
+ end
+
context 'filter by username' do
let_it_be(:user3) { create(:user) }
@@ -71,6 +78,17 @@ RSpec.describe IssuesFinder do
let(:params) { { not: { assignee_username: [user.username, user2.username] } } }
let(:expected_issuables) { [issue3, issue4] }
end
+
+ context 'when assignee_username does not exist' do
+ it_behaves_like 'assignee NOT username filter' do
+ before do
+ issue2.assignees = [user2]
+ end
+
+ let(:params) { { not: { assignee_username: 'non_existent_username' } } }
+ let(:expected_issuables) { [issue1, issue2, issue3, issue4, issue5] }
+ end
+ end
end
it_behaves_like 'no assignee filter' do
diff --git a/spec/finders/merge_requests/oldest_per_commit_finder_spec.rb b/spec/finders/merge_requests/oldest_per_commit_finder_spec.rb
index 4724a8eb5c7..6dffaff294d 100644
--- a/spec/finders/merge_requests/oldest_per_commit_finder_spec.rb
+++ b/spec/finders/merge_requests/oldest_per_commit_finder_spec.rb
@@ -77,6 +77,45 @@ RSpec.describe MergeRequests::OldestPerCommitFinder do
expect(described_class.new(project).execute(commits)).to eq(sha => mr)
end
+ it 'includes a merge request that was squashed into the target branch' do
+ project = create(:project)
+ sha = Digest::SHA1.hexdigest('foo')
+ mr = create(
+ :merge_request,
+ :merged,
+ target_project: project,
+ squash_commit_sha: sha
+ )
+
+ commits = [double(:commit, id: sha)]
+
+ expect(MergeRequestDiffCommit)
+ .not_to receive(:oldest_merge_request_id_per_commit)
+
+ expect(described_class.new(project).execute(commits)).to eq(sha => mr)
+ end
+
+ it 'includes a merge request for both a squash and merge commit' do
+ project = create(:project)
+ sha1 = Digest::SHA1.hexdigest('foo')
+ sha2 = Digest::SHA1.hexdigest('bar')
+ mr = create(
+ :merge_request,
+ :merged,
+ target_project: project,
+ squash_commit_sha: sha1,
+ merge_commit_sha: sha2
+ )
+
+ commits = [double(:commit1, id: sha1), double(:commit2, id: sha2)]
+
+ expect(MergeRequestDiffCommit)
+ .not_to receive(:oldest_merge_request_id_per_commit)
+
+ expect(described_class.new(project).execute(commits))
+ .to eq(sha1 => mr, sha2 => mr)
+ end
+
it 'includes the oldest merge request when a merge commit is present in a newer merge request' do
project = create(:project)
sha = Digest::SHA1.hexdigest('foo')
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index b3000498bb6..597d22801ca 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -156,6 +156,18 @@ RSpec.describe MergeRequestsFinder do
it { is_expected.to eq([merge_request2]) }
end
+
+ context 'when project_id is given' do
+ subject(:query) { described_class.new(user, merged_after: 15.days.ago, merged_before: 6.days.ago, project_id: merge_request2.project).execute }
+
+ it { is_expected.to eq([merge_request2]) }
+
+ it 'queries merge_request_metrics.target_project_id table' do
+ expect(query.to_sql).to include(%{"merge_request_metrics"."target_project_id" = #{merge_request2.target_project_id}})
+
+ expect(query.to_sql).not_to include(%{"merge_requests"."target_project_id"})
+ end
+ end
end
context 'filtering by group' do
diff --git a/spec/finders/metrics/dashboards/annotations_finder_spec.rb b/spec/finders/metrics/dashboards/annotations_finder_spec.rb
index 223fd2c047c..7c5932dde1e 100644
--- a/spec/finders/metrics/dashboards/annotations_finder_spec.rb
+++ b/spec/finders/metrics/dashboards/annotations_finder_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Metrics::Dashboards::AnnotationsFinder do
subject(:annotations) { described_class.new(dashboard: dashboard, params: params).execute }
let_it_be(:current_user) { create(:user) }
+
let(:path) { 'config/prometheus/common_metrics.yml' }
let(:params) { {} }
let(:environment) { create(:environment) }
diff --git a/spec/finders/metrics/users_starred_dashboards_finder_spec.rb b/spec/finders/metrics/users_starred_dashboards_finder_spec.rb
index 61dadb5239c..4136cf1123a 100644
--- a/spec/finders/metrics/users_starred_dashboards_finder_spec.rb
+++ b/spec/finders/metrics/users_starred_dashboards_finder_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Metrics::UsersStarredDashboardsFinder do
subject(:starred_dashboards) { described_class.new(user: user, project: project, params: params).execute }
let_it_be(:user) { create(:user) }
+
let(:project) { create(:project) }
let(:dashboard_path) { 'config/prometheus/common_metrics.yml' }
let(:params) { {} }
diff --git a/spec/finders/notes_finder_spec.rb b/spec/finders/notes_finder_spec.rb
index 868b126dc28..11de19cfdbc 100644
--- a/spec/finders/notes_finder_spec.rb
+++ b/spec/finders/notes_finder_spec.rb
@@ -213,6 +213,24 @@ RSpec.describe NotesFinder do
expect { described_class.new(user, params).execute }.to raise_error(RuntimeError)
end
end
+
+ describe 'sorting' do
+ it 'allows sorting' do
+ params = { project: project, sort: 'id_desc' }
+
+ expect(Note).to receive(:order_id_desc).once
+
+ described_class.new(user, params).execute
+ end
+
+ it 'defaults to sort by .fresh' do
+ params = { project: project }
+
+ expect(Note).to receive(:fresh).once
+
+ described_class.new(user, params).execute
+ end
+ end
end
describe '.search' do
diff --git a/spec/finders/packages/go/package_finder_spec.rb b/spec/finders/packages/go/package_finder_spec.rb
new file mode 100644
index 00000000000..b6fad1e7061
--- /dev/null
+++ b/spec/finders/packages/go/package_finder_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Go::PackageFinder do
+ include_context 'basic Go module'
+
+ let_it_be(:mod) { create :go_module, project: project }
+ let_it_be(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.1' }
+ let_it_be(:package) { create :golang_package, project: project, name: mod.name, version: 'v1.0.1' }
+
+ let(:finder) { described_class.new(project, mod_name, version_name) }
+
+ describe '#exists?' do
+ subject { finder.exists? }
+
+ context 'with a valid name and version' do
+ let(:mod_name) { mod.name }
+ let(:version_name) { version.name }
+
+ it 'executes SELECT 1' do
+ expect { subject }.to exceed_query_limit(0).for_query(/^SELECT 1/)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'with an invalid name' do
+ let(:mod_name) { 'foo/bar' }
+ let(:version_name) { 'baz' }
+
+ it { is_expected.to eq(false) }
+ end
+
+ context 'with an invalid version' do
+ let(:mod_name) { mod.name }
+ let(:version_name) { 'baz' }
+
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#execute' do
+ subject { finder.execute }
+
+ context 'with a valid name and version' do
+ let(:mod_name) { mod.name }
+ let(:version_name) { version.name }
+
+ it 'executes a single query' do
+ expect { subject }.not_to exceed_query_limit(1)
+ end
+
+ it { is_expected.to eq(package) }
+ end
+
+ context 'with an invalid name' do
+ let(:mod_name) { 'foo/bar' }
+ let(:version_name) { 'baz' }
+
+ it { is_expected.to eq(nil) }
+ end
+
+ context 'with an invalid version' do
+ let(:mod_name) { mod.name }
+ let(:version_name) { 'baz' }
+
+ it { is_expected.to eq(nil) }
+ end
+ end
+end
diff --git a/spec/finders/packages/maven/package_finder_spec.rb b/spec/finders/packages/maven/package_finder_spec.rb
index b955c331f28..ca144292501 100644
--- a/spec/finders/packages/maven/package_finder_spec.rb
+++ b/spec/finders/packages/maven/package_finder_spec.rb
@@ -11,71 +11,144 @@ RSpec.describe ::Packages::Maven::PackageFinder do
let(:param_path) { nil }
let(:param_project) { nil }
let(:param_group) { nil }
- let(:finder) { described_class.new(param_path, user, project: param_project, group: param_group) }
+ let(:param_order_by_package_file) { false }
+ let(:finder) { described_class.new(param_path, user, project: param_project, group: param_group, order_by_package_file: param_order_by_package_file) }
before do
group.add_developer(user)
end
- describe '#execute!' do
- subject { finder.execute! }
+ shared_examples 'Packages::Maven::PackageFinder examples' do
+ describe '#execute!' do
+ subject { finder.execute! }
- shared_examples 'handling valid and invalid paths' do
- context 'with a valid path' do
- let(:param_path) { package.maven_metadatum.path }
+ shared_examples 'handling valid and invalid paths' do
+ context 'with a valid path' do
+ let(:param_path) { package.maven_metadatum.path }
- it { is_expected.to eq(package) }
+ it { is_expected.to eq(package) }
+ end
+
+ context 'with an invalid path' do
+ let(:param_path) { 'com/example/my-app/1.0-SNAPSHOT' }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
+ end
+ end
end
- context 'with an invalid path' do
- let(:param_path) { 'com/example/my-app/1.0-SNAPSHOT' }
+ context 'within the project' do
+ let(:param_project) { project }
+ it_behaves_like 'handling valid and invalid paths'
+ end
+
+ context 'within a group' do
+ let(:param_group) { group }
+
+ context 'with maven_packages_group_level_improvements enabled' do
+ before do
+ stub_feature_flags(maven_packages_group_level_improvements: true)
+ expect(finder).to receive(:packages_visible_to_user).with(user, within_group: group).and_call_original
+ end
+
+ it_behaves_like 'handling valid and invalid paths'
+ end
+
+ context 'with maven_packages_group_level_improvements disabled' do
+ before do
+ stub_feature_flags(maven_packages_group_level_improvements: false)
+ expect(finder).not_to receive(:packages_visible_to_user)
+ end
+
+ it_behaves_like 'handling valid and invalid paths'
+ end
+ end
+
+ context 'across all projects' do
it 'raises an error' do
expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
end
end
- end
- context 'within the project' do
- let(:param_project) { project }
+ context 'versionless maven-metadata.xml package' do
+ let_it_be(:sub_group1) { create(:group, parent: group) }
+ let_it_be(:sub_group2) { create(:group, parent: group) }
+ let_it_be(:project1) { create(:project, group: sub_group1) }
+ let_it_be(:project2) { create(:project, group: sub_group2) }
+ let_it_be(:project3) { create(:project, group: sub_group1) }
+ let_it_be(:package_name) { 'foo' }
+ let_it_be(:package1) { create(:maven_package, project: project1, name: package_name, version: nil) }
+ let_it_be(:package2) { create(:maven_package, project: project2, name: package_name, version: nil) }
+ let_it_be(:package3) { create(:maven_package, project: project3, name: package_name, version: nil) }
+
+ let(:param_group) { group }
+ let(:param_path) { package_name }
+
+ before do
+ sub_group1.add_developer(user)
+ sub_group2.add_developer(user)
+ # the package with the most recently published file should be returned
+ create(:package_file, :xml, package: package2)
+ end
- it_behaves_like 'handling valid and invalid paths'
- end
+ context 'with maven_packages_group_level_improvements enabled' do
+ before do
+ stub_feature_flags(maven_packages_group_level_improvements: true)
+ expect(finder).not_to receive(:versionless_package?)
+ end
- context 'within a group' do
- let(:param_group) { group }
+ context 'without order by package file' do
+ it { is_expected.to eq(package3) }
+ end
- it_behaves_like 'handling valid and invalid paths'
- end
+ context 'with order by package file' do
+ let(:param_order_by_package_file) { true }
+
+ it { is_expected.to eq(package2) }
+ end
+ end
+
+ context 'with maven_packages_group_level_improvements disabled' do
+ before do
+ stub_feature_flags(maven_packages_group_level_improvements: false)
+ expect(finder).to receive(:versionless_package?).and_call_original
+ end
- context 'across all projects' do
- it 'raises an error' do
- expect { subject }.to raise_error(ActiveRecord::RecordNotFound)
+ context 'without order by package file' do
+ it { is_expected.to eq(package2) }
+ end
+
+ context 'with order by package file' do
+ let(:param_order_by_package_file) { true }
+
+ it { is_expected.to eq(package2) }
+ end
+ end
end
end
+ end
+
+ context 'when the maven_metadata_by_path_with_optimization_fence feature flag is off' do
+ before do
+ stub_feature_flags(maven_metadata_by_path_with_optimization_fence: false)
+ end
- context 'versionless maven-metadata.xml package' do
- let_it_be(:sub_group1) { create(:group, parent: group) }
- let_it_be(:sub_group2) { create(:group, parent: group) }
- let_it_be(:project1) { create(:project, group: sub_group1) }
- let_it_be(:project2) { create(:project, group: sub_group2) }
- let_it_be(:project3) { create(:project, group: sub_group1) }
- let_it_be(:package_name) { 'foo' }
- let_it_be(:package1) { create(:maven_package, project: project1, name: package_name, version: nil) }
- let_it_be(:package2) { create(:maven_package, project: project2, name: package_name, version: nil) }
- let_it_be(:package3) { create(:maven_package, project: project3, name: package_name, version: nil) }
-
- let(:param_group) { group }
- let(:param_path) { package_name }
-
- before do
- sub_group1.add_developer(user)
- sub_group2.add_developer(user)
- # the package with the most recently published file should be returned
- create(:package_file, :xml, package: package2)
- end
+ it_behaves_like 'Packages::Maven::PackageFinder examples'
+ end
+
+ context 'when the maven_metadata_by_path_with_optimization_fence feature flag is on' do
+ before do
+ stub_feature_flags(maven_metadata_by_path_with_optimization_fence: true)
+ end
+
+ it_behaves_like 'Packages::Maven::PackageFinder examples'
+
+ it 'uses CTE in the query' do
+ sql = described_class.new('some_path', user, group: group).send(:packages_with_path).to_sql
- it { is_expected.to eq(package2) }
+ expect(sql).to include('WITH "maven_metadata_by_path" AS')
end
end
end
diff --git a/spec/finders/pending_todos_finder_spec.rb b/spec/finders/pending_todos_finder_spec.rb
index 10d3c2905be..b17915f0d59 100644
--- a/spec/finders/pending_todos_finder_spec.rb
+++ b/spec/finders/pending_todos_finder_spec.rb
@@ -4,13 +4,15 @@ require 'spec_helper'
RSpec.describe PendingTodosFinder do
let(:user) { create(:user) }
+ let(:user2) { create(:user) }
+ let(:users) { [user, user2] }
describe '#execute' do
it 'returns only pending todos' do
create(:todo, :done, user: user)
todo = create(:todo, :pending, user: user)
- todos = described_class.new(user).execute
+ todos = described_class.new(users).execute
expect(todos).to eq([todo])
end
@@ -22,7 +24,7 @@ RSpec.describe PendingTodosFinder do
create(:todo, :pending, user: user, project: project2)
todo = create(:todo, :pending, user: user, project: project1)
- todos = described_class.new(user, project_id: project1.id).execute
+ todos = described_class.new(users, project_id: project1.id).execute
expect(todos).to eq([todo])
end
@@ -34,7 +36,7 @@ RSpec.describe PendingTodosFinder do
create(:todo, :pending, user: user, target: note)
- todos = described_class.new(user, target_id: issue.id).execute
+ todos = described_class.new(users, target_id: issue.id).execute
expect(todos).to eq([todo])
end
@@ -46,7 +48,7 @@ RSpec.describe PendingTodosFinder do
create(:todo, :pending, user: user, target: note)
- todos = described_class.new(user, target_type: issue.class.name).execute
+ todos = described_class.new(users, target_type: issue.class.name).execute
expect(todos).to eq([todo])
end
@@ -55,7 +57,7 @@ RSpec.describe PendingTodosFinder do
create(:todo, :pending, user: user, commit_id: '456')
todo = create(:todo, :pending, user: user, commit_id: '123')
- todos = described_class.new(user, commit_id: '123').execute
+ todos = described_class.new(users, commit_id: '123').execute
expect(todos).to eq([todo])
end
diff --git a/spec/finders/projects_finder_spec.rb b/spec/finders/projects_finder_spec.rb
index 4d9ff30daba..a178261e899 100644
--- a/spec/finders/projects_finder_spec.rb
+++ b/spec/finders/projects_finder_spec.rb
@@ -129,6 +129,12 @@ RSpec.describe ProjectsFinder do
it { is_expected.to eq([public_project]) }
end
+
+ context 'as string' do
+ let(:params) { { visibility_level: Gitlab::VisibilityLevel::INTERNAL.to_s } }
+
+ it { is_expected.to eq([internal_project]) }
+ end
end
describe 'filter by tags' do
diff --git a/spec/finders/repositories/branch_names_finder_spec.rb b/spec/finders/repositories/branch_names_finder_spec.rb
new file mode 100644
index 00000000000..4d8bfcc0f20
--- /dev/null
+++ b/spec/finders/repositories/branch_names_finder_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Repositories::BranchNamesFinder do
+ let(:project) { create(:project, :repository) }
+
+ let(:branch_names_finder) { described_class.new(project.repository, search: 'conflict-*') }
+
+ describe '#execute' do
+ subject(:execute) { branch_names_finder.execute }
+
+ it 'filters branch names' do
+ expect(execute).to contain_exactly(
+ 'conflict-binary-file',
+ 'conflict-resolvable',
+ 'conflict-contains-conflict-markers',
+ 'conflict-missing-side',
+ 'conflict-start',
+ 'conflict-non-utf8',
+ 'conflict-too-large'
+ )
+ end
+ end
+end
diff --git a/spec/finders/repositories/previous_tag_finder_spec.rb b/spec/finders/repositories/changelog_tag_finder_spec.rb
index b332dd158d1..cd79beb3e9e 100644
--- a/spec/finders/repositories/previous_tag_finder_spec.rb
+++ b/spec/finders/repositories/changelog_tag_finder_spec.rb
@@ -2,11 +2,18 @@
require 'spec_helper'
-RSpec.describe Repositories::PreviousTagFinder do
+RSpec.describe Repositories::ChangelogTagFinder do
let(:project) { build_stubbed(:project) }
let(:finder) { described_class.new(project) }
describe '#execute' do
+ context 'when the regular expression is invalid' do
+ it 'raises Gitlab::Changelog::Error' do
+ expect { described_class.new(project, regex: 'foo+*').execute('1.2.3') }
+ .to raise_error(Gitlab::Changelog::Error)
+ end
+ end
+
context 'when there is a previous tag' do
it 'returns the previous tag' do
tag1 = double(:tag1, name: 'v1.0.0')
@@ -15,10 +22,11 @@ RSpec.describe Repositories::PreviousTagFinder do
tag4 = double(:tag4, name: '0.9.0')
tag5 = double(:tag5, name: 'v0.8.0-pre1')
tag6 = double(:tag6, name: 'v0.7.0')
+ tag7 = double(:tag7, name: '0.5.0+42.ee.0')
allow(project.repository)
.to receive(:tags)
- .and_return([tag1, tag3, tag2, tag4, tag5, tag6])
+ .and_return([tag1, tag3, tag2, tag4, tag5, tag6, tag7])
expect(finder.execute('2.1.0')).to eq(tag3)
expect(finder.execute('2.0.0')).to eq(tag2)
@@ -26,6 +34,7 @@ RSpec.describe Repositories::PreviousTagFinder do
expect(finder.execute('1.0.1')).to eq(tag1)
expect(finder.execute('1.0.0')).to eq(tag4)
expect(finder.execute('0.9.0')).to eq(tag6)
+ expect(finder.execute('0.6.0')).to eq(tag7)
end
end
diff --git a/spec/finders/user_group_notification_settings_finder_spec.rb b/spec/finders/user_group_notification_settings_finder_spec.rb
index 453da691866..b9d800d8e55 100644
--- a/spec/finders/user_group_notification_settings_finder_spec.rb
+++ b/spec/finders/user_group_notification_settings_finder_spec.rb
@@ -129,4 +129,37 @@ RSpec.describe UserGroupNotificationSettingsFinder do
end
end
end
+
+ context 'preloading `emails_disabled`' do
+ let_it_be(:root_group) { create(:group) }
+ let_it_be(:sub_group) { create(:group, parent: root_group) }
+ let_it_be(:sub_sub_group) { create(:group, parent: sub_group) }
+
+ let_it_be(:another_root_group) { create(:group) }
+ let_it_be(:sub_group_with_emails_disabled) { create(:group, emails_disabled: true, parent: another_root_group) }
+ let_it_be(:another_sub_sub_group) { create(:group, parent: sub_group_with_emails_disabled) }
+
+ let_it_be(:root_group_with_emails_disabled) { create(:group, emails_disabled: true) }
+ let_it_be(:group) { create(:group, parent: root_group_with_emails_disabled) }
+
+ let(:groups) { Group.where(id: [sub_sub_group, another_sub_sub_group, group]) }
+
+ before do
+ described_class.new(user, groups).execute
+ end
+
+ it 'preloads the `group.emails_disabled` method' do
+ recorder = ActiveRecord::QueryRecorder.new do
+ groups.each(&:emails_disabled?)
+ end
+
+ expect(recorder.count).to eq(0)
+ end
+
+ it 'preloads the `group.emails_disabled` method correctly' do
+ groups.each do |group|
+ expect(group.emails_disabled?).to eq(Group.find(group.id).emails_disabled?) # compare the memoized and the freshly loaded value
+ end
+ end
+ end
end
diff --git a/spec/fixtures/api/schemas/entities/member.json b/spec/fixtures/api/schemas/entities/member.json
index 03b1872632e..f06687f9809 100644
--- a/spec/fixtures/api/schemas/entities/member.json
+++ b/spec/fixtures/api/schemas/entities/member.json
@@ -8,6 +8,7 @@
"requested_at",
"source",
"valid_roles",
+ "type",
"can_update",
"can_remove",
"is_direct_member"
@@ -40,6 +41,7 @@
"additionalProperties": false
},
"valid_roles": { "type": "object" },
+ "type": { "type": "string" },
"created_by": {
"type": "object",
"required": ["name", "web_url"],
diff --git a/spec/fixtures/api/schemas/entities/member_user.json b/spec/fixtures/api/schemas/entities/member_user.json
index ebd26bfaaaa..41a1e510de5 100644
--- a/spec/fixtures/api/schemas/entities/member_user.json
+++ b/spec/fixtures/api/schemas/entities/member_user.json
@@ -18,6 +18,5 @@
},
"additionalProperties": false
}
- },
- "additionalProperties": false
+ }
}
diff --git a/spec/fixtures/api/schemas/external_validation.json b/spec/fixtures/api/schemas/external_validation.json
index 1bd00a2e6fc..3ff71626cc0 100644
--- a/spec/fixtures/api/schemas/external_validation.json
+++ b/spec/fixtures/api/schemas/external_validation.json
@@ -11,11 +11,13 @@
"type": "object",
"required": [
"id",
- "path"
+ "path",
+ "created_at"
],
"properties": {
"id": { "type": "integer" },
- "path": { "type": "string" }
+ "path": { "type": "string" },
+ "created_at": { "type": ["string", "null"], "format": "date-time" }
}
},
"user": {
@@ -23,12 +25,14 @@
"required": [
"id",
"username",
- "email"
+ "email",
+ "created_at"
],
"properties": {
"id": { "type": "integer" },
"username": { "type": "string" },
- "email": { "type": "string" }
+ "email": { "type": "string" },
+ "created_at": { "type": ["string", "null"], "format": "date-time" }
}
},
"pipeline": {
@@ -70,6 +74,5 @@
}
}
}
- },
- "additionalProperties": false
+ }
}
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_conan_metadata.json b/spec/fixtures/api/schemas/graphql/packages/package_conan_metadata.json
new file mode 100644
index 00000000000..31bb861ced5
--- /dev/null
+++ b/spec/fixtures/api/schemas/graphql/packages/package_conan_metadata.json
@@ -0,0 +1,37 @@
+{
+ "type": "object",
+ "additionalProperties": false,
+ "required": [
+ "id",
+ "createdAt",
+ "updatedAt",
+ "packageUsername",
+ "packageChannel",
+ "recipe",
+ "recipePath",
+ "packageName"
+ ],
+ "properties": {
+ "id": {
+ "type": "string"
+ },
+ "created_at": {
+ "type": "string"
+ },
+ "updated_at": {
+ "type": "string"
+ },
+ "package_username": {
+ "type": "string"
+ },
+ "package_channel": {
+ "type": "string"
+ },
+ "recipe": {
+ "type": "string"
+ },
+ "recipe_path": {
+ "type": "string"
+ }
+ }
+}
diff --git a/spec/fixtures/api/schemas/graphql/packages/package_details.json b/spec/fixtures/api/schemas/graphql/packages/package_details.json
index d2e2e65db54..87b173eefc7 100644
--- a/spec/fixtures/api/schemas/graphql/packages/package_details.json
+++ b/spec/fixtures/api/schemas/graphql/packages/package_details.json
@@ -2,8 +2,17 @@
"type": "object",
"additionalProperties": false,
"required": [
- "id", "name", "createdAt", "updatedAt", "version", "packageType",
- "project", "tags", "pipelines", "versions", "metadata"
+ "id",
+ "name",
+ "createdAt",
+ "updatedAt",
+ "version",
+ "packageType",
+ "project",
+ "tags",
+ "pipelines",
+ "versions",
+ "metadata"
],
"properties": {
"id": {
@@ -23,7 +32,18 @@
},
"packageType": {
"type": ["string"],
- "enum": ["MAVEN", "NPM", "CONAN", "NUGET", "PYPI", "COMPOSER", "GENERIC", "GOLANG", "DEBIAN"]
+ "enum": [
+ "MAVEN",
+ "NPM",
+ "CONAN",
+ "NUGET",
+ "PYPI",
+ "COMPOSER",
+ "GENERIC",
+ "GOLANG",
+ "RUBYGEMS",
+ "DEBIAN"
+ ]
},
"tags": {
"type": "object",
@@ -59,8 +79,18 @@
"metadata": {
"anyOf": [
{ "$ref": "./package_composer_metadata.json" },
+ { "$ref": "./package_conan_metadata.json" },
{ "type": "null" }
]
+ },
+ "packageFiles": {
+ "type": "object",
+ "additionalProperties": false,
+ "properties": {
+ "pageInfo": { "type": "object" },
+ "edges": { "type": "array" },
+ "nodes": { "type": "array" }
+ }
}
}
}
diff --git a/spec/fixtures/api/schemas/public_api/v4/user/public.json b/spec/fixtures/api/schemas/public_api/v4/user/public.json
index faa126b65f2..ee848eda9ed 100644
--- a/spec/fixtures/api/schemas/public_api/v4/user/public.json
+++ b/spec/fixtures/api/schemas/public_api/v4/user/public.json
@@ -70,6 +70,7 @@
"can_create_group": { "type": "boolean" },
"can_create_project": { "type": "boolean" },
"two_factor_enabled": { "type": "boolean" },
- "external": { "type": "boolean" }
+ "external": { "type": "boolean" },
+ "commit_email": { "type": "string" }
}
}
diff --git a/spec/fixtures/ce_sample_schema.json b/spec/fixtures/ce_sample_schema.json
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/spec/fixtures/ce_sample_schema.json
diff --git a/spec/fixtures/config/mail_room_enabled_ms_graph.yml b/spec/fixtures/config/mail_room_enabled_ms_graph.yml
new file mode 100644
index 00000000000..791760e1dfd
--- /dev/null
+++ b/spec/fixtures/config/mail_room_enabled_ms_graph.yml
@@ -0,0 +1,26 @@
+test:
+ incoming_email:
+ enabled: true
+ address: "gitlab-incoming+%{key}@gmail.com"
+ user: "gitlab-incoming@gmail.com"
+ mailbox: "inbox"
+ expunge_deleted: true
+ inbox_method: "microsoft_graph"
+ inbox_options:
+ tenant_id: "12345"
+ client_id: "MY-CLIENT-ID"
+ client_secret: "MY-CLIENT-SECRET"
+ poll_interval: 60
+
+ service_desk_email:
+ enabled: true
+ address: "gitlab-incoming+%{key}@gmail.com"
+ user: "gitlab-incoming@gmail.com"
+ mailbox: "inbox"
+ expunge_deleted: true
+ inbox_method: "microsoft_graph"
+ inbox_options:
+ tenant_id: "12345"
+ client_id: "MY-CLIENT-ID"
+ client_secret: "MY-CLIENT-SECRET"
+ poll_interval: 60
diff --git a/spec/fixtures/emails/update_commands_only.eml b/spec/fixtures/emails/update_commands_only.eml
new file mode 100644
index 00000000000..9442d9423f0
--- /dev/null
+++ b/spec/fixtures/emails/update_commands_only.eml
@@ -0,0 +1,22 @@
+Return-Path: <jake@adventuretime.ooo>
+Received: from iceking.adventuretime.ooo ([unix socket]) by iceking (Cyrus v2.2.13-Debian-2.2.13-19+squeeze3) with LMTPA; Thu, 13 Jun 2013 17:03:50 -0400
+Received: from mail-ie0-x234.google.com (mail-ie0-x234.google.com [IPv6:2607:f8b0:4001:c03::234]) by iceking.adventuretime.ooo (8.14.3/8.14.3/Debian-9.4) with ESMTP id r5DL3nFJ016967 (version=TLSv1/SSLv3 cipher=RC4-SHA bits=128 verify=NOT) for <reply+59d8df8370b7e95c5a49fbf86aeb2c93@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 17:03:50 -0400
+Received: by mail-ie0-f180.google.com with SMTP id f4so21977375iea.25 for <reply+59d8df8370b7e95c5a49fbf86aeb2c93@appmail.adventuretime.ooo>; Thu, 13 Jun 2013 14:03:48 -0700
+Received: by 10.0.0.1 with HTTP; Thu, 13 Jun 2013 14:03:48 -0700
+Date: Thu, 13 Jun 2013 17:03:48 -0400
+From: Jake the Dog <jake@adventuretime.ooo>
+To: reply+59d8df8370b7e95c5a49fbf86aeb2c93@appmail.adventuretime.ooo
+Message-ID: <CADkmRc+rNGAGGbV2iE5p918UVy4UyJqVcXRO2=otppgzduJSg@mail.gmail.com>
+In-Reply-To: <issue_1@localhost>
+References: <reply-59d8df8370b7e95c5a49fbf86aeb2c93@localhost> <issue_1@localhost>
+Subject: re: [Discourse Meta] eviltrout posted in 'Adventure Time Sux'
+Mime-Version: 1.0
+Content-Type: text/plain;
+ charset=ISO-8859-1
+Content-Transfer-Encoding: 7bit
+X-Sieve: CMU Sieve 2.2
+X-Received: by 10.0.0.1 with SMTP id n7mr11234144ipb.85.1371157428600; Thu,
+ 13 Jun 2013 14:03:48 -0700 (PDT)
+X-Scanned-By: MIMEDefang 2.69 on IPv6:2001:470:1d:165::1
+
+/close
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml
new file mode 100644
index 00000000000..90d395e1eda
--- /dev/null
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric.yml
@@ -0,0 +1,21 @@
+---
+# See Usage Ping metrics dictionary docs https://docs.gitlab.com/ee/development/usage_ping/metrics_dictionary.html
+key_path: counts_weekly.test_metric
+description:
+product_section:
+product_stage:
+product_group:
+product_category:
+value_type: number
+status: implemented
+milestone: "13.9"
+introduced_by_url:
+time_frame: 7d
+data_source:
+distribution:
+- ce
+# Add here corresponding tiers
+# tier:
+# - free
+# - premium
+# - ultimate
diff --git a/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml
new file mode 100644
index 00000000000..47fc1d7e376
--- /dev/null
+++ b/spec/fixtures/lib/generators/gitlab/usage_metric_definition_generator/sample_metric_with_name_suggestions.yml
@@ -0,0 +1,22 @@
+---
+# See Usage Ping metrics dictionary docs https://docs.gitlab.com/ee/development/usage_ping/metrics_dictionary.html
+key_path: counts_weekly.test_metric
+name: test metric name
+description:
+product_section:
+product_stage:
+product_group:
+product_category:
+value_type: number
+status: implemented
+milestone: "13.9"
+introduced_by_url:
+time_frame: 7d
+data_source:
+distribution:
+- ce
+# Add here corresponding tiers
+# tier:
+# - free
+# - premium
+# - ultimate
diff --git a/spec/fixtures/lib/gitlab/performance_bar/peek_data.json b/spec/fixtures/lib/gitlab/performance_bar/peek_data.json
index 8e207b69ecb..c60e787ddb1 100644
--- a/spec/fixtures/lib/gitlab/performance_bar/peek_data.json
+++ b/spec/fixtures/lib/gitlab/performance_bar/peek_data.json
@@ -13,6 +13,8 @@
"duration": 1.096,
"sql": "SELECT COUNT(*) FROM ((SELECT \"badges\".* FROM \"badges\" WHERE \"badges\".\"type\" = 'ProjectBadge' AND \"badges\".\"project_id\" = 8)\nUNION\n(SELECT \"badges\".* FROM \"badges\" WHERE \"badges\".\"type\" = 'GroupBadge' AND \"badges\".\"group_id\" IN (SELECT \"namespaces\".\"id\" FROM \"namespaces\" WHERE \"namespaces\".\"type\" = 'Group' AND \"namespaces\".\"id\" = 28))) badges",
"backtrace": [
+ "ee/lib/ee/peek/views/active_record.rb:11:in `generate_detail'",
+ "lib/peek/views/active_record.rb:42:in `block in setup_subscribers'",
"lib/gitlab/pagination/offset_pagination.rb:53:in `add_pagination_headers'",
"lib/gitlab/pagination/offset_pagination.rb:15:in `block in paginate'",
"lib/gitlab/pagination/offset_pagination.rb:14:in `tap'",
diff --git a/spec/fixtures/packages/rubygems/package-0.0.1.gem b/spec/fixtures/packages/rubygems/package-0.0.1.gem
index 2143ef408ac..658ef4ee25f 100644
--- a/spec/fixtures/packages/rubygems/package-0.0.1.gem
+++ b/spec/fixtures/packages/rubygems/package-0.0.1.gem
Binary files differ
diff --git a/spec/fixtures/packages/rubygems/package.gem b/spec/fixtures/packages/rubygems/package.gem
new file mode 100644
index 00000000000..658ef4ee25f
--- /dev/null
+++ b/spec/fixtures/packages/rubygems/package.gem
Binary files differ
diff --git a/spec/fixtures/packages/rubygems/package.gemspec b/spec/fixtures/packages/rubygems/package.gemspec
index bb87c47f5dc..ea03414cc6f 100644
--- a/spec/fixtures/packages/rubygems/package.gemspec
+++ b/spec/fixtures/packages/rubygems/package.gemspec
@@ -1,15 +1,42 @@
# frozen_string_literal: true
Gem::Specification.new do |s|
- s.name = %q{package}
- s.authors = ["Tanuki Steve"]
- s.version = "0.0.1"
- s.date = %q{2011-09-29}
- s.summary = %q{package is the best}
- s.files = [
- "lib/package.rb"
- ]
+ s.name = 'package'
+ s.authors = ['Tanuki Steve', 'Hal 9000']
+ s.author = 'Tanuki Steve'
+ s.version = '0.0.1'
+ s.date = '2011-09-29'
+ s.summary = 'package is the best'
+ s.files = ['lib/test_gem.rb']
+ s.require_paths = ['lib']
+
+ s.description = 'A test package for GitLab.'
+ s.email = 'tanuki@not_real.com'
+ s.homepage = 'https://gitlab.com/ruby-co/my-package'
+ s.license = 'MIT'
+
+ s.metadata = {
+ 'bug_tracker_uri' => 'https://gitlab.com/ruby-co/my-package/issues',
+ 'changelog_uri' => 'https://gitlab.com/ruby-co/my-package/CHANGELOG.md',
+ 'documentation_uri' => 'https://gitlab.com/ruby-co/my-package/docs',
+ 'mailing_list_uri' => 'https://gitlab.com/ruby-co/my-package/mailme',
+ 'source_code_uri' => 'https://gitlab.com/ruby-co/my-package'
+ }
+
+ s.bindir = 'bin'
+ s.executables = ['rake']
+ s.extensions = ['ext/foo.rb']
+ s.extra_rdoc_files = ['README.md', 'doc/userguide.md']
+ s.platform = Gem::Platform::RUBY
+ s.post_install_message = 'Installed, thank you!'
+ s.rdoc_options = ['--main', 'README.md']
s.required_ruby_version = '>= 2.7.0'
- s.rubygems_version = '>= 1.8.11'
- s.require_paths = ["lib"]
+ s.required_rubygems_version = '>= 1.8.11'
+ s.requirements = 'A high powered server or calculator'
+ s.rubygems_version = '1.8.09'
+
+ s.add_dependency 'dependency_1', '~> 1.2.3'
+ s.add_dependency 'dependency_2', '3.0.0'
+ s.add_dependency 'dependency_3', '>= 1.0.0'
+ s.add_dependency 'dependency_4'
end
diff --git a/spec/fixtures/security_reports/master/gl-sast-report.json b/spec/fixtures/security_reports/master/gl-sast-report.json
index ab610945508..9da9fdc3832 100644
--- a/spec/fixtures/security_reports/master/gl-sast-report.json
+++ b/spec/fixtures/security_reports/master/gl-sast-report.json
@@ -1,51 +1,8 @@
{
- "version": "1.2",
+ "version": "14.0.0",
"vulnerabilities": [
{
"category": "sast",
- "message": "Probable insecure usage of temp file/directory.",
- "cve": "python/hardcoded/hardcoded-tmp.py:52865813c884a507be1f152d654245af34aba8a391626d01f1ab6d3f52ec8779:B108",
- "severity": "Medium",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-tmp.py",
- "start_line": 1,
- "end_line": 1
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B108",
- "value": "B108",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html"
- }
- ],
- "priority": "Medium",
- "file": "python/hardcoded/hardcoded-tmp.py",
- "line": 1,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html",
- "tool": "bandit",
- "tracking": {
- "type": "source",
- "items": [
- {
- "file": "python/hardcoded/hardcoded-tmp.py",
- "start_line": 1,
- "end_line": 1,
- "fingerprints": [
- { "algorithm": "hash", "value": "HASHVALUE" },
- { "algorithm": "scope_offset", "value": "python/hardcoded/hardcoded-tmp.py:ClassA:method_b:2" }
- ]
- }
- ]
- }
- },
- {
- "category": "sast",
"name": "Predictable pseudorandom number generator",
"message": "Predictable pseudorandom number generator",
"cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:47:PREDICTABLE_RANDOM",
@@ -69,20 +26,15 @@
"value": "PREDICTABLE_RANDOM",
"url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM"
}
- ],
- "priority": "Medium",
- "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
- "line": 47,
- "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM",
- "tool": "find_sec_bugs"
+ ]
},
{
"category": "sast",
"name": "Predictable pseudorandom number generator",
"message": "Predictable pseudorandom number generator",
"cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:41:PREDICTABLE_RANDOM",
- "severity": "Medium",
- "confidence": "Medium",
+ "severity": "Low",
+ "confidence": "Low",
"scanner": {
"id": "find_sec_bugs",
"name": "Find Security Bugs"
@@ -101,153 +53,48 @@
"value": "PREDICTABLE_RANDOM",
"url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM"
}
- ],
- "priority": "Medium",
- "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
- "line": 41,
- "url": "https://find-sec-bugs.github.io/bugs.htm#PREDICTABLE_RANDOM",
- "tool": "find_sec_bugs"
- },
- {
- "category": "sast",
- "message": "Use of insecure MD2, MD4, or MD5 hash function.",
- "cve": "python/imports/imports-aliases.py:cb203b465dffb0cb3a8e8bd8910b84b93b0a5995a938e4b903dbb0cd6ffa1254:B303",
- "severity": "Medium",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 11,
- "end_line": 11
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B303",
- "value": "B303"
- }
- ],
- "priority": "Medium",
- "file": "python/imports/imports-aliases.py",
- "line": 11,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Use of insecure MD2, MD4, or MD5 hash function.",
- "cve": "python/imports/imports-aliases.py:a7173c43ae66bd07466632d819d450e0071e02dbf782763640d1092981f9631b:B303",
- "severity": "Medium",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 12,
- "end_line": 12
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B303",
- "value": "B303"
- }
- ],
- "priority": "Medium",
- "file": "python/imports/imports-aliases.py",
- "line": 12,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Use of insecure MD2, MD4, or MD5 hash function.",
- "cve": "python/imports/imports-aliases.py:017017b77deb0b8369b6065947833eeea752a92ec8a700db590fece3e934cf0d:B303",
- "severity": "Medium",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 13,
- "end_line": 13
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B303",
- "value": "B303"
- }
- ],
- "priority": "Medium",
- "file": "python/imports/imports-aliases.py",
- "line": 13,
- "tool": "bandit"
+ ]
},
{
"category": "sast",
- "message": "Use of insecure MD2, MD4, or MD5 hash function.",
- "cve": "python/imports/imports-aliases.py:45fc8c53aea7b84f06bc4e590cc667678d6073c4c8a1d471177ca2146fb22db2:B303",
+ "name": "ECB mode is insecure",
+ "message": "ECB mode is insecure",
+ "description": "The cipher uses ECB mode, which provides poor confidentiality for encrypted data",
+ "cve": "ea0f905fc76f2739d5f10a1fd1e37a10:ECB_MODE:java-maven/src/main/java/com/gitlab/security_products/tests/App.java:29",
"severity": "Medium",
"confidence": "High",
"scanner": {
- "id": "bandit",
- "name": "Bandit"
+ "id": "find_sec_bugs",
+ "name": "Find Security Bugs"
},
"location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 14,
- "end_line": 14
+ "file": "java-maven/src/main/java/com/gitlab/security_products/tests/App.java",
+ "start_line": 29,
+ "end_line": 29,
+ "class": "com.gitlab.security_products.tests.App",
+ "method": "insecureCypher"
},
"identifiers": [
{
- "type": "bandit_test_id",
- "name": "Bandit Test ID B303",
- "value": "B303"
- }
- ],
- "priority": "Medium",
- "file": "python/imports/imports-aliases.py",
- "line": 14,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Pickle library appears to be in use, possible security issue.",
- "cve": "python/imports/imports-aliases.py:5f200d47291e7bbd8352db23019b85453ca048dd98ea0c291260fa7d009963a4:B301",
- "severity": "Medium",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 15,
- "end_line": 15
- },
- "identifiers": [
+ "type": "find_sec_bugs_type",
+ "name": "Find Security Bugs-ECB_MODE",
+ "value": "ECB_MODE",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#ECB_MODE"
+ },
{
- "type": "bandit_test_id",
- "name": "Bandit Test ID B301",
- "value": "B301"
+ "type": "cwe",
+ "name": "CWE-327",
+ "value": "327",
+ "url": "https://cwe.mitre.org/data/definitions/327.html"
}
- ],
- "priority": "Medium",
- "file": "python/imports/imports-aliases.py",
- "line": 15,
- "tool": "bandit"
+ ]
},
{
"category": "sast",
- "name": "ECB mode is insecure",
- "message": "ECB mode is insecure",
- "cve": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:29:ECB_MODE",
+ "name": "Hard coded key",
+ "message": "Hard coded key",
+ "description": "Hard coded cryptographic key found",
+ "cve": "102ac67e0975ecec02a056008e0faad8:HARD_CODE_KEY:scala-sbt/src/main/scala/example/Main.scala:12",
"severity": "Medium",
"confidence": "High",
"scanner": {
@@ -255,25 +102,26 @@
"name": "Find Security Bugs"
},
"location": {
- "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
- "start_line": 29,
- "end_line": 29,
- "class": "com.gitlab.security_products.tests.App",
- "method": "insecureCypher"
+ "file": "scala-sbt/src/main/scala/example/Main.scala",
+ "start_line": 12,
+ "end_line": 12,
+ "class": "example.Main$",
+ "method": "getBytes"
},
"identifiers": [
{
"type": "find_sec_bugs_type",
- "name": "Find Security Bugs-ECB_MODE",
- "value": "ECB_MODE",
- "url": "https://find-sec-bugs.github.io/bugs.htm#ECB_MODE"
+ "name": "Find Security Bugs-HARD_CODE_KEY",
+ "value": "HARD_CODE_KEY",
+ "url": "https://find-sec-bugs.github.io/bugs.htm#HARD_CODE_KEY"
+ },
+ {
+ "type": "cwe",
+ "name": "CWE-321",
+ "value": "321",
+ "url": "https://cwe.mitre.org/data/definitions/321.html"
}
- ],
- "priority": "Medium",
- "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
- "line": 29,
- "url": "https://find-sec-bugs.github.io/bugs.htm#ECB_MODE",
- "tool": "find_sec_bugs"
+ ]
},
{
"category": "sast",
@@ -301,693 +149,38 @@
"url": "https://find-sec-bugs.github.io/bugs.htm#CIPHER_INTEGRITY"
}
],
- "priority": "Medium",
- "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
- "line": 29,
- "url": "https://find-sec-bugs.github.io/bugs.htm#CIPHER_INTEGRITY",
- "tool": "find_sec_bugs"
- },
- {
- "category": "sast",
- "message": "Probable insecure usage of temp file/directory.",
- "cve": "python/hardcoded/hardcoded-tmp.py:63dd4d626855555b816985d82c4614a790462a0a3ada89dc58eb97f9c50f3077:B108",
- "severity": "Medium",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-tmp.py",
- "start_line": 14,
- "end_line": 14
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B108",
- "value": "B108",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html"
- }
- ],
- "priority": "Medium",
- "file": "python/hardcoded/hardcoded-tmp.py",
- "line": 14,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Probable insecure usage of temp file/directory.",
- "cve": "python/hardcoded/hardcoded-tmp.py:4ad6d4c40a8c263fc265f3384724014e0a4f8dd6200af83e51ff120420038031:B108",
- "severity": "Medium",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-tmp.py",
- "start_line": 10,
- "end_line": 10
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B108",
- "value": "B108",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html"
- }
- ],
- "priority": "Medium",
- "file": "python/hardcoded/hardcoded-tmp.py",
- "line": 10,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b108_hardcoded_tmp_directory.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with Popen module.",
- "cve": "python/imports/imports-aliases.py:2c3e1fa1e54c3c6646e8bcfaee2518153c6799b77587ff8d9a7b0631f6d34785:B404",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 1,
- "end_line": 1
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B404",
- "value": "B404"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-aliases.py",
- "line": 1,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with pickle module.",
- "cve": "python/imports/imports.py:af58d07f6ad519ef5287fcae65bf1a6999448a1a3a8bc1ac2a11daa80d0b96bf:B403",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports.py",
- "start_line": 2,
- "end_line": 2
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B403",
- "value": "B403"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports.py",
- "line": 2,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with subprocess module.",
- "cve": "python/imports/imports.py:8de9bc98029d212db530785a5f6780cfa663548746ff228ab8fa96c5bb82f089:B404",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports.py",
- "start_line": 4,
- "end_line": 4
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B404",
- "value": "B404"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports.py",
- "line": 4,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Possible hardcoded password: 'blerg'",
- "cve": "python/hardcoded/hardcoded-passwords.py:97c30f1d76d2a88913e3ce9ae74087874d740f87de8af697a9c455f01119f633:B106",
- "severity": "Low",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-passwords.py",
- "start_line": 22,
- "end_line": 22
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B106",
- "value": "B106",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b106_hardcoded_password_funcarg.html"
- }
- ],
- "priority": "Low",
- "file": "python/hardcoded/hardcoded-passwords.py",
- "line": 22,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b106_hardcoded_password_funcarg.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Possible hardcoded password: 'root'",
- "cve": "python/hardcoded/hardcoded-passwords.py:7431c73a0bc16d94ece2a2e75ef38f302574d42c37ac0c3c38ad0b3bf8a59f10:B105",
- "severity": "Low",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-passwords.py",
- "start_line": 5,
- "end_line": 5
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B105",
- "value": "B105",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
- }
- ],
- "priority": "Low",
- "file": "python/hardcoded/hardcoded-passwords.py",
- "line": 5,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Possible hardcoded password: ''",
- "cve": "python/hardcoded/hardcoded-passwords.py:d2d1857c27caedd49c57bfbcdc23afcc92bd66a22701fcdc632869aab4ca73ee:B105",
- "severity": "Low",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-passwords.py",
- "start_line": 9,
- "end_line": 9
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B105",
- "value": "B105",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
- }
- ],
- "priority": "Low",
- "file": "python/hardcoded/hardcoded-passwords.py",
- "line": 9,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Possible hardcoded password: 'ajklawejrkl42348swfgkg'",
- "cve": "python/hardcoded/hardcoded-passwords.py:fb3866215a61393a5c9c32a3b60e2058171a23219c353f722cbd3567acab21d2:B105",
- "severity": "Low",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-passwords.py",
- "start_line": 13,
- "end_line": 13
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B105",
- "value": "B105",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
- }
- ],
- "priority": "Low",
- "file": "python/hardcoded/hardcoded-passwords.py",
- "line": 13,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Possible hardcoded password: 'blerg'",
- "cve": "python/hardcoded/hardcoded-passwords.py:63c62a8b7e1e5224439bd26b28030585ac48741e28ca64561a6071080c560a5f:B105",
- "severity": "Low",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-passwords.py",
- "start_line": 23,
- "end_line": 23
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B105",
- "value": "B105",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
- }
- ],
- "priority": "Low",
- "file": "python/hardcoded/hardcoded-passwords.py",
- "line": 23,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Possible hardcoded password: 'blerg'",
- "cve": "python/hardcoded/hardcoded-passwords.py:4311b06d08df8fa58229b341c531da8e1a31ec4520597bdff920cd5c098d86f9:B105",
- "severity": "Low",
- "confidence": "Medium",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/hardcoded/hardcoded-passwords.py",
- "start_line": 24,
- "end_line": 24
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B105",
- "value": "B105",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html"
- }
- ],
- "priority": "Low",
- "file": "python/hardcoded/hardcoded-passwords.py",
- "line": 24,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b105_hardcoded_password_string.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with subprocess module.",
- "cve": "python/imports/imports-function.py:5858400c2f39047787702de44d03361ef8d954c9d14bd54ee1c2bef9e6a7df93:B404",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-function.py",
- "start_line": 4,
- "end_line": 4
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B404",
- "value": "B404"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-function.py",
- "line": 4,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with pickle module.",
- "cve": "python/imports/imports-function.py:dbda3cf4190279d30e0aad7dd137eca11272b0b225e8af4e8bf39682da67d956:B403",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-function.py",
- "start_line": 2,
- "end_line": 2
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B403",
- "value": "B403"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-function.py",
- "line": 2,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with Popen module.",
- "cve": "python/imports/imports-from.py:eb8a0db9cd1a8c1ab39a77e6025021b1261cc2a0b026b2f4a11fca4e0636d8dd:B404",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-from.py",
- "start_line": 7,
- "end_line": 7
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B404",
- "value": "B404"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-from.py",
- "line": 7,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "subprocess call with shell=True seems safe, but may be changed in the future, consider rewriting without shell",
- "cve": "python/imports/imports-aliases.py:f99f9721e27537fbcb6699a4cf39c6740d6234d2c6f06cfc2d9ea977313c483d:B602",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 9,
- "end_line": 9
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B602",
- "value": "B602",
- "url": "https://docs.openstack.org/bandit/latest/plugins/b602_subprocess_popen_with_shell_equals_true.html"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-aliases.py",
- "line": 9,
- "url": "https://docs.openstack.org/bandit/latest/plugins/b602_subprocess_popen_with_shell_equals_true.html",
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with subprocess module.",
- "cve": "python/imports/imports-from.py:332a12ab1146698f614a905ce6a6a5401497a12281aef200e80522711c69dcf4:B404",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-from.py",
- "start_line": 6,
- "end_line": 6
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B404",
- "value": "B404"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-from.py",
- "line": 6,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with Popen module.",
- "cve": "python/imports/imports-from.py:0a48de4a3d5348853a03666cb574697e3982998355e7a095a798bd02a5947276:B404",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-from.py",
- "start_line": 1,
- "end_line": 2
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B404",
- "value": "B404"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-from.py",
- "line": 1,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with pickle module.",
- "cve": "python/imports/imports-aliases.py:51b71661dff994bde3529639a727a678c8f5c4c96f00d300913f6d5be1bbdf26:B403",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 7,
- "end_line": 8
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B403",
- "value": "B403"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-aliases.py",
- "line": 7,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Consider possible security implications associated with loads module.",
- "cve": "python/imports/imports-aliases.py:6ff02aeb3149c01ab68484d794a94f58d5d3e3bb0d58557ef4153644ea68ea54:B403",
- "severity": "Low",
- "confidence": "High",
- "scanner": {
- "id": "bandit",
- "name": "Bandit"
- },
- "location": {
- "file": "python/imports/imports-aliases.py",
- "start_line": 6,
- "end_line": 6
- },
- "identifiers": [
- {
- "type": "bandit_test_id",
- "name": "Bandit Test ID B403",
- "value": "B403"
- }
- ],
- "priority": "Low",
- "file": "python/imports/imports-aliases.py",
- "line": 6,
- "tool": "bandit"
- },
- {
- "category": "sast",
- "message": "Statically-sized arrays can be improperly restricted, leading to potential overflows or other issues (CWE-119!/CWE-120)",
- "cve": "c/subdir/utils.c:b466873101951fe96e1332f6728eb7010acbbd5dfc3b65d7d53571d091a06d9e:CWE-119!/CWE-120",
- "confidence": "Low",
- "solution": "Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length",
- "scanner": {
- "id": "flawfinder",
- "name": "Flawfinder"
- },
- "location": {
- "file": "c/subdir/utils.c",
- "start_line": 4
- },
- "identifiers": [
- {
- "type": "flawfinder_func_name",
- "name": "Flawfinder - char",
- "value": "char"
- },
- {
- "type": "cwe",
- "name": "CWE-119",
- "value": "119",
- "url": "https://cwe.mitre.org/data/definitions/119.html"
- },
- {
- "type": "cwe",
- "name": "CWE-120",
- "value": "120",
- "url": "https://cwe.mitre.org/data/definitions/120.html"
- }
- ],
- "file": "c/subdir/utils.c",
- "line": 4,
- "url": "https://cwe.mitre.org/data/definitions/119.html",
- "tool": "flawfinder"
- },
- {
- "category": "sast",
- "message": "Check when opening files - can an attacker redirect it (via symlinks), force the opening of special file type (e.g., device files), move things around to create a race condition, control its ancestors, or change its contents? (CWE-362)",
- "cve": "c/subdir/utils.c:bab681140fcc8fc3085b6bba74081b44ea145c1c98b5e70cf19ace2417d30770:CWE-362",
- "confidence": "Low",
- "scanner": {
- "id": "flawfinder",
- "name": "Flawfinder"
- },
- "location": {
- "file": "c/subdir/utils.c",
- "start_line": 8
- },
- "identifiers": [
- {
- "type": "flawfinder_func_name",
- "name": "Flawfinder - fopen",
- "value": "fopen"
- },
- {
- "type": "cwe",
- "name": "CWE-362",
- "value": "362",
- "url": "https://cwe.mitre.org/data/definitions/362.html"
- }
- ],
- "file": "c/subdir/utils.c",
- "line": 8,
- "url": "https://cwe.mitre.org/data/definitions/362.html",
- "tool": "flawfinder"
- },
- {
- "category": "sast",
- "message": "Statically-sized arrays can be improperly restricted, leading to potential overflows or other issues (CWE-119!/CWE-120)",
- "cve": "cplusplus/src/hello.cpp:c8c6dd0afdae6814194cf0930b719f757ab7b379cf8f261e7f4f9f2f323a818a:CWE-119!/CWE-120",
- "confidence": "Low",
- "solution": "Perform bounds checking, use functions that limit length, or ensure that the size is larger than the maximum possible length",
- "scanner": {
- "id": "flawfinder",
- "name": "Flawfinder"
- },
- "location": {
- "file": "cplusplus/src/hello.cpp",
- "start_line": 6
- },
- "identifiers": [
- {
- "type": "flawfinder_func_name",
- "name": "Flawfinder - char",
- "value": "char"
- },
- {
- "type": "cwe",
- "name": "CWE-119",
- "value": "119",
- "url": "https://cwe.mitre.org/data/definitions/119.html"
- },
- {
- "type": "cwe",
- "name": "CWE-120",
- "value": "120",
- "url": "https://cwe.mitre.org/data/definitions/120.html"
- }
- ],
- "file": "cplusplus/src/hello.cpp",
- "line": 6,
- "url": "https://cwe.mitre.org/data/definitions/119.html",
- "tool": "flawfinder"
- },
- {
- "category": "sast",
- "message": "Does not check for buffer overflows when copying to destination [MS-banned] (CWE-120)",
- "cve": "cplusplus/src/hello.cpp:331c04062c4fe0c7c486f66f59e82ad146ab33cdd76ae757ca41f392d568cbd0:CWE-120",
- "confidence": "Low",
- "solution": "Consider using snprintf, strcpy_s, or strlcpy (warning: strncpy easily misused)",
- "scanner": {
- "id": "flawfinder",
- "name": "Flawfinder"
- },
- "location": {
- "file": "cplusplus/src/hello.cpp",
- "start_line": 7
- },
- "identifiers": [
- {
- "type": "flawfinder_func_name",
- "name": "Flawfinder - strcpy",
- "value": "strcpy"
- },
- {
- "type": "cwe",
- "name": "CWE-120",
- "value": "120",
- "url": "https://cwe.mitre.org/data/definitions/120.html"
- }
- ],
- "file": "cplusplus/src/hello.cpp",
- "line": 7,
- "url": "https://cwe.mitre.org/data/definitions/120.html",
- "tool": "flawfinder"
+ "tracking": {
+ "type": "source",
+ "items": [
+ {
+ "file": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy",
+ "start_line": 47,
+ "end_line": 47,
+ "signatures": [
+ {
+ "algorithm": "hash",
+ "value": "HASHVALUE"
+ },
+ {
+ "algorithm": "scope_offset",
+ "value": "groovy/src/main/java/com/gitlab/security_products/tests/App.groovy:App[0]:insecureCypher[0]:2"
+ }
+ ]
+ }
+ ]
+ }
}
],
"remediations": [],
"scan": {
"scanner": {
- "id": "gosec",
- "name": "Gosec",
- "url": "https://github.com/securego/gosec",
+ "id": "find_sec_bugs",
+ "name": "Find Security Bugs",
+ "url": "https://spotbugs.github.io",
"vendor": {
"name": "GitLab"
},
- "version": "2.3.0"
+ "version": "4.0.2"
},
"type": "sast",
"status": "success",
diff --git a/spec/fixtures/unsafe_javascript.xml b/spec/fixtures/unsafe_javascript.xml
new file mode 100644
index 00000000000..0c23d1e07db
--- /dev/null
+++ b/spec/fixtures/unsafe_javascript.xml
@@ -0,0 +1,10 @@
+<manifest>
+ <remote review="javascript://HelloTheGitlabSecurityTeam%0Aprompt(1)%0A" />
+
+ <project path="test1" name="manifest1" />
+ <project path="test2" name="manifest2" />
+ <project path="test3" name="manifest3" />
+ <project path="test4" name="manifest4" />
+ <project path="test5" name="manifest5" />
+ <project path="test6" name="manifest6" />
+</manifest> \ No newline at end of file
diff --git a/spec/frontend/__helpers__/experimentation_helper.js b/spec/frontend/__helpers__/experimentation_helper.js
index c08c25155e8..7a2ef61216a 100644
--- a/spec/frontend/__helpers__/experimentation_helper.js
+++ b/spec/frontend/__helpers__/experimentation_helper.js
@@ -12,3 +12,16 @@ export function withGonExperiment(experimentKey, value = true) {
window.gon = origGon;
});
}
+// This helper is for specs that use `gitlab-experiment` utilities, which push a different schema via Gon than the `Experimentation Module` does.
+export function assignGitlabExperiment(experimentKey, variant) {
+ let origGon;
+
+ beforeEach(() => {
+ origGon = window.gon;
+ window.gon = { experiment: { [experimentKey]: { variant } } };
+ });
+
+ afterEach(() => {
+ window.gon = origGon;
+ });
+}
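+
+// Illustrative usage only (not part of this change; `my_experiment` is a hypothetical key):
+//
+//   assignGitlabExperiment('my_experiment', 'candidate');
+//
+//   it('exposes the variant via gon', () => {
+//     expect(window.gon.experiment.my_experiment.variant).toBe('candidate');
+//   });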
diff --git a/spec/frontend/__helpers__/mock_apollo_helper.js b/spec/frontend/__helpers__/mock_apollo_helper.js
index 914cce1d662..bd97a06071a 100644
--- a/spec/frontend/__helpers__/mock_apollo_helper.js
+++ b/spec/frontend/__helpers__/mock_apollo_helper.js
@@ -2,11 +2,15 @@ import { InMemoryCache } from 'apollo-cache-inmemory';
import { createMockClient } from 'mock-apollo-client';
import VueApollo from 'vue-apollo';
-export default (handlers = [], resolvers = {}) => {
- const fragmentMatcher = { match: () => true };
+const defaultCacheOptions = {
+ fragmentMatcher: { match: () => true },
+ addTypename: false,
+};
+
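+// Illustrative note (not part of this change): callers of this default export can now override the
+// cache defaults, e.g. `createMockApollo(handlers, resolvers, { addTypename: true })`, where
+// `createMockApollo` is whatever name the importing spec gives the default export.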
+export default (handlers = [], resolvers = {}, cacheOptions = {}) => {
const cache = new InMemoryCache({
- fragmentMatcher,
- addTypename: false,
+ ...defaultCacheOptions,
+ ...cacheOptions,
});
const mockClient = createMockClient({ cache, resolvers });
diff --git a/spec/frontend/__helpers__/vue_test_utils_helper.js b/spec/frontend/__helpers__/vue_test_utils_helper.js
index d6132ef84ac..a94cee84f74 100644
--- a/spec/frontend/__helpers__/vue_test_utils_helper.js
+++ b/spec/frontend/__helpers__/vue_test_utils_helper.js
@@ -1,4 +1,6 @@
-import { isArray } from 'lodash';
+import * as testingLibrary from '@testing-library/dom';
+import { createWrapper, WrapperArray, mount, shallowMount } from '@vue/test-utils';
+import { isArray, upperFirst } from 'lodash';
const vNodeContainsText = (vnode, text) =>
(vnode.text && vnode.text.includes(text)) ||
@@ -37,6 +39,17 @@ export const waitForMutation = (store, expectedMutationType) =>
});
export const extendedWrapper = (wrapper) => {
+ // https://testing-library.com/docs/queries/about
+ const AVAILABLE_QUERIES = [
+ 'byRole',
+ 'byLabelText',
+ 'byPlaceholderText',
+ 'byText',
+ 'byDisplayValue',
+ 'byAltText',
+ 'byTitle',
+ ];
+
if (isArray(wrapper) || !wrapper?.find) {
// eslint-disable-next-line no-console
console.warn(
@@ -56,5 +69,63 @@ export const extendedWrapper = (wrapper) => {
return this.findAll(`[data-testid="${id}"]`);
},
},
+ // `findBy`
+ ...AVAILABLE_QUERIES.reduce((accumulator, query) => {
+ return {
+ ...accumulator,
+ [`find${upperFirst(query)}`]: {
+ value(text, options = {}) {
+ const elements = testingLibrary[`queryAll${upperFirst(query)}`](
+ wrapper.element,
+ text,
+ options,
+ );
+
+ // Return VTU `ErrorWrapper` if element is not found
+ // https://github.com/vuejs/vue-test-utils/blob/dev/packages/test-utils/src/error-wrapper.js
+ // VTU does not expose `ErrorWrapper` so, as of now, this is the best way to
+ // create an `ErrorWrapper`
+ if (!elements.length) {
+ const emptyElement = document.createElement('div');
+
+ return createWrapper(emptyElement).find('testing-library-element-not-found');
+ }
+
+ return createWrapper(elements[0], this.options || {});
+ },
+ },
+ };
+ }, {}),
+ // `findAllBy`
+ ...AVAILABLE_QUERIES.reduce((accumulator, query) => {
+ return {
+ ...accumulator,
+ [`findAll${upperFirst(query)}`]: {
+ value(text, options = {}) {
+ const elements = testingLibrary[`queryAll${upperFirst(query)}`](
+ wrapper.element,
+ text,
+ options,
+ );
+
+ const wrappers = elements.map((element) => {
+ const elementWrapper = createWrapper(element, this.options || {});
+ elementWrapper.selector = text;
+
+ return elementWrapper;
+ });
+
+ const wrapperArray = new WrapperArray(wrappers);
+ wrapperArray.selector = text;
+
+ return wrapperArray;
+ },
+ },
+ };
+ }, {}),
});
};
+
+export const shallowMountExtended = (...args) => extendedWrapper(shallowMount(...args));
+
+export const mountExtended = (...args) => extendedWrapper(mount(...args));
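+
+// Illustrative usage only (not part of this change; `MyComponent` is a hypothetical component):
+//
+//   const wrapper = shallowMountExtended(MyComponent, { propsData: { title: 'Save' } });
+//
+//   wrapper.findByTestId('submit-button');          // [data-testid="submit-button"]
+//   wrapper.findByRole('button', { name: 'Save' }); // Testing Library query returning a VTU Wrapper
+//   wrapper.findAllByText('Save');                  // returns a VTU WrapperArray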
diff --git a/spec/frontend/__helpers__/vue_test_utils_helper_spec.js b/spec/frontend/__helpers__/vue_test_utils_helper_spec.js
index d4f8e36c169..dfe5a483223 100644
--- a/spec/frontend/__helpers__/vue_test_utils_helper_spec.js
+++ b/spec/frontend/__helpers__/vue_test_utils_helper_spec.js
@@ -1,7 +1,27 @@
-import { shallowMount } from '@vue/test-utils';
-import { extendedWrapper, shallowWrapperContainsSlotText } from './vue_test_utils_helper';
+import * as testingLibrary from '@testing-library/dom';
+import * as vtu from '@vue/test-utils';
+import {
+ shallowMount,
+ Wrapper as VTUWrapper,
+ WrapperArray as VTUWrapperArray,
+} from '@vue/test-utils';
+import {
+ extendedWrapper,
+ shallowMountExtended,
+ mountExtended,
+ shallowWrapperContainsSlotText,
+} from './vue_test_utils_helper';
+
+jest.mock('@testing-library/dom', () => ({
+ __esModule: true,
+ ...jest.requireActual('@testing-library/dom'),
+}));
describe('Vue test utils helpers', () => {
+ afterAll(() => {
+ jest.unmock('@testing-library/dom');
+ });
+
describe('shallowWrapperContainsSlotText', () => {
const mockText = 'text';
const mockSlot = `<div>${mockText}</div>`;
@@ -84,7 +104,7 @@ describe('Vue test utils helpers', () => {
);
});
- it('should find the component by test id', () => {
+ it('should find the element by test id', () => {
expect(mockComponent.findByTestId(testId).exists()).toBe(true);
});
});
@@ -105,5 +125,187 @@ describe('Vue test utils helpers', () => {
expect(mockComponent.findAllByTestId(testId)).toHaveLength(2);
});
});
+
+ describe.each`
+ findMethod | expectedQuery
+ ${'findByRole'} | ${'queryAllByRole'}
+ ${'findByLabelText'} | ${'queryAllByLabelText'}
+ ${'findByPlaceholderText'} | ${'queryAllByPlaceholderText'}
+ ${'findByText'} | ${'queryAllByText'}
+ ${'findByDisplayValue'} | ${'queryAllByDisplayValue'}
+ ${'findByAltText'} | ${'queryAllByAltText'}
+ `('$findMethod', ({ findMethod, expectedQuery }) => {
+ const text = 'foo bar';
+ const options = { selector: 'div' };
+ const mockDiv = document.createElement('div');
+
+ let wrapper;
+ beforeEach(() => {
+ wrapper = extendedWrapper(
+ shallowMount({
+ template: `<div>foo bar</div>`,
+ }),
+ );
+ });
+
+ it(`calls Testing Library \`${expectedQuery}\` function with correct parameters`, () => {
+ jest.spyOn(testingLibrary, expectedQuery).mockImplementation(() => [mockDiv]);
+
+ wrapper[findMethod](text, options);
+
+ expect(testingLibrary[expectedQuery]).toHaveBeenLastCalledWith(
+ wrapper.element,
+ text,
+ options,
+ );
+ });
+
+ describe('when element is found', () => {
+ beforeEach(() => {
+ jest.spyOn(testingLibrary, expectedQuery).mockImplementation(() => [mockDiv]);
+ jest.spyOn(vtu, 'createWrapper');
+ });
+
+ it('returns a VTU wrapper', () => {
+ const result = wrapper[findMethod](text, options);
+
+ expect(vtu.createWrapper).toHaveBeenCalledWith(mockDiv, wrapper.options);
+ expect(result).toBeInstanceOf(VTUWrapper);
+ });
+ });
+
+ describe('when multiple elements are found', () => {
+ beforeEach(() => {
+ const mockSpan = document.createElement('span');
+ jest.spyOn(testingLibrary, expectedQuery).mockImplementation(() => [mockDiv, mockSpan]);
+ jest.spyOn(vtu, 'createWrapper');
+ });
+
+ it('returns the first element as a VTU wrapper', () => {
+ const result = wrapper[findMethod](text, options);
+
+ expect(vtu.createWrapper).toHaveBeenCalledWith(mockDiv, wrapper.options);
+ expect(result).toBeInstanceOf(VTUWrapper);
+ });
+ });
+
+ describe('when element is not found', () => {
+ beforeEach(() => {
+ jest.spyOn(testingLibrary, expectedQuery).mockImplementation(() => []);
+ });
+
+ it('returns a VTU error wrapper', () => {
+ expect(wrapper[findMethod](text, options).exists()).toBe(false);
+ });
+ });
+ });
+
+ describe.each`
+ findMethod | expectedQuery
+ ${'findAllByRole'} | ${'queryAllByRole'}
+ ${'findAllByLabelText'} | ${'queryAllByLabelText'}
+ ${'findAllByPlaceholderText'} | ${'queryAllByPlaceholderText'}
+ ${'findAllByText'} | ${'queryAllByText'}
+ ${'findAllByDisplayValue'} | ${'queryAllByDisplayValue'}
+ ${'findAllByAltText'} | ${'queryAllByAltText'}
+ `('$findMethod', ({ findMethod, expectedQuery }) => {
+ const text = 'foo bar';
+ const options = { selector: 'div' };
+ const mockElements = [
+ document.createElement('li'),
+ document.createElement('li'),
+ document.createElement('li'),
+ ];
+
+ let wrapper;
+ beforeEach(() => {
+ wrapper = extendedWrapper(
+ shallowMount({
+ template: `
+ <ul>
+ <li>foo</li>
+ <li>bar</li>
+ <li>baz</li>
+ </ul>
+ `,
+ }),
+ );
+ });
+
+ it(`calls Testing Library \`${expectedQuery}\` function with correct parameters`, () => {
+ jest.spyOn(testingLibrary, expectedQuery).mockImplementation(() => mockElements);
+
+ wrapper[findMethod](text, options);
+
+ expect(testingLibrary[expectedQuery]).toHaveBeenLastCalledWith(
+ wrapper.element,
+ text,
+ options,
+ );
+ });
+
+ describe('when elements are found', () => {
+ beforeEach(() => {
+ jest.spyOn(testingLibrary, expectedQuery).mockImplementation(() => mockElements);
+ });
+
+ it('returns a VTU wrapper array', () => {
+ const result = wrapper[findMethod](text, options);
+
+ expect(result).toBeInstanceOf(VTUWrapperArray);
+ expect(
+ result.wrappers.every(
+ (resultWrapper) =>
+ resultWrapper instanceof VTUWrapper && resultWrapper.options === wrapper.options,
+ ),
+ ).toBe(true);
+ expect(result.length).toBe(3);
+ });
+ });
+
+ describe('when elements are not found', () => {
+ beforeEach(() => {
+ jest.spyOn(testingLibrary, expectedQuery).mockImplementation(() => []);
+ });
+
+ it('returns an empty VTU wrapper array', () => {
+ const result = wrapper[findMethod](text, options);
+
+ expect(result).toBeInstanceOf(VTUWrapperArray);
+ expect(result.length).toBe(0);
+ });
+ });
+ });
+ });
+
+ describe.each`
+ mountExtendedFunction | expectedMountFunction
+ ${shallowMountExtended} | ${'shallowMount'}
+ ${mountExtended} | ${'mount'}
+ `('$mountExtendedFunction', ({ mountExtendedFunction, expectedMountFunction }) => {
+ const FakeComponent = jest.fn();
+ const options = {
+ propsData: {
+ foo: 'bar',
+ },
+ };
+
+ beforeEach(() => {
+ const mockWrapper = { find: jest.fn() };
+ jest.spyOn(vtu, expectedMountFunction).mockImplementation(() => mockWrapper);
+ });
+
+ it(`calls \`${expectedMountFunction}\` with passed arguments`, () => {
+ mountExtendedFunction(FakeComponent, options);
+
+ expect(vtu[expectedMountFunction]).toHaveBeenCalledWith(FakeComponent, options);
+ });
+
+ it('returns extended wrapper', () => {
+ const result = mountExtendedFunction(FakeComponent, options);
+
+ expect(result).toHaveProperty('find');
+ expect(result).toHaveProperty('findByTestId');
+ });
});
});
diff --git a/spec/frontend/__helpers__/web_worker_fake.js b/spec/frontend/__helpers__/web_worker_fake.js
new file mode 100644
index 00000000000..041a9bd8540
--- /dev/null
+++ b/spec/frontend/__helpers__/web_worker_fake.js
@@ -0,0 +1,71 @@
+import path from 'path';
+
+const isRelative = (pathArg) => pathArg.startsWith('.');
+
+const transformRequirePath = (base, pathArg) => {
+ if (!isRelative(pathArg)) {
+ return pathArg;
+ }
+
+ return path.resolve(base, pathArg);
+};
+
+const createRelativeRequire = (filename) => {
+ const rel = path.relative(__dirname, path.dirname(filename));
+ const base = path.resolve(__dirname, rel);
+
+  // reason: Dynamic require should be fine here since the code is dynamically evaluated anyway.
+ // eslint-disable-next-line import/no-dynamic-require, global-require
+ return (pathArg) => require(transformRequirePath(base, pathArg));
+};
+
+/**
+ * Simulates a WebWorker module similar to the kind created by Webpack's [`worker-loader`][1]
+ *
+ * [1]: https://webpack.js.org/loaders/worker-loader/
+ */
+export class FakeWebWorker {
+ /**
+ * Constructs a new FakeWebWorker instance
+ *
+ * @param {String} filename is the full path of the code, which is used to resolve relative imports.
+ * @param {String} code is the raw code of the web worker, which is dynamically evaluated on construction.
+ */
+ constructor(filename, code) {
+ let isAlive = true;
+
+ const clientTarget = new EventTarget();
+ const workerTarget = new EventTarget();
+
+ this.addEventListener = (...args) => clientTarget.addEventListener(...args);
+ this.removeEventListener = (...args) => clientTarget.removeEventListener(...args);
+ this.postMessage = (message) => {
+ if (!isAlive) {
+ return;
+ }
+
+ workerTarget.dispatchEvent(new MessageEvent('message', { data: message }));
+ };
+ this.terminate = () => {
+ isAlive = false;
+ };
+
+ const workerScope = {
+ addEventListener: (...args) => workerTarget.addEventListener(...args),
+ removeEventListener: (...args) => workerTarget.removeEventListener(...args),
+ postMessage: (message) => {
+ if (!isAlive) {
+ return;
+ }
+
+ clientTarget.dispatchEvent(new MessageEvent('message', { data: message }));
+ },
+ };
+
+  // reason: `no-new-func` is like `eval`, except that it is only executed in the global scope and
+  // it is easy to pass in local references. `eval` is very unsafe in production, but in our test
+  // environment we should be fine.
+ // eslint-disable-next-line no-new-func
+ Function('self', 'require', code)(workerScope, createRelativeRequire(filename));
+ }
+}
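+
+// Minimal sketch of how the fake behaves (hypothetical worker code, not part of this change):
+//
+//   const worker = new FakeWebWorker(__filename, `
+//     self.addEventListener('message', ({ data }) => self.postMessage(data * 2));
+//   `);
+//   worker.addEventListener('message', ({ data }) => console.log(data)); // logs 4
+//   worker.postMessage(2);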
diff --git a/spec/frontend/__helpers__/web_worker_mock.js b/spec/frontend/__helpers__/web_worker_mock.js
deleted file mode 100644
index 2b4a391e1d2..00000000000
--- a/spec/frontend/__helpers__/web_worker_mock.js
+++ /dev/null
@@ -1,10 +0,0 @@
-/* eslint-disable class-methods-use-this */
-export default class WebWorkerMock {
- addEventListener() {}
-
- removeEventListener() {}
-
- terminate() {}
-
- postMessage() {}
-}
diff --git a/spec/frontend/__helpers__/web_worker_transformer.js b/spec/frontend/__helpers__/web_worker_transformer.js
new file mode 100644
index 00000000000..5b2f7d77947
--- /dev/null
+++ b/spec/frontend/__helpers__/web_worker_transformer.js
@@ -0,0 +1,18 @@
+/* eslint-disable import/no-commonjs */
+const babelJestTransformer = require('babel-jest');
+
+// This Jest transformer converts the code of a WebWorker module into a FakeWebWorker subclass.
+// This is meant to mirror Webpack's [`worker-loader`][1].
+// [1]: https://webpack.js.org/loaders/worker-loader/
+module.exports = {
+ process: (contentArg, filename, ...args) => {
+ const { code: content } = babelJestTransformer.process(contentArg, filename, ...args);
+
+ return `const { FakeWebWorker } = require("helpers/web_worker_fake");
+ module.exports = class JestTransformedWorker extends FakeWebWorker {
+ constructor() {
+ super(${JSON.stringify(filename)}, ${JSON.stringify(content)});
+ }
+ };`;
+ },
+};
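+
+// A transformer like this is registered through Jest's `transform` option; an illustrative
+// mapping (the exact pattern used by GitLab's jest.config.js is not shown in this diff) would be:
+//
+//   transform: {
+//     '\\.worker\\.js$': '<rootDir>/spec/frontend/__helpers__/web_worker_transformer.js',
+//   },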
diff --git a/spec/frontend/__mocks__/vue/index.js b/spec/frontend/__mocks__/vue/index.js
new file mode 100644
index 00000000000..52a5c6c5fcd
--- /dev/null
+++ b/spec/frontend/__mocks__/vue/index.js
@@ -0,0 +1,7 @@
+import Vue from 'vue';
+
+Vue.config.productionTip = false;
+Vue.config.devtools = false;
+
+export default Vue;
+export * from 'vue';
diff --git a/spec/frontend/access_tokens/index_spec.js b/spec/frontend/access_tokens/index_spec.js
index e3f17e21739..1d8ac7cec25 100644
--- a/spec/frontend/access_tokens/index_spec.js
+++ b/spec/frontend/access_tokens/index_spec.js
@@ -25,18 +25,22 @@ describe('access tokens', () => {
});
describe.each`
- initFunction | mountSelector | expectedComponent
- ${initExpiresAtField} | ${'js-access-tokens-expires-at'} | ${ExpiresAtField}
- ${initProjectsField} | ${'js-access-tokens-projects'} | ${ProjectsField}
- `('$initFunction', ({ initFunction, mountSelector, expectedComponent }) => {
+ initFunction | mountSelector | fieldName | expectedComponent
+ ${initExpiresAtField} | ${'js-access-tokens-expires-at'} | ${'expiresAt'} | ${ExpiresAtField}
+ ${initProjectsField} | ${'js-access-tokens-projects'} | ${'projects'} | ${ProjectsField}
+ `('$initFunction', ({ initFunction, mountSelector, fieldName, expectedComponent }) => {
describe('when mount element exists', () => {
+ const nameAttribute = `access_tokens[${fieldName}]`;
+ const idAttribute = `access_tokens_${fieldName}`;
+
beforeEach(() => {
const mountEl = document.createElement('div');
mountEl.classList.add(mountSelector);
const input = document.createElement('input');
- input.setAttribute('name', 'foo-bar');
- input.setAttribute('id', 'foo-bar');
+ input.setAttribute('name', nameAttribute);
+ input.setAttribute('data-js-name', fieldName);
+ input.setAttribute('id', idAttribute);
input.setAttribute('placeholder', 'Foo bar');
input.setAttribute('value', '1,2');
@@ -57,8 +61,8 @@ describe('access tokens', () => {
expect(component.exists()).toBe(true);
expect(component.props('inputAttrs')).toEqual({
- name: 'foo-bar',
- id: 'foo-bar',
+ name: nameAttribute,
+ id: idAttribute,
value: '1,2',
placeholder: 'Foo bar',
});
diff --git a/spec/frontend/admin/signup_restrictions/components/signup_checkbox_spec.js b/spec/frontend/admin/signup_restrictions/components/signup_checkbox_spec.js
new file mode 100644
index 00000000000..ae9b6f57ee0
--- /dev/null
+++ b/spec/frontend/admin/signup_restrictions/components/signup_checkbox_spec.js
@@ -0,0 +1,66 @@
+import { GlFormCheckbox } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import SignupCheckbox from '~/pages/admin/application_settings/general/components/signup_checkbox.vue';
+
+describe('Signup Form', () => {
+ let wrapper;
+
+ const props = {
+ name: 'name',
+ helpText: 'some help text',
+ label: 'a label',
+ value: true,
+ dataQaSelector: 'qa_selector',
+ };
+
+ const mountComponent = () => {
+ wrapper = shallowMount(SignupCheckbox, {
+ propsData: props,
+ stubs: {
+ GlFormCheckbox,
+ },
+ });
+ };
+
+ const findByTestId = (id) => wrapper.find(`[data-testid="${id}"]`);
+ const findHiddenInput = () => findByTestId('input');
+ const findCheckbox = () => wrapper.find(GlFormCheckbox);
+ const findCheckboxLabel = () => findByTestId('label');
+ const findHelpText = () => findByTestId('helpText');
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('Signup Checkbox', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ describe('hidden input element', () => {
+ it('gets passed correct values from props', () => {
+ expect(findHiddenInput().attributes('name')).toBe(props.name);
+
+ expect(findHiddenInput().attributes('value')).toBe('1');
+ });
+ });
+
+ describe('checkbox', () => {
+ it('gets passed correct checked value', () => {
+ expect(findCheckbox().attributes('checked')).toBe('true');
+ });
+
+ it('gets passed correct label', () => {
+ expect(findCheckboxLabel().text()).toBe(props.label);
+ });
+
+ it('gets passed correct help text', () => {
+ expect(findHelpText().text()).toBe(props.helpText);
+ });
+
+ it('gets passed data qa selector', () => {
+ expect(findCheckbox().attributes('data-qa-selector')).toBe(props.dataQaSelector);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js b/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js
new file mode 100644
index 00000000000..18339164d5a
--- /dev/null
+++ b/spec/frontend/admin/signup_restrictions/components/signup_form_spec.js
@@ -0,0 +1,331 @@
+import { GlButton, GlModal } from '@gitlab/ui';
+import { within, fireEvent } from '@testing-library/dom';
+import { shallowMount, mount } from '@vue/test-utils';
+import { stubComponent } from 'helpers/stub_component';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import SignupForm from '~/pages/admin/application_settings/general/components/signup_form.vue';
+import { mockData } from '../mock_data';
+
+jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
+
+describe('Signup Form', () => {
+ let wrapper;
+ let formSubmitSpy;
+
+ const mountComponent = ({ injectedProps = {}, mountFn = shallowMount, stubs = {} } = {}) => {
+ wrapper = extendedWrapper(
+ mountFn(SignupForm, {
+ provide: {
+ ...mockData,
+ ...injectedProps,
+ },
+ stubs,
+ }),
+ );
+ };
+
+ const queryByLabelText = (text) => within(wrapper.element).queryByLabelText(text);
+
+ const findForm = () => wrapper.findByTestId('form');
+ const findInputCsrf = () => findForm().find('[name="authenticity_token"]');
+ const findFormSubmitButton = () => findForm().find(GlButton);
+
+ const findDenyListRawRadio = () => queryByLabelText('Enter denylist manually');
+ const findDenyListFileRadio = () => queryByLabelText('Upload denylist file');
+
+ const findDenyListRawInputGroup = () => wrapper.findByTestId('domain-denylist-raw-input-group');
+ const findDenyListFileInputGroup = () => wrapper.findByTestId('domain-denylist-file-input-group');
+
+ const findRequireAdminApprovalCheckbox = () =>
+ wrapper.findByTestId('require-admin-approval-checkbox');
+ const findUserCapInput = () => wrapper.findByTestId('user-cap-input');
+ const findModal = () => wrapper.find(GlModal);
+
+ afterEach(() => {
+ wrapper.destroy();
+
+ formSubmitSpy = null;
+ });
+
+ describe('form data', () => {
+ beforeEach(() => {
+ mountComponent();
+ });
+
+ it.each`
+ prop | propValue | elementSelector | formElementPassedDataType | formElementKey | expected
+ ${'signupEnabled'} | ${mockData.signupEnabled} | ${'[name="application_setting[signup_enabled]"]'} | ${'prop'} | ${'value'} | ${mockData.signupEnabled}
+ ${'requireAdminApprovalAfterUserSignup'} | ${mockData.requireAdminApprovalAfterUserSignup} | ${'[name="application_setting[require_admin_approval_after_user_signup]"]'} | ${'prop'} | ${'value'} | ${mockData.requireAdminApprovalAfterUserSignup}
+ ${'sendUserConfirmationEmail'} | ${mockData.sendUserConfirmationEmail} | ${'[name="application_setting[send_user_confirmation_email]"]'} | ${'prop'} | ${'value'} | ${mockData.sendUserConfirmationEmail}
+ ${'newUserSignupsCap'} | ${mockData.newUserSignupsCap} | ${'[name="application_setting[new_user_signups_cap]"]'} | ${'attribute'} | ${'value'} | ${mockData.newUserSignupsCap}
+ ${'minimumPasswordLength'} | ${mockData.minimumPasswordLength} | ${'[name="application_setting[minimum_password_length]"]'} | ${'attribute'} | ${'value'} | ${mockData.minimumPasswordLength}
+ ${'minimumPasswordLengthMin'} | ${mockData.minimumPasswordLengthMin} | ${'[name="application_setting[minimum_password_length]"]'} | ${'attribute'} | ${'min'} | ${mockData.minimumPasswordLengthMin}
+ ${'minimumPasswordLengthMax'} | ${mockData.minimumPasswordLengthMax} | ${'[name="application_setting[minimum_password_length]"]'} | ${'attribute'} | ${'max'} | ${mockData.minimumPasswordLengthMax}
+ ${'domainAllowlistRaw'} | ${mockData.domainAllowlistRaw} | ${'[name="application_setting[domain_allowlist_raw]"]'} | ${'value'} | ${'value'} | ${mockData.domainAllowlistRaw}
+ ${'domainDenylistEnabled'} | ${mockData.domainDenylistEnabled} | ${'[name="application_setting[domain_denylist_enabled]"]'} | ${'prop'} | ${'value'} | ${mockData.domainDenylistEnabled}
+ ${'denylistTypeRawSelected'} | ${mockData.denylistTypeRawSelected} | ${'[name="denylist_type"]'} | ${'attribute'} | ${'checked'} | ${'raw'}
+ ${'domainDenylistRaw'} | ${mockData.domainDenylistRaw} | ${'[name="application_setting[domain_denylist_raw]"]'} | ${'value'} | ${'value'} | ${mockData.domainDenylistRaw}
+ ${'emailRestrictionsEnabled'} | ${mockData.emailRestrictionsEnabled} | ${'[name="application_setting[email_restrictions_enabled]"]'} | ${'prop'} | ${'value'} | ${mockData.emailRestrictionsEnabled}
+ ${'emailRestrictions'} | ${mockData.emailRestrictions} | ${'[name="application_setting[email_restrictions]"]'} | ${'value'} | ${'value'} | ${mockData.emailRestrictions}
+ ${'afterSignUpText'} | ${mockData.afterSignUpText} | ${'[name="application_setting[after_sign_up_text]"]'} | ${'value'} | ${'value'} | ${mockData.afterSignUpText}
+ `(
+ 'form element $elementSelector gets $expected value for $formElementKey $formElementPassedDataType when prop $prop is set to $propValue',
+ ({ elementSelector, expected, formElementKey, formElementPassedDataType }) => {
+ const formElement = wrapper.find(elementSelector);
+
+ switch (formElementPassedDataType) {
+ case 'attribute':
+ expect(formElement.attributes(formElementKey)).toBe(expected);
+ break;
+ case 'prop':
+ expect(formElement.props(formElementKey)).toBe(expected);
+ break;
+ case 'value':
+ expect(formElement.element.value).toBe(expected);
+ break;
+ default:
+ expect(formElement.props(formElementKey)).toBe(expected);
+ break;
+ }
+ },
+ );
+ it('gets passed the path for action attribute', () => {
+ expect(findForm().attributes('action')).toBe(mockData.settingsPath);
+ });
+
+ it('gets passed the csrf token as a hidden input value', () => {
+ expect(findInputCsrf().attributes('type')).toBe('hidden');
+
+ expect(findInputCsrf().attributes('value')).toBe('mock-csrf-token');
+ });
+ });
+
+ describe('domain deny list', () => {
+ describe('when it is set to raw from props', () => {
+ beforeEach(() => {
+ mountComponent({ mountFn: mount });
+ });
+
+ it('has raw list selected', () => {
+ expect(findDenyListRawRadio().checked).toBe(true);
+ });
+
+ it('has file not selected', () => {
+ expect(findDenyListFileRadio().checked).toBe(false);
+ });
+
+ it('raw list input is displayed', () => {
+ expect(findDenyListRawInputGroup().exists()).toBe(true);
+ });
+
+ it('file input is not displayed', () => {
+ expect(findDenyListFileInputGroup().exists()).toBe(false);
+ });
+
+ describe('when user clicks on file radio', () => {
+ beforeEach(() => {
+ fireEvent.click(findDenyListFileRadio());
+ });
+
+ it('has raw list not selected', () => {
+ expect(findDenyListRawRadio().checked).toBe(false);
+ });
+
+ it('has file selected', () => {
+ expect(findDenyListFileRadio().checked).toBe(true);
+ });
+
+ it('raw list input is not displayed', () => {
+ expect(findDenyListRawInputGroup().exists()).toBe(false);
+ });
+
+ it('file input is displayed', () => {
+ expect(findDenyListFileInputGroup().exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('when it is set to file from injected props', () => {
+ beforeEach(() => {
+ mountComponent({ mountFn: mount, injectedProps: { denylistTypeRawSelected: false } });
+ });
+
+ it('has raw list not selected', () => {
+ expect(findDenyListRawRadio().checked).toBe(false);
+ });
+
+ it('has file selected', () => {
+ expect(findDenyListFileRadio().checked).toBe(true);
+ });
+
+ it('raw list input is not displayed', () => {
+ expect(findDenyListRawInputGroup().exists()).toBe(false);
+ });
+
+ it('file input is displayed', () => {
+ expect(findDenyListFileInputGroup().exists()).toBe(true);
+ });
+
+ describe('when user clicks on raw list radio', () => {
+ beforeEach(() => {
+ fireEvent.click(findDenyListRawRadio());
+ });
+
+ it('has raw list selected', () => {
+ expect(findDenyListRawRadio().checked).toBe(true);
+ });
+
+ it('has file not selected', () => {
+ expect(findDenyListFileRadio().checked).toBe(false);
+ });
+
+ it('raw list input is displayed', () => {
+ expect(findDenyListRawInputGroup().exists()).toBe(true);
+ });
+
+ it('file input is not displayed', () => {
+ expect(findDenyListFileInputGroup().exists()).toBe(false);
+ });
+ });
+ });
+ });
+
+ describe('form submit button confirmation modal for side-effect of adding possibly unwanted new users', () => {
+ it.each`
+ requireAdminApprovalAction | userCapAction | buttonEffect
+ ${'unchanged from true'} | ${'unchanged'} | ${'submits form'}
+ ${'unchanged from false'} | ${'unchanged'} | ${'submits form'}
+ ${'toggled off'} | ${'unchanged'} | ${'shows confirmation modal'}
+ ${'toggled on'} | ${'unchanged'} | ${'submits form'}
+ ${'unchanged from false'} | ${'increased'} | ${'shows confirmation modal'}
+ ${'unchanged from true'} | ${'increased'} | ${'shows confirmation modal'}
+ ${'toggled off'} | ${'increased'} | ${'shows confirmation modal'}
+ ${'toggled on'} | ${'increased'} | ${'shows confirmation modal'}
+ ${'toggled on'} | ${'decreased'} | ${'submits form'}
+ ${'unchanged from false'} | ${'changed from limited to unlimited'} | ${'shows confirmation modal'}
+ ${'unchanged from false'} | ${'changed from unlimited to limited'} | ${'submits form'}
+ ${'unchanged from false'} | ${'unchanged from unlimited'} | ${'submits form'}
+ `(
+ '$buttonEffect if require admin approval for new sign-ups is $requireAdminApprovalAction and the user cap is $userCapAction',
+ async ({ requireAdminApprovalAction, userCapAction, buttonEffect }) => {
+ let isModalDisplayed;
+
+ switch (buttonEffect) {
+ case 'shows confirmation modal':
+ isModalDisplayed = true;
+ break;
+ case 'submits form':
+ isModalDisplayed = false;
+ break;
+ default:
+ isModalDisplayed = false;
+ break;
+ }
+
+ const isFormSubmittedWhenClickingFormSubmitButton = !isModalDisplayed;
+
+ const injectedProps = {};
+
+ const USER_CAP_DEFAULT = 5;
+
+ switch (userCapAction) {
+ case 'changed from unlimited to limited':
+ injectedProps.newUserSignupsCap = '';
+ break;
+ case 'unchanged from unlimited':
+ injectedProps.newUserSignupsCap = '';
+ break;
+ default:
+ injectedProps.newUserSignupsCap = USER_CAP_DEFAULT;
+ break;
+ }
+
+ switch (requireAdminApprovalAction) {
+ case 'unchanged from true':
+ injectedProps.requireAdminApprovalAfterUserSignup = true;
+ break;
+ case 'unchanged from false':
+ injectedProps.requireAdminApprovalAfterUserSignup = false;
+ break;
+ case 'toggled off':
+ injectedProps.requireAdminApprovalAfterUserSignup = true;
+ break;
+ case 'toggled on':
+ injectedProps.requireAdminApprovalAfterUserSignup = false;
+ break;
+ default:
+ injectedProps.requireAdminApprovalAfterUserSignup = false;
+ break;
+ }
+
+ formSubmitSpy = jest.spyOn(HTMLFormElement.prototype, 'submit').mockImplementation();
+
+ await mountComponent({
+ injectedProps,
+ stubs: { GlButton, GlModal: stubComponent(GlModal) },
+ });
+
+ findModal().vm.show = jest.fn();
+
+ if (
+ requireAdminApprovalAction === 'toggled off' ||
+ requireAdminApprovalAction === 'toggled on'
+ ) {
+ await findRequireAdminApprovalCheckbox().vm.$emit('input', false);
+ }
+
+ switch (userCapAction) {
+ case 'increased':
+ await findUserCapInput().vm.$emit('input', USER_CAP_DEFAULT + 1);
+ break;
+ case 'decreased':
+ await findUserCapInput().vm.$emit('input', USER_CAP_DEFAULT - 1);
+ break;
+ case 'changed from limited to unlimited':
+ await findUserCapInput().vm.$emit('input', '');
+ break;
+ case 'changed from unlimited to limited':
+ await findUserCapInput().vm.$emit('input', USER_CAP_DEFAULT);
+ break;
+ default:
+ break;
+ }
+
+ await findFormSubmitButton().trigger('click');
+
+ if (isFormSubmittedWhenClickingFormSubmitButton) {
+ expect(formSubmitSpy).toHaveBeenCalled();
+ expect(findModal().vm.show).not.toHaveBeenCalled();
+ } else {
+ expect(formSubmitSpy).not.toHaveBeenCalled();
+ expect(findModal().vm.show).toHaveBeenCalled();
+ }
+ },
+ );
+
+ describe('modal actions', () => {
+ beforeEach(async () => {
+ const INITIAL_USER_CAP = 5;
+
+ await mountComponent({
+ injectedProps: {
+ newUserSignupsCap: INITIAL_USER_CAP,
+ },
+ stubs: { GlButton, GlModal: stubComponent(GlModal) },
+ });
+
+ await findUserCapInput().vm.$emit('input', INITIAL_USER_CAP + 1);
+
+ await findFormSubmitButton().trigger('click');
+ });
+
+ it('submits the form after clicking approve users button', async () => {
+ formSubmitSpy = jest.spyOn(HTMLFormElement.prototype, 'submit').mockImplementation();
+
+ await findModal().vm.$emit('primary');
+
+ expect(formSubmitSpy).toHaveBeenCalled();
+ });
+ });
+ });
+});
diff --git a/spec/frontend/admin/signup_restrictions/mock_data.js b/spec/frontend/admin/signup_restrictions/mock_data.js
new file mode 100644
index 00000000000..624a5614c9c
--- /dev/null
+++ b/spec/frontend/admin/signup_restrictions/mock_data.js
@@ -0,0 +1,41 @@
+export const rawMockData = {
+ host: 'path/to/host',
+ settingsPath: 'path/to/settings',
+ signupEnabled: 'true',
+ requireAdminApprovalAfterUserSignup: 'true',
+ sendUserConfirmationEmail: 'true',
+ minimumPasswordLength: '8',
+ minimumPasswordLengthMin: '3',
+ minimumPasswordLengthMax: '10',
+ minimumPasswordLengthHelpLink: 'help/link',
+ domainAllowlistRaw: 'domain1.com, domain2.com',
+ newUserSignupsCap: '8',
+ domainDenylistEnabled: 'true',
+ denylistTypeRawSelected: 'true',
+ domainDenylistRaw: 'domain2.com, domain3.com',
+ emailRestrictionsEnabled: 'true',
+ supportedSyntaxLinkUrl: '/supported/syntax/link',
+ emailRestrictions: 'user1@domain.com, user2@domain.com',
+ afterSignUpText: 'Congratulations on your successful sign-up!',
+};
+
+export const mockData = {
+ host: 'path/to/host',
+ settingsPath: 'path/to/settings',
+ signupEnabled: true,
+ requireAdminApprovalAfterUserSignup: true,
+ sendUserConfirmationEmail: true,
+ minimumPasswordLength: '8',
+ minimumPasswordLengthMin: '3',
+ minimumPasswordLengthMax: '10',
+ minimumPasswordLengthHelpLink: 'help/link',
+ domainAllowlistRaw: 'domain1.com, domain2.com',
+ newUserSignupsCap: '8',
+ domainDenylistEnabled: true,
+ denylistTypeRawSelected: true,
+ domainDenylistRaw: 'domain2.com, domain3.com',
+ emailRestrictionsEnabled: true,
+ supportedSyntaxLinkUrl: '/supported/syntax/link',
+ emailRestrictions: 'user1@domain.com, user2@domain.com',
+ afterSignUpText: 'Congratulations on your successful sign-up!',
+};
diff --git a/spec/frontend/admin/signup_restrictions/utils.js b/spec/frontend/admin/signup_restrictions/utils.js
new file mode 100644
index 00000000000..30a95467e09
--- /dev/null
+++ b/spec/frontend/admin/signup_restrictions/utils.js
@@ -0,0 +1,19 @@
+export const setDataAttributes = (data, element) => {
+ Object.keys(data).forEach((key) => {
+ const value = data[key];
+
+    // the data attribute should be:
+    // - present but valueless if the value is true
+    // - absent if the value is false
+ switch (value) {
+ case false:
+ break;
+ case true:
+ element.dataset[`${key}`] = '';
+ break;
+ default:
+ element.dataset[`${key}`] = value;
+ break;
+ }
+ });
+};
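+
+// Illustrative usage only (not part of this change):
+//
+//   const el = document.createElement('div');
+//   setDataAttributes({ signupEnabled: true, newUserSignupsCap: '8', denylistEnabled: false }, el);
+//   // el.dataset => { signupEnabled: '', newUserSignupsCap: '8' } -- the `false` key is omitted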
diff --git a/spec/frontend/admin/signup_restrictions/utils_spec.js b/spec/frontend/admin/signup_restrictions/utils_spec.js
new file mode 100644
index 00000000000..fd5c4c3317b
--- /dev/null
+++ b/spec/frontend/admin/signup_restrictions/utils_spec.js
@@ -0,0 +1,22 @@
+import { getParsedDataset } from '~/pages/admin/application_settings/utils';
+import { rawMockData, mockData } from './mock_data';
+
+describe('utils', () => {
+ describe('getParsedDataset', () => {
+ it('returns correct results', () => {
+ expect(
+ getParsedDataset({
+ dataset: rawMockData,
+ booleanAttributes: [
+ 'signupEnabled',
+ 'requireAdminApprovalAfterUserSignup',
+ 'sendUserConfirmationEmail',
+ 'domainDenylistEnabled',
+ 'denylistTypeRawSelected',
+ 'emailRestrictionsEnabled',
+ ],
+ }),
+ ).toEqual(mockData);
+ });
+ });
+});
diff --git a/spec/frontend/admin/users/components/user_date_spec.js b/spec/frontend/admin/users/components/user_date_spec.js
index 6428b10059b..1a2f2938db5 100644
--- a/spec/frontend/admin/users/components/user_date_spec.js
+++ b/spec/frontend/admin/users/components/user_date_spec.js
@@ -1,6 +1,6 @@
import { shallowMount } from '@vue/test-utils';
-import UserDate from '~/admin/users/components/user_date.vue';
+import UserDate from '~/vue_shared/components/user_date.vue';
import { users } from '../mock_data';
const mockDate = users[0].createdAt;
diff --git a/spec/frontend/admin/users/components/users_table_spec.js b/spec/frontend/admin/users/components/users_table_spec.js
index f1fcc20fb65..424b0deebd3 100644
--- a/spec/frontend/admin/users/components/users_table_spec.js
+++ b/spec/frontend/admin/users/components/users_table_spec.js
@@ -3,8 +3,8 @@ import { mount } from '@vue/test-utils';
import AdminUserActions from '~/admin/users/components/user_actions.vue';
import AdminUserAvatar from '~/admin/users/components/user_avatar.vue';
-import AdminUserDate from '~/admin/users/components/user_date.vue';
import AdminUsersTable from '~/admin/users/components/users_table.vue';
+import AdminUserDate from '~/vue_shared/components/user_date.vue';
import { users, paths } from '../mock_data';
diff --git a/spec/frontend/admin/users/new_spec.js b/spec/frontend/admin/users/new_spec.js
new file mode 100644
index 00000000000..692c583dca8
--- /dev/null
+++ b/spec/frontend/admin/users/new_spec.js
@@ -0,0 +1,76 @@
+import {
+ setupInternalUserRegexHandler,
+ ID_USER_EMAIL,
+ ID_USER_EXTERNAL,
+ ID_WARNING,
+} from '~/admin/users/new';
+
+describe('admin/users/new', () => {
+ const FIXTURE = 'admin/users/new_with_internal_user_regex.html';
+
+ let elExternal;
+ let elUserEmail;
+ let elWarningMessage;
+
+ beforeEach(() => {
+ loadFixtures(FIXTURE);
+ setupInternalUserRegexHandler();
+
+ elExternal = document.getElementById(ID_USER_EXTERNAL);
+ elUserEmail = document.getElementById(ID_USER_EMAIL);
+ elWarningMessage = document.getElementById(ID_WARNING);
+
+ elExternal.checked = true;
+ });
+
+ const changeEmail = (val) => {
+ elUserEmail.value = val;
+ elUserEmail.dispatchEvent(new Event('input'));
+ };
+
+ const hasHiddenWarning = () => elWarningMessage.classList.contains('hidden');
+
+ describe('Behaviour of userExternal checkbox', () => {
+ it('hides warning by default', () => {
+ expect(hasHiddenWarning()).toBe(true);
+ });
+
+ describe('when matches email as internal', () => {
+ beforeEach(() => {
+ changeEmail('test@');
+ });
+
+ it('has external unchecked', () => {
+ expect(elExternal.checked).toBe(false);
+ });
+
+ it('shows warning', () => {
+ expect(hasHiddenWarning()).toBe(false);
+ });
+
+ describe('when external is checked again', () => {
+ beforeEach(() => {
+ elExternal.dispatchEvent(new Event('change'));
+ });
+
+ it('hides warning', () => {
+ expect(hasHiddenWarning()).toBe(true);
+ });
+ });
+ });
+
+ describe('when matches emails as external', () => {
+ beforeEach(() => {
+ changeEmail('test.ext@');
+ });
+
+ it('has external checked', () => {
+ expect(elExternal.checked).toBe(true);
+ });
+
+ it('hides warning', () => {
+ expect(hasHiddenWarning()).toBe(true);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/alerts_settings/components/__snapshots__/alerts_settings_form_spec.js.snap b/spec/frontend/alerts_settings/components/__snapshots__/alerts_settings_form_spec.js.snap
deleted file mode 100644
index 1f8429af7dd..00000000000
--- a/spec/frontend/alerts_settings/components/__snapshots__/alerts_settings_form_spec.js.snap
+++ /dev/null
@@ -1,524 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`AlertsSettingsForm with default values renders the initial template 1`] = `
-<form
- class="gl-mt-6"
->
- <div
- class="tabs gl-tabs"
- id="__BVID__6"
- >
- <!---->
- <div
- class=""
- >
- <ul
- class="nav gl-tabs-nav"
- id="__BVID__6__BV_tab_controls_"
- role="tablist"
- >
- <!---->
- <li
- class="nav-item"
- role="presentation"
- >
- <a
- aria-controls="__BVID__8"
- aria-posinset="1"
- aria-selected="true"
- aria-setsize="3"
- class="nav-link active gl-tab-nav-item gl-tab-nav-item-active gl-tab-nav-item-active-indigo"
- href="#"
- id="__BVID__8___BV_tab_button__"
- role="tab"
- target="_self"
- >
- Configure details
- </a>
- </li>
- <li
- class="nav-item"
- role="presentation"
- >
- <a
- aria-controls="__BVID__19"
- aria-disabled="true"
- aria-posinset="2"
- aria-selected="false"
- aria-setsize="3"
- class="nav-link disabled disabled gl-tab-nav-item"
- href="#"
- id="__BVID__19___BV_tab_button__"
- role="tab"
- tabindex="-1"
- target="_self"
- >
- View credentials
- </a>
- </li>
- <li
- class="nav-item"
- role="presentation"
- >
- <a
- aria-controls="__BVID__41"
- aria-disabled="true"
- aria-posinset="3"
- aria-selected="false"
- aria-setsize="3"
- class="nav-link disabled disabled gl-tab-nav-item"
- href="#"
- id="__BVID__41___BV_tab_button__"
- role="tab"
- tabindex="-1"
- target="_self"
- >
- Send test alert
- </a>
- </li>
- <!---->
- </ul>
- </div>
- <div
- class="tab-content gl-tab-content"
- id="__BVID__6__BV_tab_container_"
- >
- <transition-stub
- css="true"
- enteractiveclass=""
- enterclass=""
- entertoclass="show"
- leaveactiveclass=""
- leaveclass="show"
- leavetoclass=""
- mode="out-in"
- name=""
- >
- <div
- aria-hidden="false"
- aria-labelledby="__BVID__8___BV_tab_button__"
- class="tab-pane active"
- id="__BVID__8"
- role="tabpanel"
- style=""
- >
- <div
- class="form-group gl-form-group"
- id="integration-type"
- role="group"
- >
- <label
- class="d-block col-form-label"
- for="integration-type"
- id="integration-type__BV_label_"
- >
- 1.Select integration type
- </label>
- <div
- class="bv-no-focus-ring"
- >
- <select
- class="gl-form-select gl-max-w-full custom-select"
- id="__BVID__13"
- >
- <option
- value=""
- >
- Select integration type
- </option>
- <option
- value="HTTP"
- >
- HTTP Endpoint
- </option>
- <option
- value="PROMETHEUS"
- >
- External Prometheus
- </option>
- </select>
-
- <!---->
- <!---->
- <!---->
- <!---->
- </div>
- </div>
-
- <div
- class="gl-mt-3"
- >
- <!---->
-
- <label
- class="gl-display-flex gl-flex-direction-column gl-mb-0 gl-w-max-content gl-my-4 gl-font-weight-normal"
- >
- <span
- class="gl-toggle-wrapper"
- >
- <span
- class="gl-toggle-label"
- data-testid="toggle-label"
- >
- Active
- </span>
-
- <!---->
-
- <button
- aria-label="Active"
- class="gl-toggle"
- role="switch"
- type="button"
- >
- <span
- class="toggle-icon"
- >
- <svg
- aria-hidden="true"
- class="gl-icon s16"
- data-testid="close-icon"
- >
- <use
- href="#close"
- />
- </svg>
- </span>
- </button>
- </span>
-
- <!---->
- </label>
-
- <!---->
-
- <!---->
- </div>
-
- <div
- class="gl-display-flex gl-justify-content-start gl-py-3"
- >
- <button
- class="btn js-no-auto-disable btn-confirm btn-md gl-button"
- data-testid="integration-form-submit"
- type="submit"
- >
- <!---->
-
- <!---->
-
- <span
- class="gl-button-text"
- >
-
- Save integration
-
- </span>
- </button>
-
- <button
- class="btn gl-ml-3 js-no-auto-disable btn-default btn-md gl-button"
- type="reset"
- >
- <!---->
-
- <!---->
-
- <span
- class="gl-button-text"
- >
- Cancel and close
- </span>
- </button>
- </div>
- </div>
- </transition-stub>
-
- <transition-stub
- css="true"
- enteractiveclass=""
- enterclass=""
- entertoclass="show"
- leaveactiveclass=""
- leaveclass="show"
- leavetoclass=""
- mode="out-in"
- name=""
- >
- <div
- aria-hidden="true"
- aria-labelledby="__BVID__19___BV_tab_button__"
- class="tab-pane disabled"
- id="__BVID__19"
- role="tabpanel"
- style="display: none;"
- >
- <span>
- Utilize the URL and authorization key below to authorize an external service to send alerts to GitLab. Review your external service's documentation to learn where to add these details, and the
- <a
- class="gl-link gl-display-inline-block"
- href="https://docs.gitlab.com/ee/operations/incident_management/alert_integrations.html"
- rel="noopener noreferrer"
- target="_blank"
- >
- GitLab documentation
- </a>
- to learn more about configuring your endpoint.
- </span>
-
- <fieldset
- class="form-group gl-form-group"
- id="integration-webhook"
- >
- <!---->
- <div
- class="bv-no-focus-ring"
- role="group"
- tabindex="-1"
- >
- <div
- class="gl-my-4"
- >
- <span
- class="gl-font-weight-bold"
- >
-
- Webhook URL
-
- </span>
-
- <div
- id="url"
- readonly="readonly"
- >
- <div
- class="input-group"
- role="group"
- >
- <!---->
- <!---->
-
- <input
- class="gl-form-input form-control"
- id="url"
- readonly="readonly"
- type="text"
- />
-
- <div
- class="input-group-append"
- >
- <button
- aria-label="Copy this value"
- class="btn gl-m-0! btn-default btn-md gl-button btn-default-secondary btn-icon"
- data-clipboard-text=""
- title="Copy"
- type="button"
- >
- <!---->
-
- <svg
- aria-hidden="true"
- class="gl-button-icon gl-icon s16"
- data-testid="copy-to-clipboard-icon"
- >
- <use
- href="#copy-to-clipboard"
- />
- </svg>
-
- <!---->
- </button>
- </div>
- <!---->
- </div>
- </div>
- </div>
-
- <div
- class="gl-my-4"
- >
- <span
- class="gl-font-weight-bold"
- >
-
- Authorization key
-
- </span>
-
- <div
- class="gl-mb-3"
- id="authorization-key"
- readonly="readonly"
- >
- <div
- class="input-group"
- role="group"
- >
- <!---->
- <!---->
-
- <input
- class="gl-form-input form-control"
- id="authorization-key"
- readonly="readonly"
- type="text"
- />
-
- <div
- class="input-group-append"
- >
- <button
- aria-label="Copy this value"
- class="btn gl-m-0! btn-default btn-md gl-button btn-default-secondary btn-icon"
- data-clipboard-text=""
- title="Copy"
- type="button"
- >
- <!---->
-
- <svg
- aria-hidden="true"
- class="gl-button-icon gl-icon s16"
- data-testid="copy-to-clipboard-icon"
- >
- <use
- href="#copy-to-clipboard"
- />
- </svg>
-
- <!---->
- </button>
- </div>
- <!---->
- </div>
- </div>
- </div>
- <!---->
- <!---->
- <!---->
- </div>
- </fieldset>
-
- <button
- class="btn btn-danger btn-md disabled gl-button"
- disabled="disabled"
- type="button"
- >
- <!---->
-
- <!---->
-
- <span
- class="gl-button-text"
- >
-
- Reset Key
-
- </span>
- </button>
-
- <button
- class="btn gl-ml-3 js-no-auto-disable btn-default btn-md gl-button"
- type="reset"
- >
- <!---->
-
- <!---->
-
- <span
- class="gl-button-text"
- >
- Cancel and close
- </span>
- </button>
-
- <!---->
- </div>
- </transition-stub>
-
- <transition-stub
- css="true"
- enteractiveclass=""
- enterclass=""
- entertoclass="show"
- leaveactiveclass=""
- leaveclass="show"
- leavetoclass=""
- mode="out-in"
- name=""
- >
- <div
- aria-hidden="true"
- aria-labelledby="__BVID__41___BV_tab_button__"
- class="tab-pane disabled"
- id="__BVID__41"
- role="tabpanel"
- style="display: none;"
- >
- <fieldset
- class="form-group gl-form-group"
- id="test-integration"
- >
- <!---->
- <div
- class="bv-no-focus-ring"
- role="group"
- tabindex="-1"
- >
- <span>
- Provide an example payload from the monitoring tool you intend to integrate with. This will allow you to send an alert to an active GitLab alerting point.
- </span>
-
- <textarea
- class="gl-form-input gl-form-textarea gl-my-3 form-control is-valid"
- id="test-payload"
- placeholder="{ \\"events\\": [{ \\"application\\": \\"Name of application\\" }] }"
- style="resize: none; overflow-y: scroll;"
- wrap="soft"
- />
- <!---->
- <!---->
- <!---->
- </div>
- </fieldset>
-
- <button
- class="btn js-no-auto-disable btn-confirm btn-md gl-button"
- data-testid="send-test-alert"
- type="button"
- >
- <!---->
-
- <!---->
-
- <span
- class="gl-button-text"
- >
-
- Send
-
- </span>
- </button>
-
- <button
- class="btn gl-ml-3 js-no-auto-disable btn-default btn-md gl-button"
- type="reset"
- >
- <!---->
-
- <!---->
-
- <span
- class="gl-button-text"
- >
- Cancel and close
- </span>
- </button>
- </div>
- </transition-stub>
- <!---->
- </div>
- </div>
-</form>
-`;
diff --git a/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
index d2dcff14432..9912ac433a5 100644
--- a/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_settings_form_spec.js
@@ -1,5 +1,7 @@
import { GlForm, GlFormSelect, GlFormInput, GlToggle, GlFormTextarea, GlTab } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import MappingBuilder from '~/alerts_settings/components/alert_mapping_builder.vue';
import AlertsSettingsForm from '~/alerts_settings/components/alerts_settings_form.vue';
@@ -8,48 +10,52 @@ import alertFields from '../mocks/alert_fields.json';
import parsedMapping from '../mocks/parsed_mapping.json';
import { defaultAlertSettingsConfig } from './util';
+const scrollIntoViewMock = jest.fn();
+HTMLElement.prototype.scrollIntoView = scrollIntoViewMock;
+
describe('AlertsSettingsForm', () => {
let wrapper;
const mockToastShow = jest.fn();
const createComponent = ({ data = {}, props = {}, multiIntegrations = true } = {}) => {
- wrapper = mount(AlertsSettingsForm, {
- data() {
- return { ...data };
- },
- propsData: {
- loading: false,
- canAddIntegration: true,
- ...props,
- },
- provide: {
- ...defaultAlertSettingsConfig,
- multiIntegrations,
- },
- mocks: {
- $apollo: {
- query: jest.fn(),
+ wrapper = extendedWrapper(
+ mount(AlertsSettingsForm, {
+ data() {
+ return { ...data };
},
- $toast: {
- show: mockToastShow,
+ propsData: {
+ loading: false,
+ canAddIntegration: true,
+ ...props,
},
- },
- });
+ provide: {
+ ...defaultAlertSettingsConfig,
+ multiIntegrations,
+ },
+ mocks: {
+ $apollo: {
+ query: jest.fn(),
+ },
+ $toast: {
+ show: mockToastShow,
+ },
+ },
+ }),
+ );
};
const findForm = () => wrapper.findComponent(GlForm);
const findSelect = () => wrapper.findComponent(GlFormSelect);
const findFormFields = () => wrapper.findAllComponents(GlFormInput);
const findFormToggle = () => wrapper.findComponent(GlToggle);
- const findSamplePayloadSection = () => wrapper.find('[data-testid="sample-payload-section"]');
- const findMappingBuilderSection = () => wrapper.find(`[id = "mapping-builder"]`);
+ const findSamplePayloadSection = () => wrapper.findByTestId('sample-payload-section');
const findMappingBuilder = () => wrapper.findComponent(MappingBuilder);
- const findSubmitButton = () => wrapper.find(`[type = "submit"]`);
- const findMultiSupportText = () =>
- wrapper.find(`[data-testid="multi-integrations-not-supported"]`);
- const findJsonTestSubmit = () => wrapper.find(`[data-testid="send-test-alert"]`);
+
+ const findSubmitButton = () => wrapper.findByTestId('integration-form-submit');
+ const findMultiSupportText = () => wrapper.findByTestId('multi-integrations-not-supported');
+ const findJsonTestSubmit = () => wrapper.findByTestId('send-test-alert');
const findJsonTextArea = () => wrapper.find(`[id = "test-payload"]`);
- const findActionBtn = () => wrapper.find(`[data-testid="payload-action-btn"]`);
+ const findActionBtn = () => wrapper.findByTestId('payload-action-btn');
const findTabs = () => wrapper.findAllComponents(GlTab);
afterEach(() => {
@@ -74,10 +80,6 @@ describe('AlertsSettingsForm', () => {
createComponent();
});
- it('renders the initial template', () => {
- expect(wrapper.element).toMatchSnapshot();
- });
-
it('render the initial form with only an integration type dropdown', () => {
expect(findForm().exists()).toBe(true);
expect(findSelect().exists()).toBe(true);
@@ -151,29 +153,28 @@ describe('AlertsSettingsForm', () => {
findMappingBuilder().vm.$emit('onMappingUpdate', sampleMapping);
findForm().trigger('submit');
- expect(wrapper.emitted('create-new-integration')[0]).toEqual([
- {
- type: typeSet.http,
- variables: {
- name: integrationName,
- active: true,
- payloadAttributeMappings: sampleMapping,
- payloadExample: '{}',
- },
+ expect(wrapper.emitted('create-new-integration')[0][0]).toMatchObject({
+ type: typeSet.http,
+ variables: {
+ name: integrationName,
+ active: true,
+ payloadAttributeMappings: sampleMapping,
+ payloadExample: '{}',
},
- ]);
+ });
});
it('update', () => {
createComponent({
data: {
- selectedIntegration: typeSet.http,
- currentIntegration: { id: '1', name: 'Test integration pre' },
+ integrationForm: { id: '1', name: 'Test integration pre', type: typeSet.http },
+ currentIntegration: { id: '1' },
},
props: {
loading: false,
},
});
+
const updatedIntegrationName = 'Test integration post';
enableIntegration(0, updatedIntegrationName);
@@ -181,21 +182,16 @@ describe('AlertsSettingsForm', () => {
expect(submitBtn.exists()).toBe(true);
expect(submitBtn.text()).toBe('Save integration');
- findForm().trigger('submit');
-
- expect(wrapper.emitted('update-integration')[0]).toEqual(
- expect.arrayContaining([
- {
- type: typeSet.http,
- variables: {
- name: updatedIntegrationName,
- active: true,
- payloadAttributeMappings: [],
- payloadExample: '{}',
- },
- },
- ]),
- );
+ submitBtn.trigger('click');
+ expect(wrapper.emitted('update-integration')[0][0]).toMatchObject({
+ type: typeSet.http,
+ variables: {
+ name: updatedIntegrationName,
+ active: true,
+ payloadAttributeMappings: [],
+ payloadExample: '{}',
+ },
+ });
});
});
@@ -211,16 +207,17 @@ describe('AlertsSettingsForm', () => {
findForm().trigger('submit');
- expect(wrapper.emitted('create-new-integration')[0]).toEqual([
- { type: typeSet.prometheus, variables: { apiUrl, active: true } },
- ]);
+ expect(wrapper.emitted('create-new-integration')[0][0]).toMatchObject({
+ type: typeSet.prometheus,
+ variables: { apiUrl, active: true },
+ });
});
it('update', () => {
createComponent({
data: {
- selectedIntegration: typeSet.prometheus,
- currentIntegration: { id: '1', apiUrl: 'https://test-pre.com' },
+ integrationForm: { id: '1', apiUrl: 'https://test-pre.com', type: typeSet.prometheus },
+ currentIntegration: { id: '1' },
},
props: {
loading: false,
@@ -236,9 +233,10 @@ describe('AlertsSettingsForm', () => {
findForm().trigger('submit');
- expect(wrapper.emitted('update-integration')[0]).toEqual([
- { type: typeSet.prometheus, variables: { apiUrl, active: true } },
- ]);
+ expect(wrapper.emitted('update-integration')[0][0]).toMatchObject({
+ type: typeSet.prometheus,
+ variables: { apiUrl, active: true },
+ });
});
});
});
@@ -247,7 +245,6 @@ describe('AlertsSettingsForm', () => {
beforeEach(() => {
createComponent({
data: {
- selectedIntegration: typeSet.http,
currentIntegration: { id: '1', name: 'Test' },
active: true,
},
@@ -262,7 +259,7 @@ describe('AlertsSettingsForm', () => {
await findJsonTextArea().setValue('Invalid JSON');
jest.runAllTimers();
- await wrapper.vm.$nextTick();
+ await nextTick();
const jsonTestSubmit = findJsonTestSubmit();
expect(jsonTestSubmit.exists()).toBe(true);
@@ -275,7 +272,7 @@ describe('AlertsSettingsForm', () => {
await findJsonTextArea().setValue('{ "value": "value" }');
jest.runAllTimers();
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(findJsonTestSubmit().props('disabled')).toBe(false);
});
});
@@ -283,14 +280,13 @@ describe('AlertsSettingsForm', () => {
describe('Test payload section for HTTP integration', () => {
const validSamplePayload = JSON.stringify(alertFields);
const emptySamplePayload = '{}';
-
beforeEach(() => {
createComponent({
+ multiIntegrations: true,
data: {
+ integrationForm: { type: typeSet.http },
currentIntegration: {
- type: typeSet.http,
- payloadExample: validSamplePayload,
- payloadAttributeMappings: [],
+ payloadExample: emptySamplePayload,
},
active: false,
resetPayloadAndMappingConfirmed: false,
@@ -300,25 +296,25 @@ describe('AlertsSettingsForm', () => {
});
describe.each`
- active | resetPayloadAndMappingConfirmed | disabled
- ${true} | ${true} | ${undefined}
- ${false} | ${true} | ${'disabled'}
- ${true} | ${false} | ${'disabled'}
- ${false} | ${false} | ${'disabled'}
- `('', ({ active, resetPayloadAndMappingConfirmed, disabled }) => {
+ payload | resetPayloadAndMappingConfirmed | disabled
+ ${validSamplePayload} | ${true} | ${undefined}
+ ${emptySamplePayload} | ${true} | ${undefined}
+ ${validSamplePayload} | ${false} | ${'disabled'}
+ ${emptySamplePayload} | ${false} | ${undefined}
+ `('', ({ payload, resetPayloadAndMappingConfirmed, disabled }) => {
const payloadResetMsg = resetPayloadAndMappingConfirmed
? 'was confirmed'
: 'was not confirmed';
const enabledState = disabled === 'disabled' ? 'disabled' : 'enabled';
- const activeState = active ? 'active' : 'not active';
+ const validPayloadMsg = payload === emptySamplePayload ? 'not valid' : 'valid';
- it(`textarea should be ${enabledState} when payload reset ${payloadResetMsg} and current integration is ${activeState}`, async () => {
+ it(`textarea should be ${enabledState} when payload reset ${payloadResetMsg} and payload is ${validPayloadMsg}`, async () => {
wrapper.setData({
- selectedIntegration: typeSet.http,
- active,
+ currentIntegration: { payloadExample: payload },
resetPayloadAndMappingConfirmed,
});
- await wrapper.vm.$nextTick();
+
+ await nextTick();
expect(findSamplePayloadSection().find(GlFormTextarea).attributes('disabled')).toBe(
disabled,
);
@@ -329,9 +325,9 @@ describe('AlertsSettingsForm', () => {
describe.each`
resetPayloadAndMappingConfirmed | payloadExample | caption
${false} | ${validSamplePayload} | ${'Edit payload'}
- ${true} | ${emptySamplePayload} | ${'Parse payload for custom mapping'}
- ${true} | ${validSamplePayload} | ${'Parse payload for custom mapping'}
- ${false} | ${emptySamplePayload} | ${'Parse payload for custom mapping'}
+ ${true} | ${emptySamplePayload} | ${'Parse payload fields'}
+ ${true} | ${validSamplePayload} | ${'Parse payload fields'}
+ ${false} | ${emptySamplePayload} | ${'Parse payload fields'}
`('', ({ resetPayloadAndMappingConfirmed, payloadExample, caption }) => {
const samplePayloadMsg = payloadExample ? 'was provided' : 'was not provided';
const payloadResetMsg = resetPayloadAndMappingConfirmed
@@ -340,16 +336,12 @@ describe('AlertsSettingsForm', () => {
it(`shows ${caption} button when sample payload ${samplePayloadMsg} and payload reset ${payloadResetMsg}`, async () => {
wrapper.setData({
- selectedIntegration: typeSet.http,
currentIntegration: {
payloadExample,
- type: typeSet.http,
- active: true,
- payloadAttributeMappings: [],
},
resetPayloadAndMappingConfirmed,
});
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(findActionBtn().text()).toBe(caption);
});
});
@@ -358,7 +350,6 @@ describe('AlertsSettingsForm', () => {
describe('Parsing payload', () => {
beforeEach(() => {
wrapper.setData({
- selectedIntegration: typeSet.http,
resetPayloadAndMappingConfirmed: true,
});
});
@@ -398,11 +389,12 @@ describe('AlertsSettingsForm', () => {
${true} | ${false} | ${1} | ${false}
${false} | ${true} | ${1} | ${false}
`('', ({ alertFieldsProvided, multiIntegrations, integrationOption, visible }) => {
- const visibleMsg = visible ? 'is rendered' : 'is not rendered';
- const alertFieldsMsg = alertFieldsProvided ? 'are provided' : 'are not provided';
+ const visibleMsg = visible ? 'rendered' : 'not rendered';
+ const alertFieldsMsg = alertFieldsProvided ? 'provided' : 'not provided';
const integrationType = integrationOption === 1 ? typeSet.http : typeSet.prometheus;
+ const multiIntegrationsEnabled = multiIntegrations ? 'enabled' : 'not enabled';
- it(`${visibleMsg} when integration type is ${integrationType} and alert fields ${alertFieldsMsg}`, async () => {
+ it(`is ${visibleMsg} when multiIntegrations are ${multiIntegrationsEnabled}, integration type is ${integrationType} and alert fields are ${alertFieldsMsg}`, async () => {
createComponent({
multiIntegrations,
props: {
@@ -411,8 +403,80 @@ describe('AlertsSettingsForm', () => {
});
await selectOptionAtIndex(integrationOption);
- expect(findMappingBuilderSection().exists()).toBe(visible);
+ expect(findMappingBuilder().exists()).toBe(visible);
+ });
+ });
+ });
+
+ describe('Form validation', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should not be able to submit when no integration type is selected', async () => {
+ await selectOptionAtIndex(0);
+
+ expect(findSubmitButton().attributes('disabled')).toBe('disabled');
+ });
+
+ it('should not be able to submit when HTTP integration form is invalid', async () => {
+ await selectOptionAtIndex(1);
+ await findFormFields().at(0).vm.$emit('input', '');
+ expect(findSubmitButton().attributes('disabled')).toBe('disabled');
+ });
+
+ it('should be able to submit when HTTP integration form is valid', async () => {
+ await selectOptionAtIndex(1);
+ await findFormFields().at(0).vm.$emit('input', 'Name');
+ expect(findSubmitButton().attributes('disabled')).toBe(undefined);
+ });
+
+ it('should not be able to submit when Prometheus integration form is invalid', async () => {
+ await selectOptionAtIndex(2);
+ await findFormFields().at(0).vm.$emit('input', '');
+
+ expect(findSubmitButton().attributes('disabled')).toBe('disabled');
+ });
+
+ it('should be able to submit when Prometheus integration form is valid', async () => {
+ await selectOptionAtIndex(2);
+ await findFormFields().at(0).vm.$emit('input', 'http://valid.url');
+
+ expect(findSubmitButton().attributes('disabled')).toBe(undefined);
+ });
+
+ it('should be able to submit when form is dirty', async () => {
+ wrapper.setData({
+ currentIntegration: { type: typeSet.http, name: 'Existing integration' },
+ });
+ await nextTick();
+ await findFormFields().at(0).vm.$emit('input', 'Updated name');
+
+ expect(findSubmitButton().attributes('disabled')).toBe(undefined);
+ });
+
+ it('should not be able to submit when form is pristine', async () => {
+ wrapper.setData({
+ currentIntegration: { type: typeSet.http, name: 'Existing integration' },
});
+ await nextTick();
+
+ expect(findSubmitButton().attributes('disabled')).toBe('disabled');
+ });
+
+ it('should disable submit button after click on validation failure', async () => {
+ await selectOptionAtIndex(1);
+ findSubmitButton().trigger('click');
+ await nextTick();
+
+ expect(findSubmitButton().attributes('disabled')).toBe('disabled');
+ });
+
+ it('should scroll to invalid field on validation failure', async () => {
+ await selectOptionAtIndex(1);
+ findSubmitButton().trigger('click');
+
+ expect(scrollIntoViewMock).toHaveBeenCalledWith({ behavior: 'smooth', block: 'center' });
});
});
});
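
The selector changes above rely on `extendedWrapper` to expose `findByTestId`, so finders target `data-testid` attributes rather than hand-written attribute selectors. A minimal sketch of that pattern, assuming a hypothetical `MyComponent` that renders an element with `data-testid="submit-btn"`:

import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
// hypothetical component, used only for illustration
import MyComponent from '~/my_feature/components/my_component.vue';

describe('MyComponent', () => {
  let wrapper;

  // extendedWrapper decorates the VTU wrapper with findByTestId/findAllByTestId
  const createComponent = () => {
    wrapper = extendedWrapper(shallowMount(MyComponent));
  };

  const findSubmitButton = () => wrapper.findByTestId('submit-btn');

  afterEach(() => {
    wrapper.destroy();
  });

  it('renders the submit button', () => {
    createComponent();

    expect(findSubmitButton().exists()).toBe(true);
  });
});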
diff --git a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
index 77fac6dd022..dd8ce838dfd 100644
--- a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
@@ -1,18 +1,18 @@
-import { GlLoadingIcon } from '@gitlab/ui';
+import { GlLoadingIcon, GlAlert } from '@gitlab/ui';
import { mount, createLocalVue } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
+import { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createHttpIntegrationMutation from 'ee_else_ce/alerts_settings/graphql/mutations/create_http_integration.mutation.graphql';
import updateHttpIntegrationMutation from 'ee_else_ce/alerts_settings/graphql/mutations/update_http_integration.mutation.graphql';
import createMockApollo from 'helpers/mock_apollo_helper';
import { useMockIntersectionObserver } from 'helpers/mock_dom_observer';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import IntegrationsList from '~/alerts_settings/components/alerts_integrations_list.vue';
import AlertsSettingsForm from '~/alerts_settings/components/alerts_settings_form.vue';
-import AlertsSettingsWrapper, {
- i18n,
-} from '~/alerts_settings/components/alerts_settings_wrapper.vue';
-import { typeSet } from '~/alerts_settings/constants';
+import AlertsSettingsWrapper from '~/alerts_settings/components/alerts_settings_wrapper.vue';
+import { typeSet, i18n } from '~/alerts_settings/constants';
import createPrometheusIntegrationMutation from '~/alerts_settings/graphql/mutations/create_prometheus_integration.mutation.graphql';
import destroyHttpIntegrationMutation from '~/alerts_settings/graphql/mutations/destroy_http_integration.mutation.graphql';
import resetHttpTokenMutation from '~/alerts_settings/graphql/mutations/reset_http_token.mutation.graphql';
@@ -27,10 +27,12 @@ import {
RESET_INTEGRATION_TOKEN_ERROR,
UPDATE_INTEGRATION_ERROR,
INTEGRATION_PAYLOAD_TEST_ERROR,
+ INTEGRATION_INACTIVE_PAYLOAD_TEST_ERROR,
DELETE_INTEGRATION_ERROR,
} from '~/alerts_settings/utils/error_messages';
import createFlash, { FLASH_TYPES } from '~/flash';
import axios from '~/lib/utils/axios_utils';
+import httpStatusCodes from '~/lib/utils/http_status';
import {
createHttpVariables,
updateHttpVariables,
@@ -81,8 +83,9 @@ describe('AlertsSettingsWrapper', () => {
const findLoader = () => wrapper.findComponent(IntegrationsList).findComponent(GlLoadingIcon);
const findIntegrationsList = () => wrapper.findComponent(IntegrationsList);
const findIntegrations = () => wrapper.find(IntegrationsList).findAll('table tbody tr');
- const findAddIntegrationBtn = () => wrapper.find('[data-testid="add-integration-btn"]');
+ const findAddIntegrationBtn = () => wrapper.findByTestId('add-integration-btn');
const findAlertsSettingsForm = () => wrapper.findComponent(AlertsSettingsForm);
+ const findAlert = () => wrapper.findComponent(GlAlert);
async function destroyHttpIntegration(localWrapper) {
await jest.runOnlyPendingTimers();
@@ -94,32 +97,34 @@ describe('AlertsSettingsWrapper', () => {
}
async function awaitApolloDomMock() {
- await wrapper.vm.$nextTick(); // kick off the DOM update
+ await nextTick(); // kick off the DOM update
await jest.runOnlyPendingTimers(); // kick off the mocked GQL stuff (promises)
- await wrapper.vm.$nextTick(); // kick off the DOM update for flash
+ await nextTick(); // kick off the DOM update for flash
}
const createComponent = ({ data = {}, provide = {}, loading = false } = {}) => {
- wrapper = mount(AlertsSettingsWrapper, {
- data() {
- return { ...data };
- },
- provide: {
- ...defaultAlertSettingsConfig,
- ...provide,
- },
- mocks: {
- $apollo: {
- mutate: jest.fn(),
- query: jest.fn(),
- queries: {
- integrations: {
- loading,
+ wrapper = extendedWrapper(
+ mount(AlertsSettingsWrapper, {
+ data() {
+ return { ...data };
+ },
+ provide: {
+ ...defaultAlertSettingsConfig,
+ ...provide,
+ },
+ mocks: {
+ $apollo: {
+ mutate: jest.fn(),
+ query: jest.fn(),
+ queries: {
+ integrations: {
+ loading,
+ },
},
},
},
- },
- });
+ }),
+ );
};
function createComponentWithApollo({
@@ -200,20 +205,29 @@ describe('AlertsSettingsWrapper', () => {
loading: false,
});
});
- it('calls `$apollo.mutate` with `createHttpIntegrationMutation`', () => {
- jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue({
- data: { createHttpIntegrationMutation: { integration: { id: '1' } } },
+
+ describe('Create', () => {
+ beforeEach(() => {
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue({
+ data: { httpIntegrationCreate: { integration: { id: '1' }, errors: [] } },
+ });
+ findAlertsSettingsForm().vm.$emit('create-new-integration', {
+ type: typeSet.http,
+ variables: createHttpVariables,
+ });
});
- findAlertsSettingsForm().vm.$emit('create-new-integration', {
- type: typeSet.http,
- variables: createHttpVariables,
+
+ it('calls `$apollo.mutate` with `createHttpIntegrationMutation`', () => {
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(1);
+ expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
+ mutation: createHttpIntegrationMutation,
+ update: expect.anything(),
+ variables: createHttpVariables,
+ });
});
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledTimes(1);
- expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
- mutation: createHttpIntegrationMutation,
- update: expect.anything(),
- variables: createHttpVariables,
+ it('shows success alert', () => {
+ expect(findAlert().exists()).toBe(true);
});
});
@@ -334,13 +348,29 @@ describe('AlertsSettingsWrapper', () => {
expect(createFlash).toHaveBeenCalledWith({ message: UPDATE_INTEGRATION_ERROR });
});
- it('shows an error alert when integration test payload fails ', async () => {
- const mock = new AxiosMockAdapter(axios);
- mock.onPost(/(.*)/).replyOnce(403);
- return wrapper.vm.testAlertPayload({ endpoint: '', data: '', token: '' }).then(() => {
+ describe('Test alert failure', () => {
+ let mock;
+ beforeEach(() => {
+ mock = new AxiosMockAdapter(axios);
+ });
+ afterEach(() => {
+ mock.restore();
+ });
+
+      it('shows an error alert when integration test payload is invalid', async () => {
+ mock.onPost(/(.*)/).replyOnce(httpStatusCodes.UNPROCESSABLE_ENTITY);
+ await wrapper.vm.testAlertPayload({ endpoint: '', data: '', token: '' });
expect(createFlash).toHaveBeenCalledWith({ message: INTEGRATION_PAYLOAD_TEST_ERROR });
expect(createFlash).toHaveBeenCalledTimes(1);
- mock.restore();
+ });
+
+      it('shows an error alert when integration is not activated', async () => {
+ mock.onPost(/(.*)/).replyOnce(httpStatusCodes.FORBIDDEN);
+ await wrapper.vm.testAlertPayload({ endpoint: '', data: '', token: '' });
+ expect(createFlash).toHaveBeenCalledWith({
+ message: INTEGRATION_INACTIVE_PAYLOAD_TEST_ERROR,
+ });
+ expect(createFlash).toHaveBeenCalledTimes(1);
});
});
@@ -354,7 +384,7 @@ describe('AlertsSettingsWrapper', () => {
loading: false,
});
- jest.spyOn(wrapper.vm.$apollo, 'mutate');
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValueOnce({});
findIntegrationsList().vm.$emit('edit-integration', updateHttpVariables);
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
mutation: updateCurrentHttpIntegrationMutation,
@@ -372,7 +402,7 @@ describe('AlertsSettingsWrapper', () => {
loading: false,
});
- jest.spyOn(wrapper.vm.$apollo, 'mutate');
+ jest.spyOn(wrapper.vm.$apollo, 'mutate').mockResolvedValue();
findIntegrationsList().vm.$emit('edit-integration', updatePrometheusVariables);
expect(wrapper.vm.$apollo.mutate).toHaveBeenCalledWith({
mutation: updateCurrentPrometheusIntegrationMutation,
@@ -414,7 +444,7 @@ describe('AlertsSettingsWrapper', () => {
createComponentWithApollo();
await jest.runOnlyPendingTimers();
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(findIntegrations()).toHaveLength(4);
});
@@ -426,7 +456,7 @@ describe('AlertsSettingsWrapper', () => {
expect(destroyIntegrationHandler).toHaveBeenCalled();
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(findIntegrations()).toHaveLength(3);
});
diff --git a/spec/frontend/analytics/usage_trends/components/app_spec.js b/spec/frontend/analytics/usage_trends/components/app_spec.js
index f0306ea72e3..156be26f895 100644
--- a/spec/frontend/analytics/usage_trends/components/app_spec.js
+++ b/spec/frontend/analytics/usage_trends/components/app_spec.js
@@ -24,7 +24,7 @@ describe('UsageTrendsApp', () => {
expect(wrapper.find(UsageCounts).exists()).toBe(true);
});
- ['Total projects & groups', 'Pipelines', 'Issues & Merge Requests'].forEach((usage) => {
+ ['Total projects & groups', 'Pipelines', 'Issues & merge requests'].forEach((usage) => {
it(`displays the ${usage} chart`, () => {
const chartTitles = wrapper
.findAll(UsageTrendsCountChart)
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index d6e1b170dd3..cb29dab86bf 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -264,18 +264,18 @@ describe('Api', () => {
it('fetches group labels', (done) => {
const options = { params: { search: 'foo' } };
const expectedGroup = 'gitlab-org';
- const expectedUrl = `${dummyUrlRoot}/groups/${expectedGroup}/-/labels`;
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${expectedGroup}/labels`;
mock.onGet(expectedUrl).reply(httpStatus.OK, [
{
id: 1,
- title: 'Foo Label',
+ name: 'Foo Label',
},
]);
Api.groupLabels(expectedGroup, options)
.then((res) => {
expect(res.length).toBe(1);
- expect(res[0].title).toBe('Foo Label');
+ expect(res[0].name).toBe('Foo Label');
})
.then(done)
.catch(done.fail);
@@ -593,7 +593,7 @@ describe('Api', () => {
});
describe('newLabel', () => {
- it('creates a new label', (done) => {
+ it('creates a new project label', (done) => {
const namespace = 'some namespace';
const project = 'some project';
const labelData = { some: 'data' };
@@ -618,26 +618,23 @@ describe('Api', () => {
});
});
- it('creates a group label', (done) => {
+ it('creates a new group label', (done) => {
const namespace = 'group/subgroup';
- const labelData = { some: 'data' };
+ const labelData = { name: 'Foo', color: '#000000' };
const expectedUrl = Api.buildUrl(Api.groupLabelsPath).replace(':namespace_path', namespace);
- const expectedData = {
- label: labelData,
- };
mock.onPost(expectedUrl).reply((config) => {
- expect(config.data).toBe(JSON.stringify(expectedData));
+ expect(config.data).toBe(JSON.stringify({ color: labelData.color }));
return [
httpStatus.OK,
{
- name: 'test',
+ ...labelData,
},
];
});
Api.newLabel(namespace, undefined, labelData, (response) => {
- expect(response.name).toBe('test');
+ expect(response.name).toBe('Foo');
done();
});
});
@@ -1225,13 +1222,26 @@ describe('Api', () => {
)}/repository/files/${encodeURIComponent(dummyFilePath)}/raw`;
describe('when the raw file is successfully fetched', () => {
- it('resolves the Promise', () => {
+ beforeEach(() => {
mock.onGet(expectedUrl).replyOnce(httpStatus.OK);
+ });
+ it('resolves the Promise', () => {
return Api.getRawFile(dummyProjectPath, dummyFilePath).then(() => {
expect(mock.history.get).toHaveLength(1);
});
});
+
+ describe('when the method is called with params', () => {
+ it('sets the params on the request', () => {
+ const params = { ref: 'main' };
+ jest.spyOn(axios, 'get');
+
+ Api.getRawFile(dummyProjectPath, dummyFilePath, params);
+
+ expect(axios.get).toHaveBeenCalledWith(expectedUrl, { params });
+ });
+ });
});
describe('when an error occurs while getting a raw file', () => {
@@ -1382,6 +1392,38 @@ describe('Api', () => {
});
});
+ describe('updateFreezePeriod', () => {
+ const options = {
+ id: 10,
+ freeze_start: '* * * * *',
+ freeze_end: '* * * * *',
+ cron_timezone: 'America/Juneau',
+ created_at: '2020-07-11T07:04:50.153Z',
+ updated_at: '2020-07-11T07:04:50.153Z',
+ };
+ const projectId = 8;
+ const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/freeze_periods/${options.id}`;
+
+ const expectedResult = {
+ id: 10,
+ freeze_start: '* * * * *',
+ freeze_end: '* * * * *',
+ cron_timezone: 'America/Juneau',
+ created_at: '2020-07-11T07:04:50.153Z',
+ updated_at: '2020-07-11T07:04:50.153Z',
+ };
+
+ describe('when the freeze period is successfully updated', () => {
+ it('resolves the Promise', () => {
+ mock.onPut(expectedUrl, options).replyOnce(httpStatus.OK, expectedResult);
+
+ return Api.updateFreezePeriod(projectId, options).then(({ data }) => {
+ expect(data).toStrictEqual(expectedResult);
+ });
+ });
+ });
+ });
+
describe('createPipeline', () => {
it('creates new pipeline', () => {
const redirectUrl = 'ci-project/-/pipelines/95';
diff --git a/spec/frontend/batch_comments/components/preview_item_spec.js b/spec/frontend/batch_comments/components/preview_item_spec.js
index 08167a94068..03a28ce8001 100644
--- a/spec/frontend/batch_comments/components/preview_item_spec.js
+++ b/spec/frontend/batch_comments/components/preview_item_spec.js
@@ -124,4 +124,16 @@ describe('Batch comments draft preview item component', () => {
);
});
});
+
+ describe('for new comment', () => {
+ it('renders title', () => {
+ createComponent(false, {}, (store) => {
+ store.state.notes.discussions.push({});
+ });
+
+ expect(vm.$el.querySelector('.review-preview-item-header-text').textContent).toContain(
+ 'Your new comment',
+ );
+ });
+ });
});
diff --git a/spec/frontend/behaviors/markdown/render_mermaid_spec.js b/spec/frontend/behaviors/markdown/render_mermaid_spec.js
new file mode 100644
index 00000000000..51a345cab0e
--- /dev/null
+++ b/spec/frontend/behaviors/markdown/render_mermaid_spec.js
@@ -0,0 +1,25 @@
+import { initMermaid } from '~/behaviors/markdown/render_mermaid';
+import * as ColorUtils from '~/lib/utils/color_utils';
+
+describe('Render mermaid diagrams for GitLab Flavored Markdown', () => {
+ it.each`
+ darkMode | expectedTheme
+ ${false} | ${'neutral'}
+ ${true} | ${'dark'}
+  `('initializes mermaid with the $expectedTheme theme when darkMode is $darkMode', async ({ darkMode, expectedTheme }) => {
+ jest.spyOn(ColorUtils, 'darkModeEnabled').mockImplementation(() => darkMode);
+
+ const mermaid = {
+ initialize: jest.fn(),
+ };
+
+ await initMermaid(mermaid);
+
+ expect(mermaid.initialize).toHaveBeenCalledTimes(1);
+ expect(mermaid.initialize).toHaveBeenCalledWith(
+ expect.objectContaining({
+ theme: expectedTheme,
+ }),
+ );
+ });
+});
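
The dark-mode case above is table-driven: Jest's tagged-template `it.each` expands each row into its own test, and `jest.spyOn(...).mockImplementation(...)` pins the mocked module export per row. The same shape in isolation, with a hypothetical `isEven` helper as the subject:

// hypothetical helper, used only to illustrate the it.each tagged-template form
const isEven = (n) => n % 2 === 0;

describe('isEven', () => {
  it.each`
    input | expected
    ${2}  | ${true}
    ${3}  | ${false}
  `('returns $expected for $input', ({ input, expected }) => {
    expect(isEven(input)).toBe(expected);
  });
});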
diff --git a/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js b/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js
index 26d38b115b6..bb3b16b4c7a 100644
--- a/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js
+++ b/spec/frontend/behaviors/shortcuts/shortcuts_issuable_spec.js
@@ -329,7 +329,7 @@ describe('ShortcutsIssuable', () => {
window.shortcut = new ShortcutsIssuable();
[sidebarCollapsedBtn, sidebarExpandedBtn] = document.querySelectorAll(
- '.sidebar-source-branch button',
+ '.js-sidebar-source-branch button',
);
[sidebarCollapsedBtn, sidebarExpandedBtn].forEach((btn) => jest.spyOn(btn, 'click'));
diff --git a/spec/frontend/blob/file_template_selector_spec.js b/spec/frontend/blob/file_template_selector_spec.js
new file mode 100644
index 00000000000..2ab3b3ebc82
--- /dev/null
+++ b/spec/frontend/blob/file_template_selector_spec.js
@@ -0,0 +1,61 @@
+import $ from 'jquery';
+import FileTemplateSelector from '~/blob/file_template_selector';
+
+describe('FileTemplateSelector', () => {
+ let subject;
+ let dropdown;
+ let wrapper;
+
+ const createSubject = () => {
+ subject = new FileTemplateSelector({});
+ subject.config = {
+ dropdown,
+ wrapper,
+ };
+ subject.initDropdown = jest.fn();
+ };
+
+ afterEach(() => {
+ subject = null;
+ });
+
+ describe('show method', () => {
+ beforeEach(() => {
+ dropdown = document.createElement('div');
+ wrapper = document.createElement('div');
+ wrapper.classList.add('hidden');
+ createSubject();
+ });
+
+ it('calls init on first call', () => {
+ jest.spyOn(subject, 'init');
+ subject.show();
+
+ expect(subject.init).toHaveBeenCalledTimes(1);
+ });
+
+ it('does not call init on subsequent calls', () => {
+ jest.spyOn(subject, 'init');
+ subject.show();
+ subject.show();
+
+ expect(subject.init).toHaveBeenCalledTimes(1);
+ });
+
+ it('removes hidden class from $wrapper', () => {
+ expect($(wrapper).hasClass('hidden')).toBe(true);
+
+ subject.show();
+
+ expect($(wrapper).hasClass('hidden')).toBe(false);
+ });
+
+ it('sets the focus on the dropdown', async () => {
+ subject.show();
+ jest.spyOn(subject.$dropdown, 'focus');
+ jest.runAllTimers();
+
+ expect(subject.$dropdown.focus).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/boards/board_card_inner_spec.js b/spec/frontend/boards/board_card_inner_spec.js
index 4487fc15de6..36043b09636 100644
--- a/spec/frontend/boards/board_card_inner_spec.js
+++ b/spec/frontend/boards/board_card_inner_spec.js
@@ -1,11 +1,14 @@
import { GlLabel } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { range } from 'lodash';
+import Vuex from 'vuex';
+import BoardBlockedIcon from '~/boards/components/board_blocked_icon.vue';
import BoardCardInner from '~/boards/components/board_card_inner.vue';
+import { issuableTypes } from '~/boards/constants';
import eventHub from '~/boards/eventhub';
import defaultStore from '~/boards/stores';
import { updateHistory } from '~/lib/utils/url_utility';
-import { mockLabelList } from './mock_data';
+import { mockLabelList, mockIssue } from './mock_data';
jest.mock('~/lib/utils/url_utility');
jest.mock('~/boards/eventhub');
@@ -29,8 +32,28 @@ describe('Board card component', () => {
let wrapper;
let issue;
let list;
+ let store;
+
+ const findBoardBlockedIcon = () => wrapper.find(BoardBlockedIcon);
+
+ const createStore = () => {
+ store = new Vuex.Store({
+ ...defaultStore,
+ state: {
+ ...defaultStore.state,
+ issuableType: issuableTypes.issue,
+ },
+ getters: {
+ isGroupBoard: () => true,
+ isEpicBoard: () => false,
+ isProjectBoard: () => false,
+ },
+ });
+ };
+
+ const createWrapper = (props = {}) => {
+ createStore();
- const createWrapper = (props = {}, store = defaultStore) => {
wrapper = mount(BoardCardInner, {
store,
propsData: {
@@ -41,6 +64,13 @@ describe('Board card component', () => {
stubs: {
GlLabel: true,
},
+ mocks: {
+ $apollo: {
+ queries: {
+ blockingIssuables: { loading: false },
+ },
+ },
+ },
provide: {
rootPath: '/',
scopedLabelsAvailable: false,
@@ -51,14 +81,9 @@ describe('Board card component', () => {
beforeEach(() => {
list = mockLabelList;
issue = {
- title: 'Testing',
- id: 1,
- iid: 1,
- confidential: false,
+ ...mockIssue,
labels: [list.label],
assignees: [],
- referencePath: '#1',
- webUrl: '/test/1',
weight: 1,
};
@@ -68,6 +93,7 @@ describe('Board card component', () => {
afterEach(() => {
wrapper.destroy();
wrapper = null;
+ store = null;
jest.clearAllMocks();
});
@@ -87,18 +113,38 @@ describe('Board card component', () => {
expect(wrapper.find('.confidential-icon').exists()).toBe(false);
});
- it('does not render blocked icon', () => {
- expect(wrapper.find('.issue-blocked-icon').exists()).toBe(false);
- });
-
it('renders issue ID with #', () => {
- expect(wrapper.find('.board-card-number').text()).toContain(`#${issue.id}`);
+ expect(wrapper.find('.board-card-number').text()).toContain(`#${issue.iid}`);
});
it('does not render assignee', () => {
expect(wrapper.find('.board-card-assignee .avatar').exists()).toBe(false);
});
+ describe('blocked', () => {
+ it('renders blocked icon if issue is blocked', async () => {
+ createWrapper({
+ item: {
+ ...issue,
+ blocked: true,
+ },
+ });
+
+ expect(findBoardBlockedIcon().exists()).toBe(true);
+ });
+
+ it('does not show blocked icon if issue is not blocked', () => {
+ createWrapper({
+ item: {
+ ...issue,
+ blocked: false,
+ },
+ });
+
+ expect(findBoardBlockedIcon().exists()).toBe(false);
+ });
+ });
+
describe('confidential issue', () => {
beforeEach(() => {
wrapper.setProps({
@@ -303,21 +349,6 @@ describe('Board card component', () => {
});
});
- describe('blocked', () => {
- beforeEach(() => {
- wrapper.setProps({
- item: {
- ...wrapper.props('item'),
- blocked: true,
- },
- });
- });
-
- it('renders blocked icon if issue is blocked', () => {
- expect(wrapper.find('.issue-blocked-icon').exists()).toBe(true);
- });
- });
-
describe('filterByLabel method', () => {
beforeEach(() => {
delete window.location;
diff --git a/spec/frontend/boards/board_new_issue_deprecated_spec.js b/spec/frontend/boards/board_new_issue_deprecated_spec.js
index 3903ad201b2..3beaf870bf5 100644
--- a/spec/frontend/boards/board_new_issue_deprecated_spec.js
+++ b/spec/frontend/boards/board_new_issue_deprecated_spec.js
@@ -111,7 +111,7 @@ describe('Issue boards new issue form', () => {
describe('submit success', () => {
it('creates new issue', () => {
- wrapper.setData({ title: 'submit issue' });
+ wrapper.setData({ title: 'create issue' });
return Vue.nextTick()
.then(submitIssue)
@@ -122,7 +122,7 @@ describe('Issue boards new issue form', () => {
it('enables button after submit', () => {
jest.spyOn(wrapper.vm, 'submit').mockImplementation();
- wrapper.setData({ title: 'submit issue' });
+ wrapper.setData({ title: 'create issue' });
return Vue.nextTick()
.then(submitIssue)
@@ -132,7 +132,7 @@ describe('Issue boards new issue form', () => {
});
it('clears title after submit', () => {
- wrapper.setData({ title: 'submit issue' });
+ wrapper.setData({ title: 'create issue' });
return Vue.nextTick()
.then(submitIssue)
@@ -143,17 +143,17 @@ describe('Issue boards new issue form', () => {
it('sets detail issue after submit', () => {
expect(boardsStore.detail.issue.title).toBe(undefined);
- wrapper.setData({ title: 'submit issue' });
+ wrapper.setData({ title: 'create issue' });
return Vue.nextTick()
.then(submitIssue)
.then(() => {
- expect(boardsStore.detail.issue.title).toBe('submit issue');
+ expect(boardsStore.detail.issue.title).toBe('create issue');
});
});
it('sets detail list after submit', () => {
- wrapper.setData({ title: 'submit issue' });
+ wrapper.setData({ title: 'create issue' });
return Vue.nextTick()
.then(submitIssue)
@@ -164,7 +164,7 @@ describe('Issue boards new issue form', () => {
it('sets detail weight after submit', () => {
boardsStore.weightFeatureAvailable = true;
- wrapper.setData({ title: 'submit issue' });
+ wrapper.setData({ title: 'create issue' });
return Vue.nextTick()
.then(submitIssue)
@@ -175,7 +175,7 @@ describe('Issue boards new issue form', () => {
it('does not set detail weight after submit', () => {
boardsStore.weightFeatureAvailable = false;
- wrapper.setData({ title: 'submit issue' });
+ wrapper.setData({ title: 'create issue' });
return Vue.nextTick()
.then(submitIssue)
diff --git a/spec/frontend/boards/components/__snapshots__/board_blocked_icon_spec.js.snap b/spec/frontend/boards/components/__snapshots__/board_blocked_icon_spec.js.snap
new file mode 100644
index 00000000000..c000f300e4d
--- /dev/null
+++ b/spec/frontend/boards/components/__snapshots__/board_blocked_icon_spec.js.snap
@@ -0,0 +1,30 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`BoardBlockedIcon on mouseenter on blocked icon with more than three blocking issues matches the snapshot 1`] = `
+"<div class=\\"gl-display-inline\\"><svg data-testid=\\"issue-blocked-icon\\" aria-hidden=\\"true\\" class=\\"issue-blocked-icon gl-mr-2 gl-cursor-pointer gl-icon s16\\" id=\\"blocked-icon-uniqueId\\">
+ <use href=\\"#issue-block\\"></use>
+ </svg>
+ <div class=\\"gl-popover\\">
+ <ul class=\\"gl-list-style-none gl-p-0\\">
+ <li><a href=\\"http://gdk.test:3000/gitlab-org/my-project-1/-/issues/6\\" class=\\"gl-link gl-text-blue-500! gl-font-sm\\">my-project-1#6</a>
+ <p data-testid=\\"issuable-title\\" class=\\"gl-mb-3 gl-display-block!\\">
+ blocking issue title 1
+ </p>
+ </li>
+ <li><a href=\\"http://gdk.test:3000/gitlab-org/my-project-1/-/issues/5\\" class=\\"gl-link gl-text-blue-500! gl-font-sm\\">my-project-1#5</a>
+ <p data-testid=\\"issuable-title\\" class=\\"gl-mb-3 gl-display-block!\\">
+ blocking issue title 2 + blocking issue title 2 + blocking issue title 2 + bloc…
+ </p>
+ </li>
+ <li><a href=\\"http://gdk.test:3000/gitlab-org/my-project-1/-/issues/4\\" class=\\"gl-link gl-text-blue-500! gl-font-sm\\">my-project-1#4</a>
+ <p data-testid=\\"issuable-title\\" class=\\"gl-mb-3 gl-display-block!\\">
+ blocking issue title 3
+ </p>
+ </li>
+ </ul>
+ <div class=\\"gl-mt-4\\">
+ <p data-testid=\\"hidden-blocking-count\\" class=\\"gl-mb-3\\">+ 1 more issue</p> <a data-testid=\\"view-all-issues\\" href=\\"http://gdk.test:3000/gitlab-org/my-project-1/-/issues/0#related-issues\\" class=\\"gl-link gl-text-blue-500! gl-font-sm\\">View all blocking issues</a>
+ </div><span data-testid=\\"popover-title\\">Blocked by 4 issues</span>
+ </div>
+</div>"
+`;
diff --git a/spec/frontend/boards/components/board_add_new_column_form_spec.js b/spec/frontend/boards/components/board_add_new_column_form_spec.js
index 3702f55f17b..3b26ca57d6f 100644
--- a/spec/frontend/boards/components/board_add_new_column_form_spec.js
+++ b/spec/frontend/boards/components/board_add_new_column_form_spec.js
@@ -1,6 +1,6 @@
-import { GlFormGroup, GlSearchBoxByType, GlSkeletonLoader } from '@gitlab/ui';
+import { GlDropdown, GlFormGroup, GlSearchBoxByType, GlSkeletonLoader } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import Vue, { nextTick } from 'vue';
+import Vue from 'vue';
import Vuex from 'vuex';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import BoardAddNewColumnForm from '~/boards/components/board_add_new_column_form.vue';
@@ -25,7 +25,7 @@ describe('Board card layout', () => {
const mountComponent = ({
loading = false,
- formDescription = '',
+ noneSelected = '',
searchLabel = '',
searchPlaceholder = '',
selectedId,
@@ -34,12 +34,9 @@ describe('Board card layout', () => {
} = {}) => {
wrapper = extendedWrapper(
shallowMount(BoardAddNewColumnForm, {
- stubs: {
- GlFormGroup: true,
- },
propsData: {
loading,
- formDescription,
+ noneSelected,
searchLabel,
searchPlaceholder,
selectedId,
@@ -51,13 +48,15 @@ describe('Board card layout', () => {
...actions,
},
}),
+ stubs: {
+ GlDropdown,
+ },
}),
);
};
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
const formTitle = () => wrapper.findByTestId('board-add-column-form-title').text();
@@ -65,10 +64,13 @@ describe('Board card layout', () => {
const findSearchLabel = () => wrapper.find(GlFormGroup);
const cancelButton = () => wrapper.findByTestId('cancelAddNewColumn');
const submitButton = () => wrapper.findByTestId('addNewColumnButton');
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
it('shows form title & search input', () => {
mountComponent();
+ findDropdown().vm.$emit('show');
+
expect(formTitle()).toEqual(BoardAddNewColumnForm.i18n.newList);
expect(findSearchInput().exists()).toBe(true);
});
@@ -86,16 +88,6 @@ describe('Board card layout', () => {
expect(setAddColumnFormVisibility).toHaveBeenCalledWith(expect.anything(), false);
});
- it('sets placeholder and description from props', () => {
- const props = {
- formDescription: 'Some description of a list',
- };
-
- mountComponent(props);
-
- expect(wrapper.html()).toHaveText(props.formDescription);
- });
-
describe('items', () => {
const mountWithItems = (loading) =>
mountComponent({
@@ -151,13 +143,11 @@ describe('Board card layout', () => {
expect(submitButton().props('disabled')).toBe(true);
});
- it('emits add-list event on click', async () => {
+ it('emits add-list event on click', () => {
mountComponent({
selectedId: mockLabelList.label.id,
});
- await nextTick();
-
submitButton().vm.$emit('click');
expect(wrapper.emitted('add-list')).toEqual([[]]);
diff --git a/spec/frontend/boards/components/board_add_new_column_spec.js b/spec/frontend/boards/components/board_add_new_column_spec.js
index 60584eaf6cf..61f210f566b 100644
--- a/spec/frontend/boards/components/board_add_new_column_spec.js
+++ b/spec/frontend/boards/components/board_add_new_column_spec.js
@@ -1,3 +1,4 @@
+import { GlFormRadioGroup } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
@@ -12,6 +13,10 @@ Vue.use(Vuex);
describe('Board card layout', () => {
let wrapper;
+ const selectLabel = (id) => {
+ wrapper.findComponent(GlFormRadioGroup).vm.$emit('change', id);
+ };
+
const createStore = ({ actions = {}, getters = {}, state = {} } = {}) => {
return new Vuex.Store({
state: {
@@ -57,6 +62,11 @@ describe('Board card layout', () => {
},
}),
);
+
+ // trigger change event
+ if (selectedId) {
+ selectLabel(selectedId);
+ }
};
afterEach(() => {
diff --git a/spec/frontend/boards/components/board_blocked_icon_spec.js b/spec/frontend/boards/components/board_blocked_icon_spec.js
new file mode 100644
index 00000000000..7b04942f056
--- /dev/null
+++ b/spec/frontend/boards/components/board_blocked_icon_spec.js
@@ -0,0 +1,226 @@
+import { GlIcon, GlLink, GlPopover, GlLoadingIcon } from '@gitlab/ui';
+import { shallowMount, mount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import BoardBlockedIcon from '~/boards/components/board_blocked_icon.vue';
+import { blockingIssuablesQueries, issuableTypes } from '~/boards/constants';
+import { truncate } from '~/lib/utils/text_utility';
+import {
+ mockIssue,
+ mockBlockingIssue1,
+ mockBlockingIssue2,
+ mockBlockingIssuablesResponse1,
+ mockBlockingIssuablesResponse2,
+ mockBlockingIssuablesResponse3,
+ mockBlockedIssue1,
+ mockBlockedIssue2,
+} from '../mock_data';
+
+describe('BoardBlockedIcon', () => {
+ let wrapper;
+ let mockApollo;
+
+ const findGlIcon = () => wrapper.find(GlIcon);
+ const findGlPopover = () => wrapper.find(GlPopover);
+ const findGlLink = () => wrapper.find(GlLink);
+ const findPopoverTitle = () => wrapper.findByTestId('popover-title');
+ const findIssuableTitle = () => wrapper.findByTestId('issuable-title');
+ const findHiddenBlockingCount = () => wrapper.findByTestId('hidden-blocking-count');
+ const findViewAllIssuableLink = () => wrapper.findByTestId('view-all-issues');
+
+ const waitForApollo = async () => {
+ jest.runOnlyPendingTimers();
+ await waitForPromises();
+ };
+
+ const mouseenter = async () => {
+ findGlIcon().vm.$emit('mouseenter');
+
+ await wrapper.vm.$nextTick();
+ await waitForApollo();
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const createWrapperWithApollo = ({
+ item = mockBlockedIssue1,
+ blockingIssuablesSpy = jest.fn().mockResolvedValue(mockBlockingIssuablesResponse1),
+ } = {}) => {
+ mockApollo = createMockApollo([
+ [blockingIssuablesQueries[issuableTypes.issue].query, blockingIssuablesSpy],
+ ]);
+
+ Vue.use(VueApollo);
+ wrapper = extendedWrapper(
+ mount(BoardBlockedIcon, {
+ apolloProvider: mockApollo,
+ propsData: {
+ item: {
+ ...mockIssue,
+ ...item,
+ },
+ uniqueId: 'uniqueId',
+ issuableType: issuableTypes.issue,
+ },
+ attachTo: document.body,
+ }),
+ );
+ };
+
+ const createWrapper = ({ item = {}, queries = {}, data = {}, loading = false } = {}) => {
+ wrapper = extendedWrapper(
+ shallowMount(BoardBlockedIcon, {
+ propsData: {
+ item: {
+ ...mockIssue,
+ ...item,
+ },
+ uniqueId: 'uniqueid',
+ issuableType: issuableTypes.issue,
+ },
+ data() {
+ return {
+ ...data,
+ };
+ },
+ mocks: {
+ $apollo: {
+ queries: {
+ blockingIssuables: { loading },
+ ...queries,
+ },
+ },
+ },
+ stubs: {
+ GlPopover,
+ },
+ attachTo: document.body,
+ }),
+ );
+ };
+
+ it('should render blocked icon', () => {
+ createWrapper();
+
+ expect(findGlIcon().exists()).toBe(true);
+ });
+
+ it('should display a loading spinner while loading', () => {
+ createWrapper({ loading: true });
+
+ expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
+ });
+
+ it('should not query for blocking issuables by default', async () => {
+ createWrapperWithApollo();
+
+ expect(findGlPopover().text()).not.toContain(mockBlockingIssue1.title);
+ });
+
+ describe('on mouseenter on blocked icon', () => {
+ it('should query for blocking issuables and render the result', async () => {
+ createWrapperWithApollo();
+
+ expect(findGlPopover().text()).not.toContain(mockBlockingIssue1.title);
+
+ await mouseenter();
+
+ expect(findGlPopover().exists()).toBe(true);
+ expect(findIssuableTitle().text()).toContain(mockBlockingIssue1.title);
+ expect(wrapper.vm.skip).toBe(true);
+ });
+
+ it('should emit "blocking-issuables-error" event on query error', async () => {
+ const mockError = new Error('mayday');
+ createWrapperWithApollo({ blockingIssuablesSpy: jest.fn().mockRejectedValue(mockError) });
+
+ await mouseenter();
+
+ const [
+ [
+ {
+ message,
+ error: { networkError },
+ },
+ ],
+ ] = wrapper.emitted('blocking-issuables-error');
+ expect(message).toBe('Failed to fetch blocking issues');
+ expect(networkError).toBe(mockError);
+ });
+
+ describe('with a single blocking issue', () => {
+ beforeEach(async () => {
+ createWrapperWithApollo();
+
+ await mouseenter();
+ });
+
+ it('should render a title of the issuable', async () => {
+ expect(findIssuableTitle().text()).toBe(mockBlockingIssue1.title);
+ });
+
+ it('should render issuable reference and link to the issuable', async () => {
+ const formattedRef = mockBlockingIssue1.reference.split('/')[1];
+
+ expect(findGlLink().text()).toBe(formattedRef);
+ expect(findGlLink().attributes('href')).toBe(mockBlockingIssue1.webUrl);
+ });
+
+ it('should render popover title with correct blocking issuable count', async () => {
+ expect(findPopoverTitle().text()).toBe('Blocked by 1 issue');
+ });
+ });
+
+ describe('when issue has a long title', () => {
+ it('should render a truncated title', async () => {
+ createWrapperWithApollo({
+ blockingIssuablesSpy: jest.fn().mockResolvedValue(mockBlockingIssuablesResponse2),
+ });
+
+ await mouseenter();
+
+ const truncatedTitle = truncate(
+ mockBlockingIssue2.title,
+ wrapper.vm.$options.textTruncateWidth,
+ );
+ expect(findIssuableTitle().text()).toBe(truncatedTitle);
+ });
+ });
+
+ describe('with more than three blocking issues', () => {
+ beforeEach(async () => {
+ createWrapperWithApollo({
+ item: mockBlockedIssue2,
+ blockingIssuablesSpy: jest.fn().mockResolvedValue(mockBlockingIssuablesResponse3),
+ });
+
+ await mouseenter();
+ });
+
+ it('matches the snapshot', () => {
+ expect(wrapper.html()).toMatchSnapshot();
+ });
+
+ it('should render popover title with correct blocking issuable count', async () => {
+ expect(findPopoverTitle().text()).toBe('Blocked by 4 issues');
+ });
+
+ it('should render the number of hidden blocking issuables', () => {
+ expect(findHiddenBlockingCount().text()).toBe('+ 1 more issue');
+ });
+
+ it('should link to the blocked issue page at the related issue anchor', async () => {
+ expect(findViewAllIssuableLink().text()).toBe('View all blocking issues');
+ expect(findViewAllIssuableLink().attributes('href')).toBe(
+ `${mockBlockedIssue2.webUrl}#related-issues`,
+ );
+ });
+ });
+ });
+});
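
The Apollo-backed cases above pass a jest spy as the query handler to `createMockApollo`; the handler resolves with a canned GraphQL response, so no network layer is involved and the spy doubles as an assertion point. A minimal sketch, assuming a hypothetical `myQuery` document, `MyWidget` component, and response fixture:

import Vue from 'vue';
import VueApollo from 'vue-apollo';
import { mount } from '@vue/test-utils';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
// hypothetical query document and component, used only for illustration
import myQuery from '~/my_feature/graphql/my_query.query.graphql';
import MyWidget from '~/my_feature/components/my_widget.vue';

Vue.use(VueApollo);

describe('MyWidget with mocked Apollo', () => {
  it('renders data returned by the query handler', async () => {
    const mockResponse = { data: { project: { id: '1', name: 'Example' } } };
    const handler = jest.fn().mockResolvedValue(mockResponse);

    const wrapper = mount(MyWidget, {
      apolloProvider: createMockApollo([[myQuery, handler]]),
    });

    // flush the mocked query promise before asserting on the rendered output
    await waitForPromises();

    expect(handler).toHaveBeenCalled();
    expect(wrapper.text()).toContain('Example');
  });
});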
diff --git a/spec/frontend/boards/components/board_content_sidebar_spec.js b/spec/frontend/boards/components/board_content_sidebar_spec.js
new file mode 100644
index 00000000000..7f949739891
--- /dev/null
+++ b/spec/frontend/boards/components/board_content_sidebar_spec.js
@@ -0,0 +1,140 @@
+import { GlDrawer } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vuex from 'vuex';
+import { stubComponent } from 'helpers/stub_component';
+import BoardContentSidebar from '~/boards/components/board_content_sidebar.vue';
+import BoardSidebarDueDate from '~/boards/components/sidebar/board_sidebar_due_date.vue';
+import BoardSidebarLabelsSelect from '~/boards/components/sidebar/board_sidebar_labels_select.vue';
+import BoardSidebarMilestoneSelect from '~/boards/components/sidebar/board_sidebar_milestone_select.vue';
+import BoardSidebarSubscription from '~/boards/components/sidebar/board_sidebar_subscription.vue';
+import BoardSidebarTitle from '~/boards/components/sidebar/board_sidebar_title.vue';
+import { ISSUABLE } from '~/boards/constants';
+import { mockIssue, mockIssueGroupPath, mockIssueProjectPath } from '../mock_data';
+
+describe('BoardContentSidebar', () => {
+ let wrapper;
+ let store;
+
+ const createStore = ({ mockGetters = {}, mockActions = {} } = {}) => {
+ store = new Vuex.Store({
+ state: {
+ sidebarType: ISSUABLE,
+ issues: { [mockIssue.id]: { ...mockIssue, epic: null } },
+ activeId: mockIssue.id,
+ issuableType: 'issue',
+ },
+ getters: {
+ activeBoardItem: () => {
+ return { ...mockIssue, epic: null };
+ },
+ groupPathForActiveIssue: () => mockIssueGroupPath,
+ projectPathForActiveIssue: () => mockIssueProjectPath,
+ isSidebarOpen: () => true,
+ ...mockGetters,
+ },
+ actions: mockActions,
+ });
+ };
+
+ const createComponent = () => {
+ /*
+ Dynamically imported components (in our case ee imports)
+ aren't stubbed automatically in VTU v1:
+ https://github.com/vuejs/vue-test-utils/issues/1279.
+
+ This requires us to additionally mock apollo or vuex stores.
+ */
+ wrapper = shallowMount(BoardContentSidebar, {
+ provide: {
+ canUpdate: true,
+ rootPath: '/',
+ groupId: 1,
+ },
+ store,
+ stubs: {
+ GlDrawer: stubComponent(GlDrawer, {
+ template: '<div><slot name="header"></slot><slot></slot></div>',
+ }),
+ },
+ mocks: {
+ $apollo: {
+ queries: {
+ participants: {
+ loading: false,
+ },
+ currentIteration: {
+ loading: false,
+ },
+ iterations: {
+ loading: false,
+ },
+ },
+ },
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createStore();
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('confirms we render GlDrawer', () => {
+ expect(wrapper.find(GlDrawer).exists()).toBe(true);
+ });
+
+ it('does not render GlDrawer when isSidebarOpen is false', () => {
+ createStore({ mockGetters: { isSidebarOpen: () => false } });
+ createComponent();
+
+ expect(wrapper.find(GlDrawer).exists()).toBe(false);
+ });
+
+ it('applies an open attribute', () => {
+ expect(wrapper.find(GlDrawer).props('open')).toBe(true);
+ });
+
+ it('renders BoardSidebarLabelsSelect', () => {
+ expect(wrapper.find(BoardSidebarLabelsSelect).exists()).toBe(true);
+ });
+
+ it('renders BoardSidebarTitle', () => {
+ expect(wrapper.find(BoardSidebarTitle).exists()).toBe(true);
+ });
+
+ it('renders BoardSidebarDueDate', () => {
+ expect(wrapper.find(BoardSidebarDueDate).exists()).toBe(true);
+ });
+
+ it('renders BoardSidebarSubscription', () => {
+ expect(wrapper.find(BoardSidebarSubscription).exists()).toBe(true);
+ });
+
+ it('renders BoardSidebarMilestoneSelect', () => {
+ expect(wrapper.find(BoardSidebarMilestoneSelect).exists()).toBe(true);
+ });
+
+ describe('when we emit close', () => {
+ let toggleBoardItem;
+
+ beforeEach(() => {
+ toggleBoardItem = jest.fn();
+ createStore({ mockActions: { toggleBoardItem } });
+ createComponent();
+ });
+
+ it('calls toggleBoardItem with correct parameters', async () => {
+ wrapper.find(GlDrawer).vm.$emit('close');
+
+ expect(toggleBoardItem).toHaveBeenCalledTimes(1);
+ expect(toggleBoardItem).toHaveBeenCalledWith(expect.any(Object), {
+ boardItem: { ...mockIssue, epic: null },
+ sidebarType: ISSUABLE,
+ });
+ });
+ });
+});
diff --git a/spec/frontend/boards/components/board_content_spec.js b/spec/frontend/boards/components/board_content_spec.js
index 159b67ccc67..8c1a7bd3947 100644
--- a/spec/frontend/boards/components/board_content_spec.js
+++ b/spec/frontend/boards/components/board_content_spec.js
@@ -33,7 +33,12 @@ describe('BoardContent', () => {
});
};
- const createComponent = ({ state, props = {}, graphqlBoardListsEnabled = false } = {}) => {
+ const createComponent = ({
+ state,
+ props = {},
+ graphqlBoardListsEnabled = false,
+ canAdminList = true,
+ } = {}) => {
const store = createStore({
...defaultState,
...state,
@@ -42,11 +47,11 @@ describe('BoardContent', () => {
localVue,
propsData: {
lists: mockListsWithModel,
- canAdminList: true,
disabled: false,
...props,
},
provide: {
+ canAdminList,
glFeatures: { graphqlBoardLists: graphqlBoardListsEnabled },
},
store,
@@ -82,7 +87,7 @@ describe('BoardContent', () => {
describe('can admin list', () => {
beforeEach(() => {
- createComponent({ graphqlBoardListsEnabled: true, props: { canAdminList: true } });
+ createComponent({ graphqlBoardListsEnabled: true, canAdminList: true });
});
it('renders draggable component', () => {
@@ -92,7 +97,7 @@ describe('BoardContent', () => {
describe('can not admin list', () => {
beforeEach(() => {
- createComponent({ graphqlBoardListsEnabled: true, props: { canAdminList: false } });
+ createComponent({ graphqlBoardListsEnabled: true, canAdminList: false });
});
it('does not render draggable component', () => {
diff --git a/spec/frontend/boards/components/board_form_spec.js b/spec/frontend/boards/components/board_form_spec.js
index 32499bd5480..24fcdd528d5 100644
--- a/spec/frontend/boards/components/board_form_spec.js
+++ b/spec/frontend/boards/components/board_form_spec.js
@@ -226,7 +226,7 @@ describe('BoardForm', () => {
it('passes correct primary action text and variant', () => {
expect(findModalActionPrimary().text).toBe('Save changes');
- expect(findModalActionPrimary().attributes[0].variant).toBe('info');
+ expect(findModalActionPrimary().attributes[0].variant).toBe('confirm');
});
it('does not render delete confirmation message', () => {
diff --git a/spec/frontend/boards/components/board_new_issue_spec.js b/spec/frontend/boards/components/board_new_issue_spec.js
index 737a18294bc..e6405bbcff3 100644
--- a/spec/frontend/boards/components/board_new_issue_spec.js
+++ b/spec/frontend/boards/components/board_new_issue_spec.js
@@ -86,7 +86,7 @@ describe('Issue boards new issue form', () => {
describe('submit success', () => {
it('creates new issue', async () => {
- wrapper.setData({ title: 'submit issue' });
+ wrapper.setData({ title: 'create issue' });
await vm.$nextTick();
await submitIssue();
@@ -95,7 +95,7 @@ describe('Issue boards new issue form', () => {
it('enables button after submit', async () => {
jest.spyOn(wrapper.vm, 'submit').mockImplementation();
- wrapper.setData({ title: 'submit issue' });
+ wrapper.setData({ title: 'create issue' });
await vm.$nextTick();
await submitIssue();
@@ -103,7 +103,7 @@ describe('Issue boards new issue form', () => {
});
it('clears title after submit', async () => {
- wrapper.setData({ title: 'submit issue' });
+ wrapper.setData({ title: 'create issue' });
await vm.$nextTick();
await submitIssue();
diff --git a/spec/frontend/boards/components/board_settings_sidebar_spec.js b/spec/frontend/boards/components/board_settings_sidebar_spec.js
index 52b4d71f7b9..464331b6e30 100644
--- a/spec/frontend/boards/components/board_settings_sidebar_spec.js
+++ b/spec/frontend/boards/components/board_settings_sidebar_spec.js
@@ -4,6 +4,7 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import Vuex from 'vuex';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import BoardSettingsSidebar from '~/boards/components/board_settings_sidebar.vue';
import { inactiveId, LIST } from '~/boards/constants';
import { createStore } from '~/boards/stores';
@@ -22,11 +23,18 @@ describe('BoardSettingsSidebar', () => {
const labelColor = '#FFFF';
const listId = 1;
- const createComponent = () => {
- wrapper = shallowMount(BoardSettingsSidebar, {
- store,
- localVue,
- });
+ const findRemoveButton = () => wrapper.findByTestId('remove-list');
+
+ const createComponent = ({ canAdminList = false } = {}) => {
+ wrapper = extendedWrapper(
+ shallowMount(BoardSettingsSidebar, {
+ store,
+ localVue,
+ provide: {
+ canAdminList,
+ },
+ }),
+ );
};
const findLabel = () => wrapper.find(GlLabel);
const findDrawer = () => wrapper.find(GlDrawer);
@@ -164,4 +172,29 @@ describe('BoardSettingsSidebar', () => {
expect(findDrawer().exists()).toBe(false);
});
});
+
+ it('does not render "Remove list" when user cannot admin the boards list', () => {
+ createComponent();
+
+ expect(findRemoveButton().exists()).toBe(false);
+ });
+
+ describe('when user can admin the boards list', () => {
+ beforeEach(() => {
+ store.state.activeId = listId;
+ store.state.sidebarType = LIST;
+
+ boardsStore.addList({
+ id: listId,
+ label: { title: labelTitle, color: labelColor },
+ list_type: 'label',
+ });
+
+ createComponent({ canAdminList: true });
+ });
+
+ it('renders "Remove list" button', () => {
+ expect(findRemoveButton().exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/boards/components/filtered_search_spec.js b/spec/frontend/boards/components/filtered_search_spec.js
deleted file mode 100644
index 7f238aa671f..00000000000
--- a/spec/frontend/boards/components/filtered_search_spec.js
+++ /dev/null
@@ -1,65 +0,0 @@
-import { createLocalVue, shallowMount } from '@vue/test-utils';
-import Vuex from 'vuex';
-import FilteredSearch from '~/boards/components/filtered_search.vue';
-import { createStore } from '~/boards/stores';
-import * as commonUtils from '~/lib/utils/common_utils';
-import FilteredSearchBarRoot from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('FilteredSearch', () => {
- let wrapper;
- let store;
-
- const createComponent = () => {
- wrapper = shallowMount(FilteredSearch, {
- localVue,
- propsData: { search: '' },
- store,
- attachTo: document.body,
- });
- };
-
- beforeEach(() => {
- // this needed for actions call for performSearch
- window.gon = { features: {} };
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('default', () => {
- beforeEach(() => {
- store = createStore();
-
- jest.spyOn(store, 'dispatch');
-
- createComponent();
- });
-
- it('finds FilteredSearch', () => {
- expect(wrapper.find(FilteredSearchBarRoot).exists()).toBe(true);
- });
-
- describe('when onFilter is emitted', () => {
- it('calls performSearch', () => {
- wrapper.find(FilteredSearchBarRoot).vm.$emit('onFilter', [{ value: { data: '' } }]);
-
- expect(store.dispatch).toHaveBeenCalledWith('performSearch');
- });
-
- it('calls historyPushState', () => {
- commonUtils.historyPushState = jest.fn();
- wrapper
- .find(FilteredSearchBarRoot)
- .vm.$emit('onFilter', [{ value: { data: 'searchQuery' } }]);
-
- expect(commonUtils.historyPushState).toHaveBeenCalledWith(
- 'http://test.host/?search=searchQuery',
- );
- });
- });
- });
-});
diff --git a/spec/frontend/boards/components/issue_time_estimate_spec.js b/spec/frontend/boards/components/issue_time_estimate_spec.js
index 2e253d24125..635964b6b4a 100644
--- a/spec/frontend/boards/components/issue_time_estimate_spec.js
+++ b/spec/frontend/boards/components/issue_time_estimate_spec.js
@@ -1,5 +1,5 @@
import { shallowMount } from '@vue/test-utils';
-import { config as vueConfig } from 'vue';
+import Vue from 'vue';
import IssueTimeEstimate from '~/boards/components/issue_time_estimate.vue';
describe('Issue Time Estimate component', () => {
@@ -34,10 +34,10 @@ describe('Issue Time Estimate component', () => {
try {
// This will raise props validating warning by Vue, silencing it
- vueConfig.silent = true;
+ Vue.config.silent = true;
await wrapper.setProps({ estimate: 'Foo <script>alert("XSS")</script>' });
} finally {
- vueConfig.silent = false;
+ Vue.config.silent = false;
}
expect(alertSpy).not.toHaveBeenCalled();
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js
index 98ac211238c..153d0640b23 100644
--- a/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_labels_select_spec.js
@@ -64,7 +64,7 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
beforeEach(async () => {
createWrapper();
- jest.spyOn(wrapper.vm, 'setActiveIssueLabels').mockImplementation(() => TEST_LABELS);
+ jest.spyOn(wrapper.vm, 'setActiveBoardItemLabels').mockImplementation(() => TEST_LABELS);
findLabelsSelect().vm.$emit('updateSelectedLabels', TEST_LABELS_PAYLOAD);
store.state.boardItems[TEST_ISSUE.id].labels = TEST_LABELS;
await wrapper.vm.$nextTick();
@@ -76,7 +76,7 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
});
it('commits change to the server', () => {
- expect(wrapper.vm.setActiveIssueLabels).toHaveBeenCalledWith({
+ expect(wrapper.vm.setActiveBoardItemLabels).toHaveBeenCalledWith({
addLabelIds: TEST_LABELS.map((label) => label.id),
projectPath: 'gitlab-org/test-subgroup/gitlab-test',
removeLabelIds: [],
@@ -94,13 +94,13 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
beforeEach(async () => {
createWrapper({ labels: TEST_LABELS });
- jest.spyOn(wrapper.vm, 'setActiveIssueLabels').mockImplementation(() => expectedLabels);
+ jest.spyOn(wrapper.vm, 'setActiveBoardItemLabels').mockImplementation(() => expectedLabels);
findLabelsSelect().vm.$emit('updateSelectedLabels', testLabelsPayload);
await wrapper.vm.$nextTick();
});
it('commits change to the server', () => {
- expect(wrapper.vm.setActiveIssueLabels).toHaveBeenCalledWith({
+ expect(wrapper.vm.setActiveBoardItemLabels).toHaveBeenCalledWith({
addLabelIds: [5, 7],
removeLabelIds: [6],
projectPath: 'gitlab-org/test-subgroup/gitlab-test',
@@ -114,13 +114,13 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
beforeEach(async () => {
createWrapper({ labels: [testLabel] });
- jest.spyOn(wrapper.vm, 'setActiveIssueLabels').mockImplementation(() => {});
+ jest.spyOn(wrapper.vm, 'setActiveBoardItemLabels').mockImplementation(() => {});
});
it('commits change to the server', () => {
wrapper.find(GlLabel).vm.$emit('close', testLabel);
- expect(wrapper.vm.setActiveIssueLabels).toHaveBeenCalledWith({
+ expect(wrapper.vm.setActiveBoardItemLabels).toHaveBeenCalledWith({
removeLabelIds: [getIdFromGraphQLId(testLabel.id)],
projectPath: 'gitlab-org/test-subgroup/gitlab-test',
});
@@ -131,7 +131,7 @@ describe('~/boards/components/sidebar/board_sidebar_labels_select.vue', () => {
beforeEach(async () => {
createWrapper({ labels: TEST_LABELS });
- jest.spyOn(wrapper.vm, 'setActiveIssueLabels').mockImplementation(() => {
+ jest.spyOn(wrapper.vm, 'setActiveBoardItemLabels').mockImplementation(() => {
throw new Error(['failed mutation']);
});
findLabelsSelect().vm.$emit('updateSelectedLabels', [{ id: '?' }]);
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js
index cfd7f32b2cc..7976e73ff2f 100644
--- a/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_subscription_spec.js
@@ -1,5 +1,6 @@
import { GlToggle, GlLoadingIcon } from '@gitlab/ui';
-import { mount, createLocalVue } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
+import Vue from 'vue';
import Vuex from 'vuex';
import BoardSidebarSubscription from '~/boards/components/sidebar/board_sidebar_subscription.vue';
import { createStore } from '~/boards/stores';
@@ -9,8 +10,7 @@ import { mockActiveIssue } from '../../mock_data';
jest.mock('~/flash.js');
-const localVue = createLocalVue();
-localVue.use(Vuex);
+Vue.use(Vuex);
describe('~/boards/components/sidebar/board_sidebar_subscription_spec.vue', () => {
let wrapper;
@@ -20,14 +20,16 @@ describe('~/boards/components/sidebar/board_sidebar_subscription_spec.vue', () =
const findToggle = () => wrapper.find(GlToggle);
const findGlLoadingIcon = () => wrapper.find(GlLoadingIcon);
- const createComponent = (activeIssue = { ...mockActiveIssue }) => {
+ const createComponent = (activeBoardItem = { ...mockActiveIssue }) => {
store = createStore();
- store.state.boardItems = { [activeIssue.id]: activeIssue };
- store.state.activeId = activeIssue.id;
+ store.state.boardItems = { [activeBoardItem.id]: activeBoardItem };
+ store.state.activeId = activeBoardItem.id;
wrapper = mount(BoardSidebarSubscription, {
- localVue,
store,
+ provide: {
+ emailsDisabled: false,
+ },
});
};
@@ -90,9 +92,9 @@ describe('~/boards/components/sidebar/board_sidebar_subscription_spec.vue', () =
describe('Board sidebar subscription component `behavior`', () => {
const mockSetActiveIssueSubscribed = (subscribedState) => {
- jest.spyOn(wrapper.vm, 'setActiveIssueSubscribed').mockImplementation(async () => {
- store.commit(types.UPDATE_ISSUE_BY_ID, {
- issueId: mockActiveIssue.id,
+ jest.spyOn(wrapper.vm, 'setActiveItemSubscribed').mockImplementation(async () => {
+ store.commit(types.UPDATE_BOARD_ITEM_BY_ID, {
+ itemId: mockActiveIssue.id,
prop: 'subscribed',
value: subscribedState,
});
@@ -110,7 +112,7 @@ describe('~/boards/components/sidebar/board_sidebar_subscription_spec.vue', () =
await wrapper.vm.$nextTick();
expect(findGlLoadingIcon().exists()).toBe(true);
- expect(wrapper.vm.setActiveIssueSubscribed).toHaveBeenCalledWith({
+ expect(wrapper.vm.setActiveItemSubscribed).toHaveBeenCalledWith({
subscribed: true,
projectPath: 'gitlab-org/test-subgroup/gitlab-test',
});
@@ -134,7 +136,7 @@ describe('~/boards/components/sidebar/board_sidebar_subscription_spec.vue', () =
await wrapper.vm.$nextTick();
- expect(wrapper.vm.setActiveIssueSubscribed).toHaveBeenCalledWith({
+ expect(wrapper.vm.setActiveItemSubscribed).toHaveBeenCalledWith({
subscribed: false,
projectPath: 'gitlab-org/test-subgroup/gitlab-test',
});
@@ -148,7 +150,7 @@ describe('~/boards/components/sidebar/board_sidebar_subscription_spec.vue', () =
it('flashes an error message when setting the subscribed state fails', async () => {
createComponent();
- jest.spyOn(wrapper.vm, 'setActiveIssueSubscribed').mockImplementation(async () => {
+ jest.spyOn(wrapper.vm, 'setActiveItemSubscribed').mockImplementation(async () => {
throw new Error();
});
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_time_tracker_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_time_tracker_spec.js
new file mode 100644
index 00000000000..03924bfa8d3
--- /dev/null
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_time_tracker_spec.js
@@ -0,0 +1,58 @@
+/*
+ To avoid duplicating tests in time_tracker.spec,
+ this spec only contains a simple test to check rendering.
+
+ A detailed feature spec is used to test the time tracking feature
+ in the swimlanes sidebar.
+*/
+
+import { shallowMount } from '@vue/test-utils';
+import BoardSidebarTimeTracker from '~/boards/components/sidebar/board_sidebar_time_tracker.vue';
+import { createStore } from '~/boards/stores';
+import IssuableTimeTracker from '~/sidebar/components/time_tracking/time_tracker.vue';
+
+describe('BoardSidebarTimeTracker', () => {
+ let wrapper;
+ let store;
+
+ const createComponent = (options) => {
+ wrapper = shallowMount(BoardSidebarTimeTracker, {
+ store,
+ ...options,
+ });
+ };
+
+ beforeEach(() => {
+ store = createStore();
+ store.state.boardItems = {
+ 1: {
+ timeEstimate: 3600,
+ totalTimeSpent: 1800,
+ humanTimeEstimate: '1h',
+ humanTotalTimeSpent: '30min',
+ },
+ };
+ store.state.activeId = '1';
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it.each([[true], [false]])(
+ 'renders IssuableTimeTracker with correct spent and estimated time (timeTrackingLimitToHours=%s)',
+ (timeTrackingLimitToHours) => {
+ createComponent({ provide: { timeTrackingLimitToHours } });
+
+ expect(wrapper.find(IssuableTimeTracker).props()).toEqual({
+ timeEstimate: 3600,
+ timeSpent: 1800,
+ humanTimeEstimate: '1h',
+ humanTimeSpent: '30min',
+ limitToHours: timeTrackingLimitToHours,
+ showCollapsed: false,
+ });
+ },
+ );
+});
diff --git a/spec/frontend/boards/components/sidebar/board_sidebar_issue_title_spec.js b/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js
index 723d0345f76..c8ccd4c88a5 100644
--- a/spec/frontend/boards/components/sidebar/board_sidebar_issue_title_spec.js
+++ b/spec/frontend/boards/components/sidebar/board_sidebar_title_spec.js
@@ -1,11 +1,11 @@
import { GlAlert, GlFormInput, GlForm } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import BoardEditableItem from '~/boards/components/sidebar/board_editable_item.vue';
-import BoardSidebarIssueTitle from '~/boards/components/sidebar/board_sidebar_issue_title.vue';
+import BoardSidebarTitle from '~/boards/components/sidebar/board_sidebar_title.vue';
import { createStore } from '~/boards/stores';
import createFlash from '~/flash';
-const TEST_TITLE = 'New issue title';
+const TEST_TITLE = 'New item title';
const TEST_ISSUE_A = {
id: 'gid://gitlab/Issue/1',
iid: 8,
@@ -21,7 +21,7 @@ const TEST_ISSUE_B = {
jest.mock('~/flash');
-describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => {
+describe('~/boards/components/sidebar/board_sidebar_title.vue', () => {
let wrapper;
let store;
@@ -32,12 +32,12 @@ describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => {
wrapper = null;
});
- const createWrapper = (issue = TEST_ISSUE_A) => {
+ const createWrapper = (item = TEST_ISSUE_A) => {
store = createStore();
- store.state.boardItems = { [issue.id]: { ...issue } };
- store.dispatch('setActiveId', { id: issue.id });
+ store.state.boardItems = { [item.id]: { ...item } };
+ store.dispatch('setActiveId', { id: item.id });
- wrapper = shallowMount(BoardSidebarIssueTitle, {
+ wrapper = shallowMount(BoardSidebarTitle, {
store,
provide: {
canUpdate: true,
@@ -53,7 +53,7 @@ describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => {
const findFormInput = () => wrapper.find(GlFormInput);
const findEditableItem = () => wrapper.find(BoardEditableItem);
const findCancelButton = () => wrapper.find('[data-testid="cancel-button"]');
- const findTitle = () => wrapper.find('[data-testid="issue-title"]');
+ const findTitle = () => wrapper.find('[data-testid="item-title"]');
const findCollapsed = () => wrapper.find('[data-testid="collapsed-content"]');
it('renders title and reference', () => {
@@ -73,7 +73,7 @@ describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => {
beforeEach(async () => {
createWrapper();
- jest.spyOn(wrapper.vm, 'setActiveIssueTitle').mockImplementation(() => {
+ jest.spyOn(wrapper.vm, 'setActiveItemTitle').mockImplementation(() => {
store.state.boardItems[TEST_ISSUE_A.id].title = TEST_TITLE;
});
findFormInput().vm.$emit('input', TEST_TITLE);
@@ -87,7 +87,7 @@ describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => {
});
it('commits change to the server', () => {
- expect(wrapper.vm.setActiveIssueTitle).toHaveBeenCalledWith({
+ expect(wrapper.vm.setActiveItemTitle).toHaveBeenCalledWith({
title: TEST_TITLE,
projectPath: 'h/b',
});
@@ -98,14 +98,14 @@ describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => {
beforeEach(async () => {
createWrapper();
- jest.spyOn(wrapper.vm, 'setActiveIssueTitle').mockImplementation(() => {});
+ jest.spyOn(wrapper.vm, 'setActiveItemTitle').mockImplementation(() => {});
findFormInput().vm.$emit('input', '');
findForm().vm.$emit('submit', { preventDefault: () => {} });
await wrapper.vm.$nextTick();
});
it('commits change to the server', () => {
- expect(wrapper.vm.setActiveIssueTitle).not.toHaveBeenCalled();
+ expect(wrapper.vm.setActiveItemTitle).not.toHaveBeenCalled();
});
});
@@ -122,7 +122,7 @@ describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => {
it('does not collapse sidebar and shows alert', () => {
expect(findCollapsed().isVisible()).toBe(false);
expect(findAlert().exists()).toBe(true);
- expect(localStorage.getItem(`${TEST_ISSUE_A.id}/issue-title-pending-changes`)).toBe(
+ expect(localStorage.getItem(`${TEST_ISSUE_A.id}/item-title-pending-changes`)).toBe(
TEST_TITLE,
);
});
@@ -130,7 +130,7 @@ describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => {
describe('when accessing the form with pending changes', () => {
beforeAll(() => {
- localStorage.setItem(`${TEST_ISSUE_A.id}/issue-title-pending-changes`, TEST_TITLE);
+ localStorage.setItem(`${TEST_ISSUE_A.id}/item-title-pending-changes`, TEST_TITLE);
createWrapper();
});
@@ -146,7 +146,7 @@ describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => {
beforeEach(async () => {
createWrapper(TEST_ISSUE_B);
- jest.spyOn(wrapper.vm, 'setActiveIssueTitle').mockImplementation(() => {
+ jest.spyOn(wrapper.vm, 'setActiveItemTitle').mockImplementation(() => {
store.state.boardItems[TEST_ISSUE_B.id].title = TEST_TITLE;
});
findFormInput().vm.$emit('input', TEST_TITLE);
@@ -155,7 +155,7 @@ describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => {
});
it('collapses sidebar and renders former title', () => {
- expect(wrapper.vm.setActiveIssueTitle).not.toHaveBeenCalled();
+ expect(wrapper.vm.setActiveItemTitle).not.toHaveBeenCalled();
expect(findCollapsed().isVisible()).toBe(true);
expect(findTitle().text()).toBe(TEST_ISSUE_B.title);
});
@@ -165,7 +165,7 @@ describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => {
beforeEach(async () => {
createWrapper(TEST_ISSUE_B);
- jest.spyOn(wrapper.vm, 'setActiveIssueTitle').mockImplementation(() => {
+ jest.spyOn(wrapper.vm, 'setActiveItemTitle').mockImplementation(() => {
throw new Error(['failed mutation']);
});
findFormInput().vm.$emit('input', 'Invalid title');
@@ -173,7 +173,7 @@ describe('~/boards/components/sidebar/board_sidebar_issue_title.vue', () => {
await wrapper.vm.$nextTick();
});
- it('collapses sidebar and renders former issue title', () => {
+ it('collapses sidebar and renders former item title', () => {
expect(findCollapsed().isVisible()).toBe(true);
expect(findTitle().text()).toContain(TEST_ISSUE_B.title);
expect(createFlash).toHaveBeenCalled();
diff --git a/spec/frontend/boards/mock_data.js b/spec/frontend/boards/mock_data.js
index 500240d00fc..1c5b7cf8248 100644
--- a/spec/frontend/boards/mock_data.js
+++ b/spec/frontend/boards/mock_data.js
@@ -3,6 +3,7 @@
import { keyBy } from 'lodash';
import Vue from 'vue';
import '~/boards/models/list';
+import { ListType } from '~/boards/constants';
import boardsStore from '~/boards/stores/boards_store';
export const boardObj = {
@@ -125,7 +126,7 @@ export const labels = [
export const rawIssue = {
title: 'Issue 1',
id: 'gid://gitlab/Issue/436',
- iid: 27,
+ iid: '27',
dueDate: null,
timeEstimate: 0,
weight: null,
@@ -152,7 +153,7 @@ export const rawIssue = {
export const mockIssue = {
id: 'gid://gitlab/Issue/436',
- iid: 27,
+ iid: '27',
title: 'Issue 1',
dueDate: null,
timeEstimate: 0,
@@ -398,3 +399,128 @@ export const mockActiveGroupProjects = [
{ ...mockGroupProject1, archived: false },
{ ...mockGroupProject2, archived: false },
];
+
+export const mockIssueGroupPath = 'gitlab-org';
+export const mockIssueProjectPath = `${mockIssueGroupPath}/gitlab-test`;
+
+export const mockBlockingIssue1 = {
+ id: 'gid://gitlab/Issue/525',
+ iid: '6',
+ title: 'blocking issue title 1',
+ reference: 'gitlab-org/my-project-1#6',
+ webUrl: 'http://gdk.test:3000/gitlab-org/my-project-1/-/issues/6',
+ __typename: 'Issue',
+};
+
+export const mockBlockingIssue2 = {
+ id: 'gid://gitlab/Issue/524',
+ iid: '5',
+ title:
+ 'blocking issue title 2 + blocking issue title 2 + blocking issue title 2 + blocking issue title 2',
+ reference: 'gitlab-org/my-project-1#5',
+ webUrl: 'http://gdk.test:3000/gitlab-org/my-project-1/-/issues/5',
+ __typename: 'Issue',
+};
+
+export const mockBlockingIssue3 = {
+ id: 'gid://gitlab/Issue/523',
+ iid: '4',
+ title: 'blocking issue title 3',
+ reference: 'gitlab-org/my-project-1#4',
+ webUrl: 'http://gdk.test:3000/gitlab-org/my-project-1/-/issues/4',
+ __typename: 'Issue',
+};
+
+export const mockBlockingIssue4 = {
+ id: 'gid://gitlab/Issue/522',
+ iid: '3',
+ title: 'blocking issue title 4',
+ reference: 'gitlab-org/my-project-1#3',
+ webUrl: 'http://gdk.test:3000/gitlab-org/my-project-1/-/issues/3',
+ __typename: 'Issue',
+};
+
+export const mockBlockingIssuablesResponse1 = {
+ data: {
+ issuable: {
+ __typename: 'Issue',
+ id: 'gid://gitlab/Issue/527',
+ blockingIssuables: {
+ __typename: 'IssueConnection',
+ nodes: [mockBlockingIssue1],
+ },
+ },
+ },
+};
+
+export const mockBlockingIssuablesResponse2 = {
+ data: {
+ issuable: {
+ __typename: 'Issue',
+ id: 'gid://gitlab/Issue/527',
+ blockingIssuables: {
+ __typename: 'IssueConnection',
+ nodes: [mockBlockingIssue2],
+ },
+ },
+ },
+};
+
+export const mockBlockingIssuablesResponse3 = {
+ data: {
+ issuable: {
+ __typename: 'Issue',
+ id: 'gid://gitlab/Issue/527',
+ blockingIssuables: {
+ __typename: 'IssueConnection',
+ nodes: [mockBlockingIssue1, mockBlockingIssue2, mockBlockingIssue3, mockBlockingIssue4],
+ },
+ },
+ },
+};
+
+export const mockBlockedIssue1 = {
+ id: '527',
+ blockedByCount: 1,
+};
+
+export const mockBlockedIssue2 = {
+ id: '527',
+ blockedByCount: 4,
+ webUrl: 'http://gdk.test:3000/gitlab-org/my-project-1/-/issues/0',
+};
+
+export const mockMoveIssueParams = {
+ itemId: 1,
+ fromListId: 'gid://gitlab/List/1',
+ toListId: 'gid://gitlab/List/2',
+ moveBeforeId: undefined,
+ moveAfterId: undefined,
+};
+
+export const mockMoveState = {
+ boardLists: {
+ 'gid://gitlab/List/1': {
+ listType: ListType.backlog,
+ },
+ 'gid://gitlab/List/2': {
+ listType: ListType.closed,
+ },
+ },
+ boardItems: {
+ [mockMoveIssueParams.itemId]: { foo: 'bar' },
+ },
+ boardItemsByListId: {
+ [mockMoveIssueParams.fromListId]: [mockMoveIssueParams.itemId],
+ [mockMoveIssueParams.toListId]: [],
+ },
+};
+
+export const mockMoveData = {
+ reordering: false,
+ shouldClone: false,
+ itemNotInToList: true,
+ originalIndex: 0,
+ originalIssue: { foo: 'bar' },
+ ...mockMoveIssueParams,
+};
diff --git a/spec/frontend/boards/modal_store_spec.js b/spec/frontend/boards/modal_store_spec.js
deleted file mode 100644
index 5b5ae4b6556..00000000000
--- a/spec/frontend/boards/modal_store_spec.js
+++ /dev/null
@@ -1,134 +0,0 @@
-/* global ListIssue */
-
-import '~/boards/models/label';
-import '~/boards/models/assignee';
-import '~/boards/models/issue';
-import '~/boards/models/list';
-import Store from '~/boards/stores/modal_store';
-
-describe('Modal store', () => {
- let issue;
- let issue2;
-
- beforeEach(() => {
- // Set up default state
- Store.store.issues = [];
- Store.store.selectedIssues = [];
-
- issue = new ListIssue({
- title: 'Testing',
- id: 1,
- iid: 1,
- confidential: false,
- labels: [],
- assignees: [],
- });
- issue2 = new ListIssue({
- title: 'Testing',
- id: 2,
- iid: 2,
- confidential: false,
- labels: [],
- assignees: [],
- });
- Store.store.issues.push(issue);
- Store.store.issues.push(issue2);
- });
-
- it('returns selected count', () => {
- expect(Store.selectedCount()).toBe(0);
- });
-
- it('toggles the issue as selected', () => {
- Store.toggleIssue(issue);
-
- expect(issue.selected).toBe(true);
- expect(Store.selectedCount()).toBe(1);
- });
-
- it('toggles the issue as un-selected', () => {
- Store.toggleIssue(issue);
- Store.toggleIssue(issue);
-
- expect(issue.selected).toBe(false);
- expect(Store.selectedCount()).toBe(0);
- });
-
- it('toggles all issues as selected', () => {
- Store.toggleAll();
-
- expect(issue.selected).toBe(true);
- expect(issue2.selected).toBe(true);
- expect(Store.selectedCount()).toBe(2);
- });
-
- it('toggles all issues as un-selected', () => {
- Store.toggleAll();
- Store.toggleAll();
-
- expect(issue.selected).toBe(false);
- expect(issue2.selected).toBe(false);
- expect(Store.selectedCount()).toBe(0);
- });
-
- it('toggles all if a single issue is selected', () => {
- Store.toggleIssue(issue);
- Store.toggleAll();
-
- expect(issue.selected).toBe(true);
- expect(issue2.selected).toBe(true);
- expect(Store.selectedCount()).toBe(2);
- });
-
- it('adds issue to selected array', () => {
- issue.selected = true;
- Store.addSelectedIssue(issue);
-
- expect(Store.selectedCount()).toBe(1);
- });
-
- it('removes issue from selected array', () => {
- Store.addSelectedIssue(issue);
- Store.removeSelectedIssue(issue);
-
- expect(Store.selectedCount()).toBe(0);
- });
-
- it('returns selected issue index if present', () => {
- Store.toggleIssue(issue);
-
- expect(Store.selectedIssueIndex(issue)).toBe(0);
- });
-
- it('returns -1 if issue is not selected', () => {
- expect(Store.selectedIssueIndex(issue)).toBe(-1);
- });
-
- it('finds the selected issue', () => {
- Store.toggleIssue(issue);
-
- expect(Store.findSelectedIssue(issue)).toBe(issue);
- });
-
- it('does not find a selected issue', () => {
- expect(Store.findSelectedIssue(issue)).toBe(undefined);
- });
-
- it('does not remove from selected issue if tab is not all', () => {
- Store.store.activeTab = 'selected';
-
- Store.toggleIssue(issue);
- Store.toggleIssue(issue);
-
- expect(Store.store.selectedIssues.length).toBe(1);
- expect(Store.selectedCount()).toBe(0);
- });
-
- it('gets selected issue array with only selected issues', () => {
- Store.toggleIssue(issue);
- Store.toggleIssue(issue2);
- Store.toggleIssue(issue2);
-
- expect(Store.getSelectedIssues().length).toBe(1);
- });
-});
diff --git a/spec/frontend/boards/stores/actions_spec.js b/spec/frontend/boards/stores/actions_spec.js
index 69d2c8977fb..460e77a3f03 100644
--- a/spec/frontend/boards/stores/actions_spec.js
+++ b/spec/frontend/boards/stores/actions_spec.js
@@ -1,16 +1,21 @@
+import * as Sentry from '@sentry/browser';
+import issueMoveListMutation from 'ee_else_ce/boards/graphql/issue_move_list.mutation.graphql';
import testAction from 'helpers/vuex_action_helper';
import {
fullBoardId,
formatListIssues,
formatBoardLists,
formatIssueInput,
+ formatIssue,
+ getMoveData,
} from '~/boards/boards_util';
-import { inactiveId, ISSUABLE } from '~/boards/constants';
+import { inactiveId, ISSUABLE, ListType } from '~/boards/constants';
import destroyBoardListMutation from '~/boards/graphql/board_list_destroy.mutation.graphql';
import issueCreateMutation from '~/boards/graphql/issue_create.mutation.graphql';
-import issueMoveListMutation from '~/boards/graphql/issue_move_list.mutation.graphql';
import actions, { gqlClient } from '~/boards/stores/actions';
import * as types from '~/boards/stores/mutation_types';
+import { getIdFromGraphQLId } from '~/graphql_shared/utils';
+
import {
mockLists,
mockListsById,
@@ -22,6 +27,9 @@ import {
labels,
mockActiveIssue,
mockGroupProjects,
+ mockMoveIssueParams,
+ mockMoveState,
+ mockMoveData,
} from '../mock_data';
jest.mock('~/flash');
@@ -638,73 +646,314 @@ describe('resetIssues', () => {
});
describe('moveItem', () => {
- it('should dispatch moveIssue action', () => {
+ it('should dispatch moveIssue action with payload', () => {
+ const payload = { mock: 'payload' };
+
testAction({
action: actions.moveItem,
- expectedActions: [{ type: 'moveIssue' }],
+ payload,
+ expectedActions: [{ type: 'moveIssue', payload }],
});
});
});
describe('moveIssue', () => {
- const listIssues = {
- 'gid://gitlab/List/1': [436, 437],
- 'gid://gitlab/List/2': [],
- };
-
- const issues = {
- 436: mockIssue,
- 437: mockIssue2,
- };
-
- const state = {
- fullPath: 'gitlab-org',
- boardId: '1',
- boardType: 'group',
- disabled: false,
- boardLists: mockLists,
- boardItemsByListId: listIssues,
- boardItems: issues,
- };
+ it('should dispatch a correct set of actions', () => {
+ testAction({
+ action: actions.moveIssue,
+ payload: mockMoveIssueParams,
+ state: mockMoveState,
+ expectedActions: [
+ { type: 'moveIssueCard', payload: mockMoveData },
+ { type: 'updateMovedIssue', payload: mockMoveData },
+ { type: 'updateIssueOrder', payload: { moveData: mockMoveData } },
+ ],
+ });
+ });
+});
- it('should commit MOVE_ISSUE mutation and MOVE_ISSUE_SUCCESS mutation when successful', (done) => {
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- issueMoveList: {
- issue: rawIssue,
- errors: [],
+describe('moveIssueCard and undoMoveIssueCard', () => {
+ describe('card should move without cloning', () => {
+ let state;
+ let params;
+ let moveMutations;
+ let undoMutations;
+
+ describe('when re-ordering card', () => {
+ beforeEach(
+ ({
+ itemId = 123,
+ fromListId = 'gid://gitlab/List/1',
+ toListId = 'gid://gitlab/List/1',
+ originalIssue = { foo: 'bar' },
+ originalIndex = 0,
+ moveBeforeId = undefined,
+ moveAfterId = undefined,
+ } = {}) => {
+ state = {
+ boardLists: {
+ [toListId]: { listType: ListType.backlog },
+ [fromListId]: { listType: ListType.backlog },
+ },
+ boardItems: { [itemId]: originalIssue },
+ boardItemsByListId: { [fromListId]: [123] },
+ };
+ params = { itemId, fromListId, toListId, moveBeforeId, moveAfterId };
+ moveMutations = [
+ { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: { itemId, listId: toListId, moveBeforeId, moveAfterId },
+ },
+ ];
+ undoMutations = [
+ { type: types.UPDATE_BOARD_ITEM, payload: originalIssue },
+ { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: { itemId, listId: fromListId, atIndex: originalIndex },
+ },
+ ];
},
- },
+ );
+
+ it('moveIssueCard commits a correct set of mutations', () => {
+ testAction({
+ action: actions.moveIssueCard,
+ state,
+ payload: getMoveData(state, params),
+ expectedMutations: moveMutations,
+ });
+ });
+
+ it('undoMoveIssueCard commits a correct set of mutations', () => {
+ testAction({
+ action: actions.undoMoveIssueCard,
+ state,
+ payload: getMoveData(state, params),
+ expectedMutations: undoMutations,
+ });
+ });
});
- testAction(
- actions.moveIssue,
- {
- itemId: '436',
- itemIid: mockIssue.iid,
- itemPath: mockIssue.referencePath,
- fromListId: 'gid://gitlab/List/1',
- toListId: 'gid://gitlab/List/2',
- },
- state,
+ describe.each([
[
+ 'issue moves out of backlog',
{
- type: types.MOVE_ISSUE,
- payload: {
- originalIssue: mockIssue,
- fromListId: 'gid://gitlab/List/1',
- toListId: 'gid://gitlab/List/2',
- },
+ fromListType: ListType.backlog,
+ toListType: ListType.label,
},
+ ],
+ [
+ 'issue card moves to closed',
{
- type: types.MOVE_ISSUE_SUCCESS,
- payload: { issue: rawIssue },
+ fromListType: ListType.label,
+ toListType: ListType.closed,
},
],
- [],
- done,
- );
+ [
+ 'issue card moves to non-closed, non-backlog list of the same type',
+ {
+ fromListType: ListType.label,
+ toListType: ListType.label,
+ },
+ ],
+ ])('when %s', (_, { toListType, fromListType }) => {
+ beforeEach(
+ ({
+ itemId = 123,
+ fromListId = 'gid://gitlab/List/1',
+ toListId = 'gid://gitlab/List/2',
+ originalIssue = { foo: 'bar' },
+ originalIndex = 0,
+ moveBeforeId = undefined,
+ moveAfterId = undefined,
+ } = {}) => {
+ state = {
+ boardLists: {
+ [fromListId]: { listType: fromListType },
+ [toListId]: { listType: toListType },
+ },
+ boardItems: { [itemId]: originalIssue },
+ boardItemsByListId: { [fromListId]: [123], [toListId]: [] },
+ };
+ params = { itemId, fromListId, toListId, moveBeforeId, moveAfterId };
+ moveMutations = [
+ { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: { itemId, listId: toListId, moveBeforeId, moveAfterId },
+ },
+ ];
+ undoMutations = [
+ { type: types.UPDATE_BOARD_ITEM, payload: originalIssue },
+ { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: toListId } },
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: { itemId, listId: fromListId, atIndex: originalIndex },
+ },
+ ];
+ },
+ );
+
+ it('moveIssueCard commits a correct set of mutations', () => {
+ testAction({
+ action: actions.moveIssueCard,
+ state,
+ payload: getMoveData(state, params),
+ expectedMutations: moveMutations,
+ });
+ });
+
+ it('undoMoveIssueCard commits a correct set of mutations', () => {
+ testAction({
+ action: actions.undoMoveIssueCard,
+ state,
+ payload: getMoveData(state, params),
+ expectedMutations: undoMutations,
+ });
+ });
+ });
+ });
+
+ describe('card should clone on move', () => {
+ let state;
+ let params;
+ let moveMutations;
+ let undoMutations;
+
+ describe.each([
+ [
+ 'issue card moves to non-closed, non-backlog list of a different type',
+ {
+ fromListType: ListType.label,
+ toListType: ListType.assignee,
+ },
+ ],
+ ])('when %s', (_, { toListType, fromListType }) => {
+ beforeEach(
+ ({
+ itemId = 123,
+ fromListId = 'gid://gitlab/List/1',
+ toListId = 'gid://gitlab/List/2',
+ originalIssue = { foo: 'bar' },
+ originalIndex = 0,
+ moveBeforeId = undefined,
+ moveAfterId = undefined,
+ } = {}) => {
+ state = {
+ boardLists: {
+ [fromListId]: { listType: fromListType },
+ [toListId]: { listType: toListType },
+ },
+ boardItems: { [itemId]: originalIssue },
+ boardItemsByListId: { [fromListId]: [123], [toListId]: [] },
+ };
+ params = { itemId, fromListId, toListId, moveBeforeId, moveAfterId };
+ moveMutations = [
+ { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: { itemId, listId: toListId, moveBeforeId, moveAfterId },
+ },
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: { itemId, listId: fromListId, atIndex: originalIndex },
+ },
+ ];
+ undoMutations = [
+ { type: types.UPDATE_BOARD_ITEM, payload: originalIssue },
+ { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: fromListId } },
+ { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload: { itemId, listId: toListId } },
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: { itemId, listId: fromListId, atIndex: originalIndex },
+ },
+ ];
+ },
+ );
+
+ it('moveIssueCard commits a correct set of mutations', () => {
+ testAction({
+ action: actions.moveIssueCard,
+ state,
+ payload: getMoveData(state, params),
+ expectedMutations: moveMutations,
+ });
+ });
+
+ it('undoMoveIssueCard commits a correct set of mutations', () => {
+ testAction({
+ action: actions.undoMoveIssueCard,
+ state,
+ payload: getMoveData(state, params),
+ expectedMutations: undoMutations,
+ });
+ });
+ });
});
+});
+
+describe('updateMovedIssueCard', () => {
+ const label1 = {
+ id: 'label1',
+ };
+
+ it.each([
+ [
+ 'issue without a label is moved to a label list',
+ {
+ state: {
+ boardLists: {
+ from: {},
+ to: {
+ listType: ListType.label,
+ label: label1,
+ },
+ },
+ boardItems: {
+ 1: {
+ labels: [],
+ },
+ },
+ },
+ moveData: {
+ itemId: 1,
+ fromListId: 'from',
+ toListId: 'to',
+ },
+ updatedIssue: { labels: [label1] },
+ },
+ ],
+ ])(
+ 'should commit UPDATE_BOARD_ITEM with a correctly updated issue data when %s',
+ (_, { state, moveData, updatedIssue }) => {
+ testAction({
+ action: actions.updateMovedIssue,
+ payload: moveData,
+ state,
+ expectedMutations: [{ type: types.UPDATE_BOARD_ITEM, payload: updatedIssue }],
+ });
+ },
+ );
+});
+
+describe('updateIssueOrder', () => {
+ const issues = {
+ 436: mockIssue,
+ 437: mockIssue2,
+ };
+
+ const state = {
+ boardItems: issues,
+ boardId: 'gid://gitlab/Board/1',
+ };
+
+ const moveData = {
+ itemId: 436,
+ fromListId: 'gid://gitlab/List/1',
+ toListId: 'gid://gitlab/List/2',
+ };
it('calls mutate with the correct variables', () => {
const mutationVariables = {
@@ -728,61 +977,56 @@ describe('moveIssue', () => {
},
});
- actions.moveIssue(
- { state, commit: () => {} },
- {
- itemId: mockIssue.id,
- itemIid: mockIssue.iid,
- itemPath: mockIssue.referencePath,
- fromListId: 'gid://gitlab/List/1',
- toListId: 'gid://gitlab/List/2',
- },
- );
+ actions.updateIssueOrder({ state, commit: () => {}, dispatch: () => {} }, { moveData });
expect(gqlClient.mutate).toHaveBeenCalledWith(mutationVariables);
});
- it('should commit MOVE_ISSUE mutation and MOVE_ISSUE_FAILURE mutation when unsuccessful', (done) => {
+ it('should commit MUTATE_ISSUE_SUCCESS mutation when successful', () => {
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
data: {
issueMoveList: {
- issue: {},
- errors: [{ foo: 'bar' }],
+ issue: rawIssue,
+ errors: [],
},
},
});
testAction(
- actions.moveIssue,
- {
- itemId: '436',
- itemIid: mockIssue.iid,
- itemPath: mockIssue.referencePath,
- fromListId: 'gid://gitlab/List/1',
- toListId: 'gid://gitlab/List/2',
- },
+ actions.updateIssueOrder,
+ { moveData },
state,
[
{
- type: types.MOVE_ISSUE,
- payload: {
- originalIssue: mockIssue,
- fromListId: 'gid://gitlab/List/1',
- toListId: 'gid://gitlab/List/2',
- },
+ type: types.MUTATE_ISSUE_SUCCESS,
+ payload: { issue: rawIssue },
},
+ ],
+ [],
+ );
+ });
+
+ it('should commit SET_ERROR and dispatch undoMoveIssueCard', () => {
+ jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
+ data: {
+ issueMoveList: {
+ issue: {},
+ errors: [{ foo: 'bar' }],
+ },
+ },
+ });
+
+ testAction(
+ actions.updateIssueOrder,
+ { moveData },
+ state,
+ [
{
- type: types.MOVE_ISSUE_FAILURE,
- payload: {
- originalIssue: mockIssue,
- fromListId: 'gid://gitlab/List/1',
- toListId: 'gid://gitlab/List/2',
- originalIndex: 0,
- },
+ type: types.SET_ERROR,
+ payload: 'An error occurred while moving the issue. Please try again.',
},
],
- [],
- done,
+ [{ type: 'undoMoveIssueCard', payload: moveData }],
);
});
});
@@ -798,11 +1042,11 @@ describe('setAssignees', () => {
testAction(
actions.setAssignees,
[node],
- { activeIssue: { iid, referencePath: refPath }, commit: () => {} },
+ { activeBoardItem: { iid, referencePath: refPath }, commit: () => {} },
[
{
- type: 'UPDATE_ISSUE_BY_ID',
- payload: { prop: 'assignees', issueId: undefined, value: [node] },
+ type: 'UPDATE_BOARD_ITEM_BY_ID',
+ payload: { prop: 'assignees', itemId: undefined, value: [node] },
},
],
[],
@@ -812,7 +1056,43 @@ describe('setAssignees', () => {
});
});
-describe('createNewIssue', () => {
+describe('addListItem', () => {
+ it('should commit ADD_BOARD_ITEM_TO_LIST and UPDATE_BOARD_ITEM mutations', () => {
+ const payload = {
+ list: mockLists[0],
+ item: mockIssue,
+ position: 0,
+ };
+
+ testAction(actions.addListItem, payload, {}, [
+ {
+ type: types.ADD_BOARD_ITEM_TO_LIST,
+ payload: {
+ listId: mockLists[0].id,
+ itemId: mockIssue.id,
+ atIndex: 0,
+ },
+ },
+ { type: types.UPDATE_BOARD_ITEM, payload: mockIssue },
+ ]);
+ });
+});
+
+describe('removeListItem', () => {
+ it('should commit REMOVE_BOARD_ITEM_FROM_LIST and REMOVE_BOARD_ITEM mutations', () => {
+ const payload = {
+ listId: mockLists[0].id,
+ itemId: mockIssue.id,
+ };
+
+ testAction(actions.removeListItem, payload, {}, [
+ { type: types.REMOVE_BOARD_ITEM_FROM_LIST, payload },
+ { type: types.REMOVE_BOARD_ITEM, payload: mockIssue.id },
+ ]);
+ });
+});
+
+describe('addListNewIssue', () => {
const state = {
boardType: 'group',
fullPath: 'gitlab-org/gitlab',
@@ -839,19 +1119,7 @@ describe('createNewIssue', () => {
},
};
- it('should return issue from API on success', async () => {
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- createIssue: {
- issue: mockIssue,
- errors: [],
- },
- },
- });
-
- const result = await actions.createNewIssue({ state }, mockIssue);
- expect(result).toEqual(mockIssue);
- });
+ const fakeList = { id: 'gid://gitlab/List/123' };
it('should add board scope to the issue being created', async () => {
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
@@ -863,7 +1131,11 @@ describe('createNewIssue', () => {
},
});
- await actions.createNewIssue({ state: stateWithBoardConfig }, mockIssue);
+ await actions.addListNewIssue(
+ { dispatch: jest.fn(), commit: jest.fn(), state: stateWithBoardConfig },
+ { issueInput: mockIssue, list: fakeList },
+ );
+
expect(gqlClient.mutate).toHaveBeenCalledWith({
mutation: issueCreateMutation,
variables: {
@@ -890,7 +1162,11 @@ describe('createNewIssue', () => {
const payload = formatIssueInput(issue, stateWithBoardConfig.boardConfig);
- await actions.createNewIssue({ state: stateWithBoardConfig }, issue);
+ await actions.addListNewIssue(
+ { dispatch: jest.fn(), commit: jest.fn(), state: stateWithBoardConfig },
+ { issueInput: issue, list: fakeList },
+ );
+
expect(gqlClient.mutate).toHaveBeenCalledWith({
mutation: issueCreateMutation,
variables: {
@@ -901,51 +1177,92 @@ describe('createNewIssue', () => {
expect(payload.assigneeIds).toEqual(['gid://gitlab/User/1', 'gid://gitlab/User/2']);
});
- it('should commit CREATE_ISSUE_FAILURE mutation when API returns an error', (done) => {
- jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
- data: {
- createIssue: {
- issue: mockIssue,
- errors: [{ foo: 'bar' }],
+ describe('when issue creation mutation request succeeds', () => {
+ it('dispatches a correct set of actions', () => {
+ jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
+ data: {
+ createIssue: {
+ issue: mockIssue,
+ errors: [],
+ },
},
- },
+ });
+
+ testAction({
+ action: actions.addListNewIssue,
+ payload: {
+ issueInput: mockIssue,
+ list: fakeList,
+ placeholderId: 'tmp',
+ },
+ state,
+ expectedActions: [
+ {
+ type: 'addListItem',
+ payload: {
+ list: fakeList,
+ item: formatIssue({ ...mockIssue, id: 'tmp' }),
+ position: 0,
+ },
+ },
+ { type: 'removeListItem', payload: { listId: fakeList.id, itemId: 'tmp' } },
+ {
+ type: 'addListItem',
+ payload: {
+ list: fakeList,
+ item: formatIssue({ ...mockIssue, id: getIdFromGraphQLId(mockIssue.id) }),
+ position: 0,
+ },
+ },
+ ],
+ });
});
-
- const payload = mockIssue;
-
- testAction(
- actions.createNewIssue,
- payload,
- state,
- [{ type: types.CREATE_ISSUE_FAILURE }],
- [],
- done,
- );
});
-});
-
-describe('addListIssue', () => {
- it('should commit ADD_ISSUE_TO_LIST mutation', (done) => {
- const payload = {
- list: mockLists[0],
- issue: mockIssue,
- position: 0,
- };
- testAction(
- actions.addListIssue,
- payload,
- {},
- [{ type: types.ADD_ISSUE_TO_LIST, payload }],
- [],
- done,
- );
+ describe('when issue creation mutation request fails', () => {
+ it('dispatches a correct set of actions and mutations', () => {
+ jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
+ data: {
+ createIssue: {
+ issue: mockIssue,
+ errors: [{ foo: 'bar' }],
+ },
+ },
+ });
+
+ testAction({
+ action: actions.addListNewIssue,
+ payload: {
+ issueInput: mockIssue,
+ list: fakeList,
+ placeholderId: 'tmp',
+ },
+ state,
+ expectedActions: [
+ {
+ type: 'addListItem',
+ payload: {
+ list: fakeList,
+ item: formatIssue({ ...mockIssue, id: 'tmp' }),
+ position: 0,
+ },
+ },
+ { type: 'removeListItem', payload: { listId: fakeList.id, itemId: 'tmp' } },
+ ],
+ expectedMutations: [
+ {
+ type: types.SET_ERROR,
+ payload: 'An error occurred while creating the issue. Please try again.',
+ },
+ ],
+ });
+ });
});
});
describe('setActiveIssueLabels', () => {
const state = { boardItems: { [mockIssue.id]: mockIssue } };
- const getters = { activeIssue: mockIssue };
+ const getters = { activeBoardItem: mockIssue };
const testLabelIds = labels.map((label) => label.id);
const input = {
addLabelIds: testLabelIds,
@@ -959,7 +1276,7 @@ describe('setActiveIssueLabels', () => {
.mockResolvedValue({ data: { updateIssue: { issue: { labels: { nodes: labels } } } } });
const payload = {
- issueId: getters.activeIssue.id,
+ itemId: getters.activeBoardItem.id,
prop: 'labels',
value: labels,
};
@@ -970,7 +1287,7 @@ describe('setActiveIssueLabels', () => {
{ ...state, ...getters },
[
{
- type: types.UPDATE_ISSUE_BY_ID,
+ type: types.UPDATE_BOARD_ITEM_BY_ID,
payload,
},
],
@@ -990,7 +1307,7 @@ describe('setActiveIssueLabels', () => {
describe('setActiveIssueDueDate', () => {
const state = { boardItems: { [mockIssue.id]: mockIssue } };
- const getters = { activeIssue: mockIssue };
+ const getters = { activeBoardItem: mockIssue };
const testDueDate = '2020-02-20';
const input = {
dueDate: testDueDate,
@@ -1010,7 +1327,7 @@ describe('setActiveIssueDueDate', () => {
});
const payload = {
- issueId: getters.activeIssue.id,
+ itemId: getters.activeBoardItem.id,
prop: 'dueDate',
value: testDueDate,
};
@@ -1021,7 +1338,7 @@ describe('setActiveIssueDueDate', () => {
{ ...state, ...getters },
[
{
- type: types.UPDATE_ISSUE_BY_ID,
+ type: types.UPDATE_BOARD_ITEM_BY_ID,
payload,
},
],
@@ -1039,9 +1356,15 @@ describe('setActiveIssueDueDate', () => {
});
});
-describe('setActiveIssueSubscribed', () => {
- const state = { boardItems: { [mockActiveIssue.id]: mockActiveIssue } };
- const getters = { activeIssue: mockActiveIssue };
+describe('setActiveItemSubscribed', () => {
+ const state = {
+ boardItems: {
+ [mockActiveIssue.id]: mockActiveIssue,
+ },
+ fullPath: 'gitlab-org',
+ issuableType: 'issue',
+ };
+ const getters = { activeBoardItem: mockActiveIssue, isEpicBoard: false };
const subscribedState = true;
const input = {
subscribedState,
@@ -1051,7 +1374,7 @@ describe('setActiveIssueSubscribed', () => {
it('should commit subscribed status', (done) => {
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
data: {
- issueSetSubscription: {
+ updateIssuableSubscription: {
issue: {
subscribed: subscribedState,
},
@@ -1061,18 +1384,18 @@ describe('setActiveIssueSubscribed', () => {
});
const payload = {
- issueId: getters.activeIssue.id,
+ itemId: getters.activeBoardItem.id,
prop: 'subscribed',
value: subscribedState,
};
testAction(
- actions.setActiveIssueSubscribed,
+ actions.setActiveItemSubscribed,
input,
{ ...state, ...getters },
[
{
- type: types.UPDATE_ISSUE_BY_ID,
+ type: types.UPDATE_BOARD_ITEM_BY_ID,
payload,
},
],
@@ -1084,15 +1407,15 @@ describe('setActiveIssueSubscribed', () => {
it('throws error if fails', async () => {
jest
.spyOn(gqlClient, 'mutate')
- .mockResolvedValue({ data: { issueSetSubscription: { errors: ['failed mutation'] } } });
+ .mockResolvedValue({ data: { updateIssuableSubscription: { errors: ['failed mutation'] } } });
- await expect(actions.setActiveIssueSubscribed({ getters }, input)).rejects.toThrow(Error);
+ await expect(actions.setActiveItemSubscribed({ getters }, input)).rejects.toThrow(Error);
});
});
describe('setActiveIssueMilestone', () => {
const state = { boardItems: { [mockIssue.id]: mockIssue } };
- const getters = { activeIssue: mockIssue };
+ const getters = { activeBoardItem: mockIssue };
const testMilestone = {
...mockMilestone,
id: 'gid://gitlab/Milestone/1',
@@ -1115,7 +1438,7 @@ describe('setActiveIssueMilestone', () => {
});
const payload = {
- issueId: getters.activeIssue.id,
+ itemId: getters.activeBoardItem.id,
prop: 'milestone',
value: testMilestone,
};
@@ -1126,7 +1449,7 @@ describe('setActiveIssueMilestone', () => {
{ ...state, ...getters },
[
{
- type: types.UPDATE_ISSUE_BY_ID,
+ type: types.UPDATE_BOARD_ITEM_BY_ID,
payload,
},
],
@@ -1144,9 +1467,13 @@ describe('setActiveIssueMilestone', () => {
});
});
-describe('setActiveIssueTitle', () => {
- const state = { boardItems: { [mockIssue.id]: mockIssue } };
- const getters = { activeIssue: mockIssue };
+describe('setActiveItemTitle', () => {
+ const state = {
+ boardItems: { [mockIssue.id]: mockIssue },
+ issuableType: 'issue',
+ fullPath: 'path/f',
+ };
+ const getters = { activeBoardItem: mockIssue, isEpicBoard: false };
const testTitle = 'Test Title';
const input = {
title: testTitle,
@@ -1156,7 +1483,7 @@ describe('setActiveIssueTitle', () => {
it('should commit title after setting the issue', (done) => {
jest.spyOn(gqlClient, 'mutate').mockResolvedValue({
data: {
- updateIssue: {
+ updateIssuableTitle: {
issue: {
title: testTitle,
},
@@ -1166,18 +1493,18 @@ describe('setActiveIssueTitle', () => {
});
const payload = {
- issueId: getters.activeIssue.id,
+ itemId: getters.activeBoardItem.id,
prop: 'title',
value: testTitle,
};
testAction(
- actions.setActiveIssueTitle,
+ actions.setActiveItemTitle,
input,
{ ...state, ...getters },
[
{
- type: types.UPDATE_ISSUE_BY_ID,
+ type: types.UPDATE_BOARD_ITEM_BY_ID,
payload,
},
],
@@ -1191,7 +1518,7 @@ describe('setActiveIssueTitle', () => {
.spyOn(gqlClient, 'mutate')
.mockResolvedValue({ data: { updateIssue: { errors: ['failed mutation'] } } });
- await expect(actions.setActiveIssueTitle({ getters }, input)).rejects.toThrow(Error);
+ await expect(actions.setActiveItemTitle({ getters }, input)).rejects.toThrow(Error);
});
});
@@ -1321,7 +1648,7 @@ describe('toggleBoardItemMultiSelection', () => {
testAction(
actions.toggleBoardItemMultiSelection,
boardItem2,
- { activeId: mockActiveIssue.id, activeIssue: mockActiveIssue, selectedBoardItems: [] },
+ { activeId: mockActiveIssue.id, activeBoardItem: mockActiveIssue, selectedBoardItems: [] },
[
{
type: types.ADD_BOARD_ITEM_TO_SELECTION,
@@ -1378,6 +1705,51 @@ describe('toggleBoardItem', () => {
});
});
+describe('setError', () => {
+ it('should commit mutation SET_ERROR', () => {
+ testAction({
+ action: actions.setError,
+ payload: { message: 'mayday' },
+ expectedMutations: [
+ {
+ payload: 'mayday',
+ type: types.SET_ERROR,
+ },
+ ],
+ });
+ });
+
+ it('should capture error using Sentry when captureError is true', () => {
+ jest.spyOn(Sentry, 'captureException');
+
+ const mockError = new Error();
+ actions.setError(
+ { commit: () => {} },
+ {
+ message: 'mayday',
+ error: mockError,
+ captureError: true,
+ },
+ );
+
+ expect(Sentry.captureException).toHaveBeenNthCalledWith(1, mockError);
+ });
+});
+
+describe('unsetError', () => {
+ it('should commit mutation SET_ERROR with undefined as payload', () => {
+ testAction({
+ action: actions.unsetError,
+ expectedMutations: [
+ {
+ payload: undefined,
+ type: types.SET_ERROR,
+ },
+ ],
+ });
+ });
+});
+
describe('fetchBacklog', () => {
expectNotImplemented(actions.fetchBacklog);
});
diff --git a/spec/frontend/boards/stores/getters_spec.js b/spec/frontend/boards/stores/getters_spec.js
index 32d73d861bc..6114ba0af5f 100644
--- a/spec/frontend/boards/stores/getters_spec.js
+++ b/spec/frontend/boards/stores/getters_spec.js
@@ -88,7 +88,7 @@ describe('Boards - Getters', () => {
});
});
- describe('activeIssue', () => {
+ describe('activeBoardItem', () => {
it.each`
id | expected
${'1'} | ${'issue'}
@@ -96,7 +96,7 @@ describe('Boards - Getters', () => {
`('returns $expected when $id is passed to state', ({ id, expected }) => {
const state = { boardItems: { 1: 'issue' }, activeId: id };
- expect(getters.activeIssue(state)).toEqual(expected);
+ expect(getters.activeBoardItem(state)).toEqual(expected);
});
});
@@ -105,14 +105,14 @@ describe('Boards - Getters', () => {
const mockActiveIssue = {
referencePath: 'gitlab-org/gitlab-test#1',
};
- expect(getters.groupPathForActiveIssue({}, { activeIssue: mockActiveIssue })).toEqual(
+ expect(getters.groupPathForActiveIssue({}, { activeBoardItem: mockActiveIssue })).toEqual(
'gitlab-org',
);
});
it('returns empty string as group path when active issue is an empty object', () => {
const mockActiveIssue = {};
- expect(getters.groupPathForActiveIssue({}, { activeIssue: mockActiveIssue })).toEqual('');
+ expect(getters.groupPathForActiveIssue({}, { activeBoardItem: mockActiveIssue })).toEqual('');
});
});
@@ -121,14 +121,16 @@ describe('Boards - Getters', () => {
const mockActiveIssue = {
referencePath: 'gitlab-org/gitlab-test#1',
};
- expect(getters.projectPathForActiveIssue({}, { activeIssue: mockActiveIssue })).toEqual(
+ expect(getters.projectPathForActiveIssue({}, { activeBoardItem: mockActiveIssue })).toEqual(
'gitlab-org/gitlab-test',
);
});
it('returns empty string as project path when active issue is an empty object', () => {
const mockActiveIssue = {};
- expect(getters.projectPathForActiveIssue({}, { activeIssue: mockActiveIssue })).toEqual('');
+ expect(getters.projectPathForActiveIssue({}, { activeBoardItem: mockActiveIssue })).toEqual(
+ '',
+ );
});
});
@@ -177,4 +179,31 @@ describe('Boards - Getters', () => {
expect(getters.activeGroupProjects(state)).toEqual([mockGroupProject1]);
});
});
+
+ describe('isIssueBoard', () => {
+ it.each`
+ issuableType | expected
+ ${'issue'} | ${true}
+ ${'epic'} | ${false}
+ `(
+ 'returns $expected when issuableType on state is $issuableType',
+ ({ issuableType, expected }) => {
+ const state = {
+ issuableType,
+ };
+
+ expect(getters.isIssueBoard(state)).toBe(expected);
+ },
+ );
+ });
+
+ describe('isEpicBoard', () => {
+ afterEach(() => {
+ window.gon = { features: {} };
+ });
+
+ it('returns false', () => {
+ expect(getters.isEpicBoard()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/boards/stores/mutations_spec.js b/spec/frontend/boards/stores/mutations_spec.js
index 33897cc0250..af6d439e294 100644
--- a/spec/frontend/boards/stores/mutations_spec.js
+++ b/spec/frontend/boards/stores/mutations_spec.js
@@ -1,3 +1,4 @@
+import { cloneDeep } from 'lodash';
import { issuableTypes } from '~/boards/constants';
import * as types from '~/boards/stores/mutation_types';
import mutations from '~/boards/stores/mutations';
@@ -9,6 +10,7 @@ import {
mockIssue2,
mockGroupProjects,
labels,
+ mockList,
} from '../mock_data';
const expectNotImplemented = (action) => {
@@ -25,6 +27,14 @@ describe('Board Store Mutations', () => {
'gid://gitlab/List/2': mockLists[1],
};
+ const setBoardsListsState = () => {
+ state = cloneDeep({
+ ...state,
+ boardItemsByListId: { 'gid://gitlab/List/1': [mockIssue.id] },
+ boardLists: { 'gid://gitlab/List/1': mockList },
+ });
+ };
+
beforeEach(() => {
state = defaultState();
});
@@ -335,7 +345,7 @@ describe('Board Store Mutations', () => {
expectNotImplemented(mutations.REQUEST_ADD_ISSUE);
});
- describe('UPDATE_ISSUE_BY_ID', () => {
+ describe('UPDATE_BOARD_ITEM_BY_ID', () => {
const issueId = '1';
const prop = 'id';
const value = '2';
@@ -353,8 +363,8 @@ describe('Board Store Mutations', () => {
describe('when the issue is in state', () => {
it('updates the property of the correct issue', () => {
- mutations.UPDATE_ISSUE_BY_ID(state, {
- issueId,
+ mutations.UPDATE_BOARD_ITEM_BY_ID(state, {
+ itemId: issueId,
prop,
value,
});
@@ -366,8 +376,8 @@ describe('Board Store Mutations', () => {
describe('when the issue is not in state', () => {
it('throws an error', () => {
expect(() => {
- mutations.UPDATE_ISSUE_BY_ID(state, {
- issueId: '3',
+ mutations.UPDATE_BOARD_ITEM_BY_ID(state, {
+ itemId: '3',
prop,
value,
});
@@ -384,41 +394,7 @@ describe('Board Store Mutations', () => {
expectNotImplemented(mutations.RECEIVE_ADD_ISSUE_ERROR);
});
- describe('MOVE_ISSUE', () => {
- it('updates boardItemsByListId, moving issue between lists', () => {
- const listIssues = {
- 'gid://gitlab/List/1': [mockIssue.id, mockIssue2.id],
- 'gid://gitlab/List/2': [],
- };
-
- const issues = {
- 1: mockIssue,
- 2: mockIssue2,
- };
-
- state = {
- ...state,
- boardItemsByListId: listIssues,
- boardLists: initialBoardListsState,
- boardItems: issues,
- };
-
- mutations.MOVE_ISSUE(state, {
- originalIssue: mockIssue2,
- fromListId: 'gid://gitlab/List/1',
- toListId: 'gid://gitlab/List/2',
- });
-
- const updatedListIssues = {
- 'gid://gitlab/List/1': [mockIssue.id],
- 'gid://gitlab/List/2': [mockIssue2.id],
- };
-
- expect(state.boardItemsByListId).toEqual(updatedListIssues);
- });
- });
-
- describe('MOVE_ISSUE_SUCCESS', () => {
+ describe('MUTATE_ISSUE_SUCCESS', () => {
it('updates issue in issues state', () => {
const issues = {
436: { id: rawIssue.id },
@@ -429,7 +405,7 @@ describe('Board Store Mutations', () => {
boardItems: issues,
};
- mutations.MOVE_ISSUE_SUCCESS(state, {
+ mutations.MUTATE_ISSUE_SUCCESS(state, {
issue: rawIssue,
});
@@ -437,33 +413,24 @@ describe('Board Store Mutations', () => {
});
});
- describe('MOVE_ISSUE_FAILURE', () => {
- it('updates boardItemsByListId, reverting moving issue between lists, and sets error message', () => {
- const listIssues = {
- 'gid://gitlab/List/1': [mockIssue.id],
- 'gid://gitlab/List/2': [mockIssue2.id],
- };
+ describe('UPDATE_BOARD_ITEM', () => {
+ it('updates the given issue in state.boardItems', () => {
+ const updatedIssue = { id: 'some_gid', foo: 'bar' };
+ state = { boardItems: { some_gid: { id: 'some_gid' } } };
- state = {
- ...state,
- boardItemsByListId: listIssues,
- boardLists: initialBoardListsState,
- };
+ mutations.UPDATE_BOARD_ITEM(state, updatedIssue);
- mutations.MOVE_ISSUE_FAILURE(state, {
- originalIssue: mockIssue2,
- fromListId: 'gid://gitlab/List/1',
- toListId: 'gid://gitlab/List/2',
- originalIndex: 1,
- });
+ expect(state.boardItems.some_gid).toEqual(updatedIssue);
+ });
+ });
- const updatedListIssues = {
- 'gid://gitlab/List/1': [mockIssue.id, mockIssue2.id],
- 'gid://gitlab/List/2': [],
- };
+ describe('REMOVE_BOARD_ITEM', () => {
+ it('removes the given issue from state.boardItems', () => {
+ state = { boardItems: { some_gid: {}, some_gid2: {} } };
+
+ mutations.REMOVE_BOARD_ITEM(state, 'some_gid');
- expect(state.boardItemsByListId).toEqual(updatedListIssues);
- expect(state.error).toEqual('An error occurred while moving the issue. Please try again.');
+ expect(state.boardItems).toEqual({ some_gid2: {} });
});
});
@@ -479,85 +446,89 @@ describe('Board Store Mutations', () => {
expectNotImplemented(mutations.RECEIVE_UPDATE_ISSUE_ERROR);
});
- describe('CREATE_ISSUE_FAILURE', () => {
- it('sets error message on state', () => {
- mutations.CREATE_ISSUE_FAILURE(state);
+ describe('ADD_BOARD_ITEM_TO_LIST', () => {
+ beforeEach(() => {
+ setBoardsListsState();
+ });
+
+ it.each([
+ [
+ 'at position 0 by default',
+ {
+ payload: {
+ itemId: mockIssue2.id,
+ listId: mockList.id,
+ },
+ listState: [mockIssue2.id, mockIssue.id],
+ },
+ ],
+ [
+ 'at a given position',
+ {
+ payload: {
+ itemId: mockIssue2.id,
+ listId: mockList.id,
+ atIndex: 1,
+ },
+ listState: [mockIssue.id, mockIssue2.id],
+ },
+ ],
+ [
+ "below the issue with id of 'moveBeforeId'",
+ {
+ payload: {
+ itemId: mockIssue2.id,
+ listId: mockList.id,
+ moveBeforeId: mockIssue.id,
+ },
+ listState: [mockIssue.id, mockIssue2.id],
+ },
+ ],
+ [
+ "above the issue with id of 'moveAfterId'",
+ {
+ payload: {
+ itemId: mockIssue2.id,
+ listId: mockList.id,
+ moveAfterId: mockIssue.id,
+ },
+ listState: [mockIssue2.id, mockIssue.id],
+ },
+ ],
+ ])(`inserts an item into a list %s`, (_, { payload, listState }) => {
+ mutations.ADD_BOARD_ITEM_TO_LIST(state, payload);
- expect(state.error).toBe('An error occurred while creating the issue. Please try again.');
+ expect(state.boardItemsByListId[payload.listId]).toEqual(listState);
});
- });
-
- describe('ADD_ISSUE_TO_LIST', () => {
- it('adds issue to issues state and issue id in list in boardItemsByListId', () => {
- const listIssues = {
- 'gid://gitlab/List/1': [mockIssue.id],
- };
- const issues = {
- 1: mockIssue,
- };
-
- state = {
- ...state,
- boardItemsByListId: listIssues,
- boardItems: issues,
- boardLists: initialBoardListsState,
- };
+ it("updates the list's items count", () => {
expect(state.boardLists['gid://gitlab/List/1'].issuesCount).toBe(1);
- mutations.ADD_ISSUE_TO_LIST(state, { list: mockLists[0], issue: mockIssue2 });
+ mutations.ADD_BOARD_ITEM_TO_LIST(state, {
+ itemId: mockIssue2.id,
+ listId: mockList.id,
+ });
- expect(state.boardItemsByListId['gid://gitlab/List/1']).toContain(mockIssue2.id);
- expect(state.boardItems[mockIssue2.id]).toEqual(mockIssue2);
expect(state.boardLists['gid://gitlab/List/1'].issuesCount).toBe(2);
});
});
- describe('ADD_ISSUE_TO_LIST_FAILURE', () => {
- it('removes issue id from list in boardItemsByListId and sets error message', () => {
- const listIssues = {
- 'gid://gitlab/List/1': [mockIssue.id, mockIssue2.id],
- };
- const issues = {
- 1: mockIssue,
- 2: mockIssue2,
- };
-
- state = {
- ...state,
- boardItemsByListId: listIssues,
- boardItems: issues,
- boardLists: initialBoardListsState,
- };
-
- mutations.ADD_ISSUE_TO_LIST_FAILURE(state, { list: mockLists[0], issueId: mockIssue2.id });
-
- expect(state.boardItemsByListId['gid://gitlab/List/1']).not.toContain(mockIssue2.id);
- expect(state.error).toBe('An error occurred while creating the issue. Please try again.');
+ describe('REMOVE_BOARD_ITEM_FROM_LIST', () => {
+ beforeEach(() => {
+ setBoardsListsState();
});
- });
- describe('REMOVE_ISSUE_FROM_LIST', () => {
- it('removes issue id from list in boardItemsByListId and deletes issue from state', () => {
- const listIssues = {
- 'gid://gitlab/List/1': [mockIssue.id, mockIssue2.id],
- };
- const issues = {
- 1: mockIssue,
- 2: mockIssue2,
- };
-
- state = {
- ...state,
- boardItemsByListId: listIssues,
- boardItems: issues,
- boardLists: initialBoardListsState,
- };
+ it("removes an item from a list and updates the list's items count", () => {
+ expect(state.boardLists['gid://gitlab/List/1'].issuesCount).toBe(1);
+ expect(state.boardItemsByListId['gid://gitlab/List/1']).toContain(mockIssue.id);
- mutations.ADD_ISSUE_TO_LIST_FAILURE(state, { list: mockLists[0], issueId: mockIssue2.id });
+ mutations.REMOVE_BOARD_ITEM_FROM_LIST(state, {
+ itemId: mockIssue.id,
+ listId: mockList.id,
+ });
- expect(state.boardItemsByListId['gid://gitlab/List/1']).not.toContain(mockIssue2.id);
- expect(state.boardItems).not.toContain(mockIssue2);
+ expect(state.boardItemsByListId['gid://gitlab/List/1']).not.toContain(mockIssue.id);
+ expect(state.boardLists['gid://gitlab/List/1'].issuesCount).toBe(0);
});
});
@@ -666,4 +637,14 @@ describe('Board Store Mutations', () => {
expect(state.selectedBoardItems).toEqual([]);
});
});
+
+ describe('SET_ERROR', () => {
+ it('should set error state', () => {
+ state.error = undefined;
+
+ mutations[types.SET_ERROR](state, 'mayday');
+
+ expect(state.error).toBe('mayday');
+ });
+ });
});
diff --git a/spec/frontend/branches/components/sort_dropdown_spec.js b/spec/frontend/branches/components/sort_dropdown_spec.js
new file mode 100644
index 00000000000..16ed02bfa88
--- /dev/null
+++ b/spec/frontend/branches/components/sort_dropdown_spec.js
@@ -0,0 +1,91 @@
+import { GlSearchBoxByClick } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import SortDropdown from '~/branches/components/sort_dropdown.vue';
+import * as urlUtils from '~/lib/utils/url_utility';
+
+describe('Branches Sort Dropdown', () => {
+ let wrapper;
+
+ const createWrapper = (props = {}) => {
+ return extendedWrapper(
+ mount(SortDropdown, {
+ provide: {
+ mode: 'overview',
+ projectBranchesFilteredPath: '/root/ci-cd-project-demo/-/branches?state=all',
+ sortOptions: {
+ name_asc: 'Name',
+ updated_asc: 'Oldest updated',
+ updated_desc: 'Last updated',
+ },
+ ...props,
+ },
+ }),
+ );
+ };
+
+ const findSearchBox = () => wrapper.findComponent(GlSearchBoxByClick);
+ const findBranchesDropdown = () => wrapper.findByTestId('branches-dropdown');
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ describe('When in overview mode', () => {
+ beforeEach(() => {
+ wrapper = createWrapper();
+ });
+
+ it('should have a search box with a placeholder', () => {
+ const searchBox = findSearchBox();
+
+ expect(searchBox.exists()).toBe(true);
+ expect(searchBox.find('input').attributes('placeholder')).toBe('Filter by branch name');
+ });
+
+ it('should not have a branches dropdown when in overview mode', () => {
+ const branchesDropdown = findBranchesDropdown();
+
+ expect(branchesDropdown.exists()).toBe(false);
+ });
+ });
+
+ describe('when in All branches mode', () => {
+ beforeEach(() => {
+ wrapper = createWrapper({ mode: 'all' });
+ });
+
+ it('should have a search box with a placeholder', () => {
+ const searchBox = findSearchBox();
+
+ expect(searchBox.exists()).toBe(true);
+ expect(searchBox.find('input').attributes('placeholder')).toBe('Filter by branch name');
+ });
+
+ it('should have a branches dropdown when in all branches mode', () => {
+ const branchesDropdown = findBranchesDropdown();
+
+ expect(branchesDropdown.exists()).toBe(true);
+ });
+ });
+
+ describe('when submitting a search term', () => {
+ beforeEach(() => {
+ urlUtils.visitUrl = jest.fn();
+
+ wrapper = createWrapper();
+ });
+
+ it('should call visitUrl', () => {
+ const searchBox = findSearchBox();
+
+ searchBox.vm.$emit('submit');
+
+ expect(urlUtils.visitUrl).toHaveBeenCalledWith(
+ '/root/ci-cd-project-demo/-/branches?state=all',
+ );
+ });
+ });
+});
diff --git a/spec/frontend/captcha/apollo_captcha_link_spec.js b/spec/frontend/captcha/apollo_captcha_link_spec.js
new file mode 100644
index 00000000000..e7ff4812ee7
--- /dev/null
+++ b/spec/frontend/captcha/apollo_captcha_link_spec.js
@@ -0,0 +1,165 @@
+import { ApolloLink, Observable } from 'apollo-link';
+
+import { apolloCaptchaLink } from '~/captcha/apollo_captcha_link';
+import UnsolvedCaptchaError from '~/captcha/unsolved_captcha_error';
+import { waitForCaptchaToBeSolved } from '~/captcha/wait_for_captcha_to_be_solved';
+
+jest.mock('~/captcha/wait_for_captcha_to_be_solved');
+
+describe('apolloCaptchaLink', () => {
+ const SPAM_LOG_ID = 'SPAM_LOG_ID';
+ const CAPTCHA_SITE_KEY = 'CAPTCHA_SITE_KEY';
+ const CAPTCHA_RESPONSE = 'CAPTCHA_RESPONSE';
+
+ const SUCCESS_RESPONSE = {
+ data: {
+ user: {
+ id: 3,
+ name: 'foo',
+ },
+ },
+ errors: [],
+ };
+
+ const NON_CAPTCHA_ERROR_RESPONSE = {
+ data: {
+ user: null,
+ },
+ errors: [
+ {
+ message: 'Something is severely wrong with your query.',
+ path: ['user'],
+ locations: [{ line: 2, column: 3 }],
+ extensions: {
+ message: 'Object not found',
+ type: 2,
+ },
+ },
+ ],
+ };
+
+ const SPAM_ERROR_RESPONSE = {
+ data: {
+ user: null,
+ },
+ errors: [
+ {
+ message: 'Your Query was detected to be spam.',
+ path: ['user'],
+ locations: [{ line: 2, column: 3 }],
+ extensions: {
+ spam: true,
+ },
+ },
+ ],
+ };
+
+ const CAPTCHA_ERROR_RESPONSE = {
+ data: {
+ user: null,
+ },
+ errors: [
+ {
+ message: 'This is an unrelated error, captcha should still work despite this.',
+ path: ['user'],
+ locations: [{ line: 2, column: 3 }],
+ },
+ {
+ message: 'You need to solve a Captcha.',
+ path: ['user'],
+ locations: [{ line: 2, column: 3 }],
+ extensions: {
+ spam: true,
+ needs_captcha_response: true,
+ captcha_site_key: CAPTCHA_SITE_KEY,
+ spam_log_id: SPAM_LOG_ID,
+ },
+ },
+ ],
+ };
+
+ let link;
+
+ let mockLinkImplementation;
+ let mockContext;
+
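+  // Test helper: the terminating mock link replies with the queued responses in order,
+  // so a retried request (for example after a solved captcha) receives the next one.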
+ const setupLink = (...responses) => {
+ mockLinkImplementation = jest.fn().mockImplementation(() => {
+ return Observable.of(responses.shift());
+ });
+ link = ApolloLink.from([apolloCaptchaLink, new ApolloLink(mockLinkImplementation)]);
+ };
+
+ function mockOperation() {
+ mockContext = jest.fn();
+ return { operationName: 'operation', variables: {}, setContext: mockContext };
+ }
+
+ it('successful responses are passed through', (done) => {
+ setupLink(SUCCESS_RESPONSE);
+ link.request(mockOperation()).subscribe((result) => {
+ expect(result).toEqual(SUCCESS_RESPONSE);
+ expect(mockLinkImplementation).toHaveBeenCalledTimes(1);
+ expect(waitForCaptchaToBeSolved).not.toHaveBeenCalled();
+ done();
+ });
+ });
+
+ it('non-spam related errors are passed through', (done) => {
+ setupLink(NON_CAPTCHA_ERROR_RESPONSE);
+ link.request(mockOperation()).subscribe((result) => {
+ expect(result).toEqual(NON_CAPTCHA_ERROR_RESPONSE);
+ expect(mockLinkImplementation).toHaveBeenCalledTimes(1);
+ expect(mockContext).not.toHaveBeenCalled();
+ expect(waitForCaptchaToBeSolved).not.toHaveBeenCalled();
+ done();
+ });
+ });
+
+ it('unresolvable spam errors are passed through', (done) => {
+ setupLink(SPAM_ERROR_RESPONSE);
+ link.request(mockOperation()).subscribe((result) => {
+ expect(result).toEqual(SPAM_ERROR_RESPONSE);
+ expect(mockLinkImplementation).toHaveBeenCalledTimes(1);
+ expect(mockContext).not.toHaveBeenCalled();
+ expect(waitForCaptchaToBeSolved).not.toHaveBeenCalled();
+ done();
+ });
+ });
+
+ describe('resolvable spam errors', () => {
+ it('re-submits request with spam headers if the captcha modal was solved correctly', (done) => {
+ waitForCaptchaToBeSolved.mockResolvedValue(CAPTCHA_RESPONSE);
+ setupLink(CAPTCHA_ERROR_RESPONSE, SUCCESS_RESPONSE);
+ link.request(mockOperation()).subscribe((result) => {
+ expect(result).toEqual(SUCCESS_RESPONSE);
+ expect(waitForCaptchaToBeSolved).toHaveBeenCalledWith(CAPTCHA_SITE_KEY);
+ expect(mockContext).toHaveBeenCalledWith({
+ headers: {
+ 'X-GitLab-Captcha-Response': CAPTCHA_RESPONSE,
+ 'X-GitLab-Spam-Log-Id': SPAM_LOG_ID,
+ },
+ });
+ expect(mockLinkImplementation).toHaveBeenCalledTimes(2);
+ done();
+ });
+ });
+
+ it('throws error if the captcha modal was not solved correctly', (done) => {
+ const error = new UnsolvedCaptchaError();
+ waitForCaptchaToBeSolved.mockRejectedValue(error);
+
+ setupLink(CAPTCHA_ERROR_RESPONSE, SUCCESS_RESPONSE);
+ link.request(mockOperation()).subscribe({
+        next: () => done(new Error('Expected the captcha error to be re-thrown')),
+ error: (result) => {
+ expect(result).toEqual(error);
+ expect(waitForCaptchaToBeSolved).toHaveBeenCalledWith(CAPTCHA_SITE_KEY);
+ expect(mockContext).not.toHaveBeenCalled();
+ expect(mockLinkImplementation).toHaveBeenCalledTimes(1);
+ done();
+ },
+ });
+ });
+ });
+});
diff --git a/spec/frontend/cascading_settings/components/lock_popovers_spec.js b/spec/frontend/cascading_settings/components/lock_popovers_spec.js
new file mode 100644
index 00000000000..585e6ac505b
--- /dev/null
+++ b/spec/frontend/cascading_settings/components/lock_popovers_spec.js
@@ -0,0 +1,152 @@
+import { GlPopover } from '@gitlab/ui';
+import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
+import LockPopovers from '~/namespaces/cascading_settings/components/lock_popovers.vue';
+
+describe('LockPopovers', () => {
+ const mockNamespace = {
+ full_name: 'GitLab Org / GitLab',
+ path: '/gitlab-org/gitlab/-/edit',
+ };
+
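+  // Builds the mount target markup the component scans for; data-popover-data is only
+  // attached when the setting is locked by an application setting or an ancestor group.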
+ const createPopoverMountEl = ({
+ lockedByApplicationSetting = false,
+ lockedByAncestor = false,
+ }) => {
+ const popoverMountEl = document.createElement('div');
+ popoverMountEl.classList.add('js-cascading-settings-lock-popover-target');
+
+ const popoverData = {
+ locked_by_application_setting: lockedByApplicationSetting,
+ locked_by_ancestor: lockedByAncestor,
+ };
+
+ if (lockedByApplicationSetting) {
+ popoverMountEl.setAttribute('data-popover-data', JSON.stringify(popoverData));
+ } else if (lockedByAncestor) {
+ popoverMountEl.setAttribute(
+ 'data-popover-data',
+ JSON.stringify({ ...popoverData, ancestor_namespace: mockNamespace }),
+ );
+ }
+
+ document.body.appendChild(popoverMountEl);
+
+ return popoverMountEl;
+ };
+
+ let wrapper;
+ const createWrapper = () => {
+ wrapper = mountExtended(LockPopovers);
+ };
+
+ const findPopover = () => extendedWrapper(wrapper.find(GlPopover));
+ const findByTextInPopover = (text, options) =>
+ findPopover().findByText((_, element) => element.textContent === text, options);
+
+ const expectPopoverMessageExists = (message) => {
+ expect(findByTextInPopover(message).exists()).toBe(true);
+ };
+ const expectCorrectPopoverTarget = (popoverMountEl, popover = findPopover()) => {
+ expect(popover.props('target')).toEqual(popoverMountEl);
+ };
+
+ afterEach(() => {
+ document.body.innerHTML = '';
+ });
+
+ describe('when setting is locked by an application setting', () => {
+ let popoverMountEl;
+
+ beforeEach(() => {
+ popoverMountEl = createPopoverMountEl({ lockedByApplicationSetting: true });
+ createWrapper();
+ });
+
+ it('displays correct popover message', () => {
+ expectPopoverMessageExists('This setting has been enforced by an instance admin.');
+ });
+
+ it('sets `target` prop correctly', () => {
+ expectCorrectPopoverTarget(popoverMountEl);
+ });
+ });
+
+ describe('when setting is locked by an ancestor namespace', () => {
+ let popoverMountEl;
+
+ beforeEach(() => {
+ popoverMountEl = createPopoverMountEl({ lockedByAncestor: true });
+ createWrapper();
+ });
+
+ it('displays correct popover message', () => {
+ expectPopoverMessageExists(
+ `This setting has been enforced by an owner of ${mockNamespace.full_name}.`,
+ );
+ });
+
+ it('displays link to ancestor namespace', () => {
+ expect(
+ findByTextInPopover(mockNamespace.full_name, {
+ selector: `a[href="${mockNamespace.path}"]`,
+ }).exists(),
+ ).toBe(true);
+ });
+
+ it('sets `target` prop correctly', () => {
+ expectCorrectPopoverTarget(popoverMountEl);
+ });
+ });
+
+ describe('when setting is locked by an application setting and an ancestor namespace', () => {
+ let popoverMountEl;
+
+ beforeEach(() => {
+ popoverMountEl = createPopoverMountEl({
+ lockedByAncestor: true,
+ lockedByApplicationSetting: true,
+ });
+ createWrapper();
+ });
+
+ it('application setting takes precedence and correct message is shown', () => {
+ expectPopoverMessageExists('This setting has been enforced by an instance admin.');
+ });
+
+ it('sets `target` prop correctly', () => {
+ expectCorrectPopoverTarget(popoverMountEl);
+ });
+ });
+
+ describe('when setting is not locked', () => {
+ beforeEach(() => {
+ createPopoverMountEl({
+ lockedByAncestor: false,
+ lockedByApplicationSetting: false,
+ });
+ createWrapper();
+ });
+
+ it('does not render popover', () => {
+ expect(findPopover().exists()).toBe(false);
+ });
+ });
+
+ describe('when there are multiple mount elements', () => {
+ let popoverMountEl1;
+ let popoverMountEl2;
+
+ beforeEach(() => {
+ popoverMountEl1 = createPopoverMountEl({ lockedByApplicationSetting: true });
+ popoverMountEl2 = createPopoverMountEl({ lockedByAncestor: true });
+ createWrapper();
+ });
+
+ it('mounts multiple popovers', () => {
+ const popovers = wrapper.findAll(GlPopover).wrappers;
+
+ expectCorrectPopoverTarget(popoverMountEl1, popovers[0]);
+ expectCorrectPopoverTarget(popoverMountEl2, popovers[1]);
+ });
+ });
+});
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
index 991dc8592e9..752783a306a 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
@@ -1,6 +1,7 @@
-import { GlButton } from '@gitlab/ui';
+import { GlButton, GlFormInput } from '@gitlab/ui';
import { createLocalVue, shallowMount, mount } from '@vue/test-utils';
import Vuex from 'vuex';
+import CiEnvironmentsDropdown from '~/ci_variable_list/components/ci_environments_dropdown.vue';
import CiVariableModal from '~/ci_variable_list/components/ci_variable_modal.vue';
import { AWS_ACCESS_KEY_ID } from '~/ci_variable_list/constants';
import createStore from '~/ci_variable_list/store';
@@ -15,7 +16,7 @@ describe('Ci variable modal', () => {
let store;
const createComponent = (method, options = {}) => {
- store = createStore();
+ store = createStore({ isGroup: options.isGroup });
wrapper = method(CiVariableModal, {
attachTo: document.body,
stubs: {
@@ -27,6 +28,7 @@ describe('Ci variable modal', () => {
});
};
+ const findCiEnvironmentsDropdown = () => wrapper.find(CiEnvironmentsDropdown);
const findModal = () => wrapper.find(ModalStub);
const findAddorUpdateButton = () =>
findModal()
@@ -149,6 +151,43 @@ describe('Ci variable modal', () => {
});
});
+ describe('Environment scope', () => {
+ describe('group level variables', () => {
+ it('renders the environment dropdown', () => {
+ createComponent(shallowMount, {
+ isGroup: true,
+ provide: {
+ glFeatures: {
+ groupScopedCiVariables: true,
+ },
+ },
+ });
+
+ expect(findCiEnvironmentsDropdown().exists()).toBe(true);
+ expect(findCiEnvironmentsDropdown().isVisible()).toBe(true);
+ });
+
+ describe('licensed feature is not available', () => {
+ it('disables the dropdown', () => {
+ createComponent(mount, {
+ isGroup: true,
+ provide: {
+ glFeatures: {
+ groupScopedCiVariables: false,
+ },
+ },
+ });
+
+ const environmentScopeInput = wrapper
+ .find('[data-testid="environment-scope"]')
+ .find(GlFormInput);
+ expect(findCiEnvironmentsDropdown().exists()).toBe(false);
+ expect(environmentScopeInput.attributes('readonly')).toBe('readonly');
+ });
+ });
+ });
+ });
+
describe('Validations', () => {
const maskError = 'This variable can not be masked.';
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js b/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js
index ade2d65b857..8367c3f6bb8 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js
+++ b/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js
@@ -1,4 +1,3 @@
-import { GlTable } from '@gitlab/ui';
import { createLocalVue, mount } from '@vue/test-utils';
import Vuex from 'vuex';
import CiVariableTable from '~/ci_variable_list/components/ci_variable_table.vue';
@@ -14,7 +13,6 @@ describe('Ci variable table', () => {
const createComponent = () => {
store = createStore();
- store.state.isGroup = true;
jest.spyOn(store, 'dispatch').mockImplementation();
wrapper = mount(CiVariableTable, {
attachTo: document.body,
@@ -26,7 +24,6 @@ describe('Ci variable table', () => {
const findRevealButton = () => wrapper.find({ ref: 'secret-value-reveal-button' });
const findEditButton = () => wrapper.find({ ref: 'edit-ci-variable' });
const findEmptyVariablesPlaceholder = () => wrapper.find({ ref: 'empty-variables' });
- const findTable = () => wrapper.find(GlTable);
beforeEach(() => {
createComponent();
@@ -40,17 +37,6 @@ describe('Ci variable table', () => {
expect(store.dispatch).toHaveBeenCalledWith('fetchVariables');
});
- it('fields prop does not contain environment_scope if group', () => {
- expect(findTable().props('fields')).not.toEqual(
- expect.arrayContaining([
- expect.objectContaining({
- key: 'environment_scope',
- label: 'Environment Scope',
- }),
- ]),
- );
- });
-
describe('Renders correct data', () => {
it('displays empty message when variables are not present', () => {
expect(findEmptyVariablesPlaceholder().exists()).toBe(true);
diff --git a/spec/frontend/clusters/components/application_row_spec.js b/spec/frontend/clusters/components/application_row_spec.js
index eff3493d7bd..6bad1db542b 100644
--- a/spec/frontend/clusters/components/application_row_spec.js
+++ b/spec/frontend/clusters/components/application_row_spec.js
@@ -89,6 +89,12 @@ describe('Application Row', () => {
checkButtonState('Install', false, true);
});
+ it('has disabled "Externally installed" when APPLICATION_STATUS.EXTERNALLY_INSTALLED', () => {
+ mountComponent({ status: APPLICATION_STATUS.EXTERNALLY_INSTALLED });
+
+ checkButtonState('Externally installed', false, true);
+ });
+
it('has disabled "Installed" when application is installed and not uninstallable', () => {
mountComponent({
status: APPLICATION_STATUS.INSTALLED,
diff --git a/spec/frontend/clusters/services/application_state_machine_spec.js b/spec/frontend/clusters/services/application_state_machine_spec.js
index 55230625ba4..4e731e331c2 100644
--- a/spec/frontend/clusters/services/application_state_machine_spec.js
+++ b/spec/frontend/clusters/services/application_state_machine_spec.js
@@ -20,6 +20,8 @@ const {
UNINSTALLING,
UNINSTALL_ERRORED,
UNINSTALLED,
+ PRE_INSTALLED,
+ EXTERNALLY_INSTALLED,
} = APPLICATION_STATUS;
const NO_EFFECTS = 'no effects';
@@ -29,19 +31,21 @@ describe('applicationStateMachine', () => {
describe(`current state is ${NO_STATUS}`, () => {
it.each`
- expectedState | event | effects
- ${INSTALLING} | ${SCHEDULED} | ${NO_EFFECTS}
- ${NOT_INSTALLABLE} | ${NOT_INSTALLABLE} | ${NO_EFFECTS}
- ${INSTALLABLE} | ${INSTALLABLE} | ${NO_EFFECTS}
- ${INSTALLING} | ${INSTALLING} | ${NO_EFFECTS}
- ${INSTALLED} | ${INSTALLED} | ${NO_EFFECTS}
- ${INSTALLABLE} | ${ERROR} | ${{ installFailed: true }}
- ${UPDATING} | ${UPDATING} | ${NO_EFFECTS}
- ${INSTALLED} | ${UPDATED} | ${NO_EFFECTS}
- ${INSTALLED} | ${UPDATE_ERRORED} | ${{ updateFailed: true }}
- ${UNINSTALLING} | ${UNINSTALLING} | ${NO_EFFECTS}
- ${INSTALLED} | ${UNINSTALL_ERRORED} | ${{ uninstallFailed: true }}
- ${UNINSTALLED} | ${UNINSTALLED} | ${NO_EFFECTS}
+ expectedState | event | effects
+ ${INSTALLING} | ${SCHEDULED} | ${NO_EFFECTS}
+ ${NOT_INSTALLABLE} | ${NOT_INSTALLABLE} | ${NO_EFFECTS}
+ ${INSTALLABLE} | ${INSTALLABLE} | ${NO_EFFECTS}
+ ${INSTALLING} | ${INSTALLING} | ${NO_EFFECTS}
+ ${INSTALLED} | ${INSTALLED} | ${NO_EFFECTS}
+ ${INSTALLABLE} | ${ERROR} | ${{ installFailed: true }}
+ ${UPDATING} | ${UPDATING} | ${NO_EFFECTS}
+ ${INSTALLED} | ${UPDATED} | ${NO_EFFECTS}
+ ${INSTALLED} | ${UPDATE_ERRORED} | ${{ updateFailed: true }}
+ ${UNINSTALLING} | ${UNINSTALLING} | ${NO_EFFECTS}
+ ${INSTALLED} | ${UNINSTALL_ERRORED} | ${{ uninstallFailed: true }}
+ ${UNINSTALLED} | ${UNINSTALLED} | ${NO_EFFECTS}
+ ${PRE_INSTALLED} | ${PRE_INSTALLED} | ${NO_EFFECTS}
+ ${EXTERNALLY_INSTALLED} | ${EXTERNALLY_INSTALLED} | ${NO_EFFECTS}
`(`transitions to $expectedState on $event event and applies $effects`, (data) => {
const { expectedState, event, effects } = data;
const currentAppState = {
diff --git a/spec/frontend/content_editor/components/content_editor_spec.js b/spec/frontend/content_editor/components/content_editor_spec.js
new file mode 100644
index 00000000000..f055a49135b
--- /dev/null
+++ b/spec/frontend/content_editor/components/content_editor_spec.js
@@ -0,0 +1,26 @@
+import { shallowMount } from '@vue/test-utils';
+import { EditorContent } from 'tiptap';
+import ContentEditor from '~/content_editor/components/content_editor.vue';
+import createEditor from '~/content_editor/services/create_editor';
+
+jest.mock('~/content_editor/services/create_editor');
+
+describe('ContentEditor', () => {
+ let wrapper;
+
+ const buildWrapper = () => {
+ wrapper = shallowMount(ContentEditor);
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders editor content component and attaches editor instance', () => {
+ const editor = {};
+
+ createEditor.mockReturnValueOnce(editor);
+ buildWrapper();
+ expect(wrapper.findComponent(EditorContent).props().editor).toBe(editor);
+ });
+});
diff --git a/spec/frontend/content_editor/markdown_processing_examples.js b/spec/frontend/content_editor/markdown_processing_examples.js
new file mode 100644
index 00000000000..12bf2cbb747
--- /dev/null
+++ b/spec/frontend/content_editor/markdown_processing_examples.js
@@ -0,0 +1,19 @@
+import fs from 'fs';
+import path from 'path';
+import jsYaml from 'js-yaml';
+import { toArray } from 'lodash';
+import { getJSONFixture } from 'helpers/fixtures';
+
+export const loadMarkdownApiResult = (testName) => {
+ const fixturePathPrefix = `api/markdown/${testName}.json`;
+
+ return getJSONFixture(fixturePathPrefix);
+};
+
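+// Reads the api_markdown.yml fixture and converts each example object into an array of its
+// values so the examples can be fed directly to it.each.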
+export const loadMarkdownApiExamples = () => {
+ const apiMarkdownYamlPath = path.join(__dirname, '..', 'fixtures', 'api_markdown.yml');
+ const apiMarkdownYamlText = fs.readFileSync(apiMarkdownYamlPath);
+ const apiMarkdownExampleObjects = jsYaml.safeLoad(apiMarkdownYamlText);
+
+ return apiMarkdownExampleObjects.map((example) => toArray(example));
+};
diff --git a/spec/frontend/content_editor/markdown_processing_spec.js b/spec/frontend/content_editor/markdown_processing_spec.js
new file mode 100644
index 00000000000..e435af30e9f
--- /dev/null
+++ b/spec/frontend/content_editor/markdown_processing_spec.js
@@ -0,0 +1,12 @@
+import { createEditor } from '~/content_editor';
+import { loadMarkdownApiExamples, loadMarkdownApiResult } from './markdown_processing_examples';
+
+describe('markdown processing', () => {
+  // Ensure we generate the same Markdown that was provided to the Markdown API.
+ it.each(loadMarkdownApiExamples())('correctly handles %s', async (testName, markdown) => {
+ const { html } = loadMarkdownApiResult(testName);
+ const editor = await createEditor({ content: markdown, renderMarkdown: () => html });
+
+ expect(editor.getSerializedContent()).toBe(markdown);
+ });
+});
diff --git a/spec/frontend/content_editor/services/create_editor_spec.js b/spec/frontend/content_editor/services/create_editor_spec.js
new file mode 100644
index 00000000000..4cf63e608eb
--- /dev/null
+++ b/spec/frontend/content_editor/services/create_editor_spec.js
@@ -0,0 +1,39 @@
+import { PROVIDE_SERIALIZER_OR_RENDERER_ERROR } from '~/content_editor/constants';
+import createEditor from '~/content_editor/services/create_editor';
+import createMarkdownSerializer from '~/content_editor/services/markdown_serializer';
+
+jest.mock('~/content_editor/services/markdown_serializer');
+
+describe('content_editor/services/create_editor', () => {
+ const buildMockSerializer = () => ({
+ serialize: jest.fn(),
+ deserialize: jest.fn(),
+ });
+
+ describe('creating an editor', () => {
+ it('uses markdown serializer when a renderMarkdown function is provided', async () => {
+ const renderMarkdown = () => true;
+ const mockSerializer = buildMockSerializer();
+ createMarkdownSerializer.mockReturnValueOnce(mockSerializer);
+
+ await createEditor({ renderMarkdown });
+
+ expect(createMarkdownSerializer).toHaveBeenCalledWith({ render: renderMarkdown });
+ });
+
+ it('uses custom serializer when it is provided', async () => {
+ const mockSerializer = buildMockSerializer();
+ const serializedContent = '**bold**';
+
+ mockSerializer.serialize.mockReturnValueOnce(serializedContent);
+
+ const editor = await createEditor({ serializer: mockSerializer });
+
+ expect(editor.getSerializedContent()).toBe(serializedContent);
+ });
+
+    it('throws an error when neither a serializer nor a renderMarkdown function is provided', async () => {
+ await expect(createEditor()).rejects.toThrow(PROVIDE_SERIALIZER_OR_RENDERER_ERROR);
+ });
+ });
+});
diff --git a/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
index a5eb42e0f08..15b052fffbb 100644
--- a/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
+++ b/spec/frontend/contributors/component/__snapshots__/contributors_spec.js.snap
@@ -5,7 +5,9 @@ exports[`Contributors charts should render charts when loading completed and the
<div
class="contributors-charts"
>
- <h4>
+ <h4
+ class="gl-mb-2 gl-mt-5"
+ >
Commits to master
</h4>
@@ -16,6 +18,7 @@ exports[`Contributors charts should render charts when loading completed and the
<div>
<glareachart-stub
annotations=""
+ class="gl-mb-5"
data="[object Object]"
height="264"
includelegendavgmax="true"
@@ -34,14 +37,20 @@ exports[`Contributors charts should render charts when loading completed and the
class="row"
>
<div
- class="col-lg-6 col-12"
+ class="col-lg-6 col-12 gl-my-5"
>
- <h4>
+ <h4
+ class="gl-mb-2 gl-mt-0"
+ >
John
</h4>
- <p>
+ <p
+ class="gl-mb-3"
+ >
+
2 commits (jawnnypoo@gmail.com)
+
</p>
<div>
diff --git a/spec/frontend/create_merge_request_dropdown_spec.js b/spec/frontend/create_merge_request_dropdown_spec.js
index 08c05c6ec38..b4c13981dd5 100644
--- a/spec/frontend/create_merge_request_dropdown_spec.js
+++ b/spec/frontend/create_merge_request_dropdown_spec.js
@@ -20,7 +20,9 @@ describe('CreateMergeRequestDropdown', () => {
</div>
<div class="js-ref"></div>
<div class="js-create-mr"></div>
- <div class="js-create-merge-request"></div>
+ <div class="js-create-merge-request">
+ <span class="js-spinner"></span>
+ </div>
<div class="js-create-target"></div>
<div class="js-dropdown-toggle"></div>
</div>
@@ -100,4 +102,18 @@ describe('CreateMergeRequestDropdown', () => {
expect(dropdown.createMergeRequestButton.classList).toContain('disabled');
});
});
+
+ describe('setLoading', () => {
+ it.each`
+ loading | hasClass
+ ${true} | ${false}
+ ${false} | ${true}
+    `('toggles the loading spinner when loading is $loading', ({ loading, hasClass }) => {
+ dropdown.setLoading(loading);
+
+ expect(document.querySelector('.js-spinner').classList.contains('gl-display-none')).toEqual(
+ hasClass,
+ );
+ });
+ });
});
diff --git a/spec/frontend/cycle_analytics/banner_spec.js b/spec/frontend/cycle_analytics/banner_spec.js
index 0cae0298cee..ef7998c5ff5 100644
--- a/spec/frontend/cycle_analytics/banner_spec.js
+++ b/spec/frontend/cycle_analytics/banner_spec.js
@@ -1,45 +1,47 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import banner from '~/cycle_analytics/components/banner.vue';
+import { shallowMount } from '@vue/test-utils';
+import Banner from '~/cycle_analytics/components/banner.vue';
describe('Value Stream Analytics banner', () => {
- let vm;
+ let wrapper;
- beforeEach(() => {
- const Component = Vue.extend(banner);
- vm = mountComponent(Component, {
- documentationLink: 'path',
+ const createComponent = () => {
+ wrapper = shallowMount(Banner, {
+ propsData: {
+ documentationLink: 'path',
+ },
});
+ };
+
+ beforeEach(() => {
+ createComponent();
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('should render value stream analytics information', () => {
- expect(vm.$el.querySelector('h4').textContent.trim()).toEqual(
- 'Introducing Value Stream Analytics',
- );
+ expect(wrapper.find('h4').text().trim()).toBe('Introducing Value Stream Analytics');
expect(
- vm.$el
- .querySelector('p')
- .textContent.trim()
+ wrapper
+ .find('p')
+ .text()
+ .trim()
.replace(/[\r\n]+/g, ' '),
).toContain(
'Value Stream Analytics gives an overview of how much time it takes to go from idea to production in your project.',
);
- expect(vm.$el.querySelector('a').textContent.trim()).toEqual('Read more');
-
- expect(vm.$el.querySelector('a').getAttribute('href')).toEqual('path');
+ expect(wrapper.find('a').text().trim()).toBe('Read more');
+ expect(wrapper.find('a').attributes('href')).toBe('path');
});
- it('should emit an event when close button is clicked', () => {
- jest.spyOn(vm, '$emit').mockImplementation(() => {});
+ it('should emit an event when close button is clicked', async () => {
+ jest.spyOn(wrapper.vm, '$emit').mockImplementation(() => {});
- vm.$el.querySelector('.js-ca-dismiss-button').click();
+ await wrapper.find('.js-ca-dismiss-button').trigger('click');
- expect(vm.$emit).toHaveBeenCalled();
+ expect(wrapper.vm.$emit).toHaveBeenCalled();
});
});
diff --git a/spec/frontend/cycle_analytics/total_time_component_spec.js b/spec/frontend/cycle_analytics/total_time_component_spec.js
index 0f7f2628aca..e831bc311ed 100644
--- a/spec/frontend/cycle_analytics/total_time_component_spec.js
+++ b/spec/frontend/cycle_analytics/total_time_component_spec.js
@@ -1,58 +1,58 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import component from '~/cycle_analytics/components/total_time_component.vue';
+import { shallowMount } from '@vue/test-utils';
+import TotalTime from '~/cycle_analytics/components/total_time_component.vue';
describe('Total time component', () => {
- let vm;
- let Component;
+ let wrapper;
- beforeEach(() => {
- Component = Vue.extend(component);
- });
+ const createComponent = (propsData) => {
+ wrapper = shallowMount(TotalTime, {
+ propsData,
+ });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
describe('With data', () => {
it('should render information for days and hours', () => {
- vm = mountComponent(Component, {
+ createComponent({
time: {
days: 3,
hours: 4,
},
});
- expect(vm.$el.textContent.trim().replace(/\s\s+/g, ' ')).toEqual('3 days 4 hrs');
+ expect(wrapper.text()).toMatchInterpolatedText('3 days 4 hrs');
});
it('should render information for hours and minutes', () => {
- vm = mountComponent(Component, {
+ createComponent({
time: {
hours: 4,
mins: 35,
},
});
- expect(vm.$el.textContent.trim().replace(/\s\s+/g, ' ')).toEqual('4 hrs 35 mins');
+ expect(wrapper.text()).toMatchInterpolatedText('4 hrs 35 mins');
});
it('should render information for seconds', () => {
- vm = mountComponent(Component, {
+ createComponent({
time: {
seconds: 45,
},
});
- expect(vm.$el.textContent.trim().replace(/\s\s+/g, ' ')).toEqual('45 s');
+ expect(wrapper.text()).toMatchInterpolatedText('45 s');
});
});
describe('Without data', () => {
it('should render no information', () => {
- vm = mountComponent(Component);
+ createComponent();
- expect(vm.$el.textContent.trim()).toEqual('--');
+ expect(wrapper.text()).toBe('--');
});
});
});
diff --git a/spec/frontend/delete_label_modal_spec.js b/spec/frontend/delete_label_modal_spec.js
new file mode 100644
index 00000000000..df70d3a8393
--- /dev/null
+++ b/spec/frontend/delete_label_modal_spec.js
@@ -0,0 +1,83 @@
+import { TEST_HOST } from 'helpers/test_constants';
+import initDeleteLabelModal from '~/delete_label_modal';
+
+describe('DeleteLabelModal', () => {
+ const buttons = [
+ {
+ labelName: 'label 1',
+ subjectName: 'GitLab Org',
+ destroyPath: `${TEST_HOST}/1`,
+ },
+ {
+ labelName: 'label 2',
+ subjectName: 'GitLab Org',
+ destroyPath: `${TEST_HOST}/2`,
+ },
+ ];
+
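+  // Render one plain trigger button per fixture entry; the spec expects the GlModal to be
+  // mounted lazily, only after a trigger button is clicked.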
+ beforeEach(() => {
+ const buttonContainer = document.createElement('div');
+
+ buttons.forEach((x) => {
+ const button = document.createElement('button');
+ button.setAttribute('class', 'js-delete-label-modal-button');
+ button.setAttribute('data-label-name', x.labelName);
+ button.setAttribute('data-subject-name', x.subjectName);
+ button.setAttribute('data-destroy-path', x.destroyPath);
+ button.innerHTML = 'Action';
+ buttonContainer.appendChild(button);
+ });
+
+ document.body.appendChild(buttonContainer);
+ });
+
+ afterEach(() => {
+ document.body.innerHTML = '';
+ });
+
+ const findJsHooks = () => document.querySelectorAll('.js-delete-label-modal-button');
+ const findModal = () => document.querySelector('.gl-modal');
+
+ it('starts with only js-containers', () => {
+ expect(findJsHooks()).toHaveLength(buttons.length);
+ expect(findModal()).not.toExist();
+ });
+
+ describe('when first button clicked', () => {
+ beforeEach(() => {
+ initDeleteLabelModal();
+ findJsHooks().item(0).click();
+ });
+
+ it('does not replace js-containers with GlModal', () => {
+ expect(findJsHooks()).toHaveLength(buttons.length);
+ });
+
+ it('renders GlModal', () => {
+ expect(findModal()).toExist();
+ });
+ });
+
+ describe.each`
+ index
+ ${0}
+ ${1}
+ `(`when multiple buttons exist`, ({ index }) => {
+ beforeEach(() => {
+ initDeleteLabelModal();
+ findJsHooks().item(index).click();
+ });
+
+ it('correct props are passed to gl-modal', () => {
+ expect(findModal().querySelector('.modal-title').innerHTML).toContain(
+ buttons[index].labelName,
+ );
+ expect(findModal().querySelector('.modal-body').innerHTML).toContain(
+ buttons[index].subjectName,
+ );
+ expect(findModal().querySelector('.modal-footer .btn-danger').href).toContain(
+ buttons[index].destroyPath,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js b/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js
index d8ce184940a..7c46c280d46 100644
--- a/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js
+++ b/spec/frontend/deploy_freeze/components/deploy_freeze_modal_spec.js
@@ -1,13 +1,16 @@
import { GlButton, GlModal } from '@gitlab/ui';
-import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
import Vuex from 'vuex';
+import Api from '~/api';
import DeployFreezeModal from '~/deploy_freeze/components/deploy_freeze_modal.vue';
import createStore from '~/deploy_freeze/store';
import TimezoneDropdown from '~/vue_shared/components/timezone_dropdown.vue';
import { freezePeriodsFixture, timezoneDataFixture } from '../helpers';
-const localVue = createLocalVue();
-localVue.use(Vuex);
+jest.mock('~/api');
+
+Vue.use(Vuex);
describe('Deploy freeze modal', () => {
let wrapper;
@@ -23,18 +26,19 @@ describe('Deploy freeze modal', () => {
stubs: {
GlModal,
},
- localVue,
store,
});
});
- const findModal = () => wrapper.find(GlModal);
- const addDeployFreezeButton = () => findModal().findAll(GlButton).at(1);
+ const findModal = () => wrapper.findComponent(GlModal);
+ const submitDeployFreezeButton = () => findModal().findAllComponents(GlButton).at(1);
- const setInput = (freezeStartCron, freezeEndCron, selectedTimezone) => {
+ const setInput = (freezeStartCron, freezeEndCron, selectedTimezone, id = '') => {
store.state.freezeStartCron = freezeStartCron;
store.state.freezeEndCron = freezeEndCron;
store.state.selectedTimezone = selectedTimezone;
+ store.state.selectedTimezoneIdentifier = selectedTimezone;
+ store.state.selectedId = id;
wrapper.find('#deploy-freeze-start').trigger('input');
wrapper.find('#deploy-freeze-end').trigger('input');
@@ -48,18 +52,36 @@ describe('Deploy freeze modal', () => {
describe('Basic interactions', () => {
it('button is disabled when freeze period is invalid', () => {
- expect(addDeployFreezeButton().attributes('disabled')).toBeTruthy();
+ expect(submitDeployFreezeButton().attributes('disabled')).toBeTruthy();
});
});
describe('Adding a new deploy freeze', () => {
+ const { freeze_start, freeze_end, cron_timezone } = freezePeriodsFixture[0];
+
beforeEach(() => {
- const { freeze_start, freeze_end, cron_timezone } = freezePeriodsFixture[0];
setInput(freeze_start, freeze_end, cron_timezone);
});
it('button is enabled when valid freeze period settings are present', () => {
- expect(addDeployFreezeButton().attributes('disabled')).toBeUndefined();
+ expect(submitDeployFreezeButton().attributes('disabled')).toBeUndefined();
+ });
+
+ it('should display Add deploy freeze', () => {
+ expect(findModal().props('title')).toBe('Add deploy freeze');
+ expect(submitDeployFreezeButton().text()).toBe('Add deploy freeze');
+ });
+
+    it('should call the add deploy freeze API', () => {
+ Api.createFreezePeriod.mockResolvedValue();
+ findModal().vm.$emit('primary');
+
+ expect(Api.createFreezePeriod).toHaveBeenCalledTimes(1);
+ expect(Api.createFreezePeriod).toHaveBeenCalledWith(store.state.projectId, {
+ freeze_start,
+ freeze_end,
+ cron_timezone,
+ });
});
});
@@ -70,7 +92,7 @@ describe('Deploy freeze modal', () => {
});
it('disables the add deploy freeze button', () => {
- expect(addDeployFreezeButton().attributes('disabled')).toBeTruthy();
+ expect(submitDeployFreezeButton().attributes('disabled')).toBeTruthy();
});
});
@@ -81,7 +103,32 @@ describe('Deploy freeze modal', () => {
});
it('does not disable the submit button', () => {
- expect(addDeployFreezeButton().attributes('disabled')).toBeFalsy();
+ expect(submitDeployFreezeButton().attributes('disabled')).toBeFalsy();
+ });
+ });
+ });
+
+ describe('Editing an existing deploy freeze', () => {
+ const { freeze_start, freeze_end, cron_timezone, id } = freezePeriodsFixture[0];
+ beforeEach(() => {
+ setInput(freeze_start, freeze_end, cron_timezone, id);
+ });
+
+ it('should display Edit deploy freeze', () => {
+ expect(findModal().props('title')).toBe('Edit deploy freeze');
+ expect(submitDeployFreezeButton().text()).toBe('Save deploy freeze');
+ });
+
+    it('should call the update deploy freeze API', () => {
+ Api.updateFreezePeriod.mockResolvedValue();
+ findModal().vm.$emit('primary');
+
+ expect(Api.updateFreezePeriod).toHaveBeenCalledTimes(1);
+ expect(Api.updateFreezePeriod).toHaveBeenCalledWith(store.state.projectId, {
+ id,
+ freeze_start,
+ freeze_end,
+ cron_timezone,
});
});
});
diff --git a/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js b/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js
index e4ee1b9ad26..168ddcfeacc 100644
--- a/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js
+++ b/spec/frontend/deploy_freeze/components/deploy_freeze_table_spec.js
@@ -2,6 +2,7 @@ import { createLocalVue, mount } from '@vue/test-utils';
import Vuex from 'vuex';
import DeployFreezeTable from '~/deploy_freeze/components/deploy_freeze_table.vue';
import createStore from '~/deploy_freeze/store';
+import { RECEIVE_FREEZE_PERIODS_SUCCESS } from '~/deploy_freeze/store/mutation_types';
import { freezePeriodsFixture, timezoneDataFixture } from '../helpers';
const localVue = createLocalVue();
@@ -26,6 +27,7 @@ describe('Deploy freeze table', () => {
const findEmptyFreezePeriods = () => wrapper.find('[data-testid="empty-freeze-periods"]');
const findAddDeployFreezeButton = () => wrapper.find('[data-testid="add-deploy-freeze"]');
+ const findEditDeployFreezeButton = () => wrapper.find('[data-testid="edit-deploy-freeze"]');
const findDeployFreezeTable = () => wrapper.find('[data-testid="deploy-freeze-table"]');
beforeEach(() => {
@@ -45,17 +47,31 @@ describe('Deploy freeze table', () => {
it('displays empty', () => {
expect(findEmptyFreezePeriods().exists()).toBe(true);
expect(findEmptyFreezePeriods().text()).toBe(
- 'No deploy freezes exist for this project. To add one, click Add deploy freeze',
+ 'No deploy freezes exist for this project. To add one, select Add deploy freeze',
);
});
- it('displays data', () => {
- store.state.freezePeriods = freezePeriodsFixture;
+ describe('with data', () => {
+ beforeEach(async () => {
+ store.commit(RECEIVE_FREEZE_PERIODS_SUCCESS, freezePeriodsFixture);
+ await wrapper.vm.$nextTick();
+ });
- return wrapper.vm.$nextTick(() => {
+ it('displays data', () => {
const tableRows = findDeployFreezeTable().findAll('tbody tr');
expect(tableRows.length).toBe(freezePeriodsFixture.length);
expect(findEmptyFreezePeriods().exists()).toBe(false);
+ expect(findEditDeployFreezeButton().exists()).toBe(true);
+ });
+
+      it('allows the user to edit a deploy freeze', async () => {
+ findEditDeployFreezeButton().trigger('click');
+ await wrapper.vm.$nextTick();
+
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'setFreezePeriod',
+ store.state.freezePeriods[0],
+ );
});
});
});
diff --git a/spec/frontend/deploy_freeze/store/actions_spec.js b/spec/frontend/deploy_freeze/store/actions_spec.js
index f4d9802e39a..9c784f3c5a2 100644
--- a/spec/frontend/deploy_freeze/store/actions_spec.js
+++ b/spec/frontend/deploy_freeze/store/actions_spec.js
@@ -23,12 +23,46 @@ describe('deploy freeze store actions', () => {
});
Api.freezePeriods.mockResolvedValue({ data: freezePeriodsFixture });
Api.createFreezePeriod.mockResolvedValue();
+ Api.updateFreezePeriod.mockResolvedValue();
});
afterEach(() => {
mock.restore();
});
+  describe('setFreezePeriod', () => {
+    it('commits the selected id, timezone, and freeze period cron mutations', () => {
+ testAction(
+ actions.setFreezePeriod,
+ {
+ id: 3,
+ cronTimezone: 'UTC',
+ freezeStart: 'start',
+ freezeEnd: 'end',
+ },
+ {},
+ [
+ {
+ payload: 3,
+ type: types.SET_SELECTED_ID,
+ },
+ {
+ payload: 'UTC',
+ type: types.SET_SELECTED_TIMEZONE,
+ },
+ {
+ payload: 'start',
+ type: types.SET_FREEZE_START_CRON,
+ },
+ {
+ payload: 'end',
+ type: types.SET_FREEZE_END_CRON,
+ },
+ ],
+ );
+ });
+ });
+
describe('setSelectedTimezone', () => {
it('commits SET_SELECTED_TIMEZONE mutation', () => {
testAction(actions.setSelectedTimezone, {}, {}, [
@@ -68,10 +102,16 @@ describe('deploy freeze store actions', () => {
state,
[{ type: 'RESET_MODAL' }],
[
- { type: 'requestAddFreezePeriod' },
- { type: 'receiveAddFreezePeriodSuccess' },
+ { type: 'requestFreezePeriod' },
+ { type: 'receiveFreezePeriodSuccess' },
{ type: 'fetchFreezePeriods' },
],
+ () =>
+ expect(Api.createFreezePeriod).toHaveBeenCalledWith(state.projectId, {
+ freeze_start: state.freezeStartCron,
+ freeze_end: state.freezeEndCron,
+ cron_timezone: state.selectedTimezoneIdentifier,
+ }),
);
});
@@ -83,7 +123,43 @@ describe('deploy freeze store actions', () => {
{},
state,
[],
- [{ type: 'requestAddFreezePeriod' }, { type: 'receiveAddFreezePeriodError' }],
+ [{ type: 'requestFreezePeriod' }, { type: 'receiveFreezePeriodError' }],
+ () => expect(createFlash).toHaveBeenCalled(),
+ );
+ });
+ });
+
+ describe('updateFreezePeriod', () => {
+    it('dispatches the correct actions when updating a freeze period', () => {
+ testAction(
+ actions.updateFreezePeriod,
+ {},
+ state,
+ [{ type: 'RESET_MODAL' }],
+ [
+ { type: 'requestFreezePeriod' },
+ { type: 'receiveFreezePeriodSuccess' },
+ { type: 'fetchFreezePeriods' },
+ ],
+ () =>
+ expect(Api.updateFreezePeriod).toHaveBeenCalledWith(state.projectId, {
+ id: state.selectedId,
+ freeze_start: state.freezeStartCron,
+ freeze_end: state.freezeEndCron,
+ cron_timezone: state.selectedTimezoneIdentifier,
+ }),
+ );
+ });
+
+    it('should show a flash error and set the error in state on update failure', () => {
+ Api.updateFreezePeriod.mockRejectedValue();
+
+ testAction(
+ actions.updateFreezePeriod,
+ {},
+ state,
+ [],
+ [{ type: 'requestFreezePeriod' }, { type: 'receiveFreezePeriodError' }],
() => expect(createFlash).toHaveBeenCalled(),
);
});
diff --git a/spec/frontend/deploy_freeze/store/mutations_spec.js b/spec/frontend/deploy_freeze/store/mutations_spec.js
index 54cbdfcb64c..ce75e3b89c3 100644
--- a/spec/frontend/deploy_freeze/store/mutations_spec.js
+++ b/spec/frontend/deploy_freeze/store/mutations_spec.js
@@ -33,7 +33,10 @@ describe('Deploy freeze mutations', () => {
const expectedFreezePeriods = freezePeriodsFixture.map((freezePeriod, index) => ({
...convertObjectPropsToCamelCase(freezePeriod),
- cronTimezone: timezoneNames[index],
+ cronTimezone: {
+ formattedTimezone: timezoneNames[index],
+ identifier: freezePeriod.cronTimezone,
+ },
}));
expect(stateCopy.freezePeriods).toMatchObject(expectedFreezePeriods);
@@ -62,11 +65,19 @@ describe('Deploy freeze mutations', () => {
});
});
- describe('SET_FREEZE_ENDT_CRON', () => {
+ describe('SET_FREEZE_END_CRON', () => {
it('should set freezeEndCron', () => {
mutations[types.SET_FREEZE_END_CRON](stateCopy, '5 0 * 8 *');
expect(stateCopy.freezeEndCron).toBe('5 0 * 8 *');
});
});
+
+ describe('SET_SELECTED_ID', () => {
+ it('should set selectedId', () => {
+ mutations[types.SET_SELECTED_ID](stateCopy, 5);
+
+ expect(stateCopy.selectedId).toBe(5);
+ });
+ });
});
diff --git a/spec/frontend/deploy_tokens/components/revoke_button_spec.js b/spec/frontend/deploy_tokens/components/revoke_button_spec.js
new file mode 100644
index 00000000000..e70dfe4d2e6
--- /dev/null
+++ b/spec/frontend/deploy_tokens/components/revoke_button_spec.js
@@ -0,0 +1,108 @@
+import { GlModal } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import { stubComponent } from 'helpers/stub_component';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import RevokeButton from '~/deploy_tokens/components/revoke_button.vue';
+
+const mockToken = {
+ created_at: '2021-03-18T19:13:03.011Z',
+ deploy_token_type: 'project_type',
+ expires_at: null,
+ id: 1,
+ name: 'testtoken',
+ read_package_registry: true,
+ read_registry: false,
+ read_repository: true,
+ revoked: false,
+ token: 'xUVsGDfK4y_Xj5UhqvaH',
+ token_encrypted: 'JYeg+WK4obIlrhyAYWvBvaY7CNB/U3FPX3cdLrivAly5qToy',
+ username: 'gitlab+deploy-token-1',
+ write_package_registry: true,
+ write_registry: false,
+};
+const mockRevokePath = '';
+
+describe('RevokeButton', () => {
+ let wrapper;
+ let glModalDirective;
+
+ function createComponent(injectedProperties = {}) {
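+    // Stub the v-gl-modal directive so the test can assert which modal id the button targets.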
+ glModalDirective = jest.fn();
+ return extendedWrapper(
+ mount(RevokeButton, {
+ provide: {
+ token: mockToken,
+ revokePath: mockRevokePath,
+ ...injectedProperties,
+ },
+ directives: {
+ glModal: {
+ bind(_, { value }) {
+ glModalDirective(value);
+ },
+ },
+ },
+ stubs: {
+ GlModal: stubComponent(GlModal, {
+ template:
+ '<div><slot name="modal-title"></slot><slot></slot><slot name="modal-footer"></slot></div>',
+ }),
+ },
+ }),
+ );
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findRevokeButton = () => wrapper.findByTestId('revoke-button');
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findPrimaryModalButton = () => wrapper.findByTestId('primary-revoke-btn');
+
+ describe('template', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ describe('revoke button', () => {
+ it('displays the revoke button', () => {
+ expect(findRevokeButton().exists()).toBe(true);
+ });
+
+ it('passes the buttonClass to the button', () => {
+ wrapper = createComponent({ buttonClass: 'my-revoke-button' });
+ expect(findRevokeButton().classes()).toContain('my-revoke-button');
+ });
+
+ it('opens the modal', () => {
+ findRevokeButton().trigger('click');
+ expect(glModalDirective).toHaveBeenCalledWith(wrapper.vm.modalId);
+ });
+ });
+
+ describe('modal', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ it('renders the revoke modal', () => {
+ expect(findModal().exists()).toBe(true);
+ });
+
+ it('displays the token name in the modal title', () => {
+ expect(findModal().text()).toContain('Revoke testtoken');
+ });
+
+      it('displays the token name in the primary action button', () => {
+ expect(findPrimaryModalButton().text()).toBe('Revoke testtoken');
+ });
+
+ it('passes the revokePath to the button', () => {
+ const revokePath = 'gitlab-org/gitlab-test/-/deploy-tokens/1/revoke';
+ wrapper = createComponent({ revokePath });
+ expect(findPrimaryModalButton().attributes('href')).toBe(revokePath);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap b/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap
index 5eb86d4f9cb..3cb48d7632f 100644
--- a/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap
+++ b/spec/frontend/design_management/components/toolbar/__snapshots__/design_navigation_spec.js.snap
@@ -13,6 +13,7 @@ exports[`Design management pagination component renders navigation buttons 1`] =
class="gl-mx-5"
>
<gl-button-stub
+ aria-label="Go to previous design"
buttontextclasses=""
category="primary"
class="js-previous-design"
@@ -24,6 +25,7 @@ exports[`Design management pagination component renders navigation buttons 1`] =
/>
<gl-button-stub
+ aria-label="Go to next design"
buttontextclasses=""
category="primary"
class="js-next-design"
diff --git a/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap b/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
index e2ad4c68bea..6dfd57906d8 100644
--- a/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
+++ b/spec/frontend/design_management/components/toolbar/__snapshots__/index_spec.js.snap
@@ -41,6 +41,7 @@ exports[`Design management toolbar component renders design and updated data 1`]
/>
<gl-button-stub
+ aria-label="Download design"
buttontextclasses=""
category="primary"
href="/-/designs/306/7f747adcd4693afadbe968d7ba7d983349b9012d"
diff --git a/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap b/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap
index 904bb2022ca..191bcc2d484 100644
--- a/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap
+++ b/spec/frontend/design_management/components/upload/__snapshots__/button_spec.js.snap
@@ -6,11 +6,11 @@ exports[`Design management upload button component renders inverted upload desig
>
<gl-button-stub
buttontextclasses=""
- category="primary"
+ category="secondary"
icon=""
size="small"
title="Adding a design with the same filename replaces the file in a new version."
- variant="default"
+ variant="confirm"
>
Upload designs
@@ -31,11 +31,11 @@ exports[`Design management upload button component renders upload design button
<div>
<gl-button-stub
buttontextclasses=""
- category="primary"
+ category="secondary"
icon=""
size="small"
title="Adding a design with the same filename replaces the file in a new version."
- variant="default"
+ variant="confirm"
>
Upload designs
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index 34547238c23..8a1c5547581 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -56,6 +56,7 @@ describe('diffs/components/app', () => {
endpointMetadata: `${TEST_HOST}/diff/endpointMetadata`,
endpointBatch: `${TEST_HOST}/diff/endpointBatch`,
endpointCoverage: `${TEST_HOST}/diff/endpointCoverage`,
+ endpointCodequality: '',
projectPath: 'namespace/project',
currentUser: {},
changesEmptyStateIllustration: '',
@@ -105,7 +106,6 @@ describe('diffs/components/app', () => {
jest.spyOn(wrapper.vm, 'fetchDiffFilesBatch').mockImplementation(fetchResolver);
jest.spyOn(wrapper.vm, 'fetchCoverageFiles').mockImplementation(fetchResolver);
jest.spyOn(wrapper.vm, 'setDiscussions').mockImplementation(() => {});
- jest.spyOn(wrapper.vm, 'startRenderDiffsQueue').mockImplementation(() => {});
jest.spyOn(wrapper.vm, 'unwatchDiscussions').mockImplementation(() => {});
jest.spyOn(wrapper.vm, 'unwatchRetrievingBatches').mockImplementation(() => {});
store.state.diffs.retrievingBatches = true;
@@ -119,7 +119,6 @@ describe('diffs/components/app', () => {
await nextTick();
- expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
@@ -134,7 +133,6 @@ describe('diffs/components/app', () => {
await nextTick();
- expect(wrapper.vm.startRenderDiffsQueue).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesMeta).toHaveBeenCalled();
expect(wrapper.vm.fetchDiffFilesBatch).toHaveBeenCalled();
expect(wrapper.vm.fetchCoverageFiles).toHaveBeenCalled();
@@ -144,6 +142,16 @@ describe('diffs/components/app', () => {
});
});
+ describe('codequality diff', () => {
+ it('does not fetch code quality data on FOSS', async () => {
+ createComponent();
+ jest.spyOn(wrapper.vm, 'fetchCodequality');
+ wrapper.vm.fetchData(false);
+
+ expect(wrapper.vm.fetchCodequality).not.toHaveBeenCalled();
+ });
+ });
+
it.each`
props | state | expected
${{ isFluidLayout: true }} | ${{ isParallelView: false }} | ${false}
@@ -697,4 +705,24 @@ describe('diffs/components/app', () => {
);
});
});
+
+ describe('diff file tree is aware of review bar', () => {
+    it('does not have the review-bar-visible class when the review bar is not visible', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.diffFiles = [{ file_hash: '111', file_path: '111.js' }];
+ });
+
+ expect(wrapper.find('.js-diff-tree-list').exists()).toBe(true);
+ expect(wrapper.find('.js-diff-tree-list.review-bar-visible').exists()).toBe(false);
+ });
+
+    it('has the review-bar-visible class when the review bar is visible', () => {
+ createComponent({}, ({ state }) => {
+ state.diffs.diffFiles = [{ file_hash: '111', file_path: '111.js' }];
+ state.batchComments.drafts = ['draft message'];
+ });
+
+ expect(wrapper.find('.js-diff-tree-list.review-bar-visible').exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/diffs/components/commit_item_spec.js b/spec/frontend/diffs/components/commit_item_spec.js
index 8cb4fd20063..0191822d97a 100644
--- a/spec/frontend/diffs/components/commit_item_spec.js
+++ b/spec/frontend/diffs/components/commit_item_spec.js
@@ -13,8 +13,6 @@ const TEST_AUTHOR_EMAIL = 'test+test@gitlab.com';
const TEST_AUTHOR_GRAVATAR = `${TEST_HOST}/avatar/test?s=40`;
const TEST_SIGNATURE_HTML = '<a>Legit commit</a>';
const TEST_PIPELINE_STATUS_PATH = `${TEST_HOST}/pipeline/status`;
-const NEXT_COMMIT_URL = `${TEST_HOST}/?commit_id=next`;
-const PREV_COMMIT_URL = `${TEST_HOST}/?commit_id=prev`;
describe('diffs/components/commit_item', () => {
let wrapper;
@@ -31,12 +29,6 @@ describe('diffs/components/commit_item', () => {
const getCommitActionsElement = () => wrapper.find('.commit-actions');
const getCommitPipelineStatus = () => wrapper.find(CommitPipelineStatus);
- const getCommitNavButtonsElement = () => wrapper.find('.commit-nav-buttons');
- const getNextCommitNavElement = () =>
- getCommitNavButtonsElement().find('.btn-group > *:last-child');
- const getPrevCommitNavElement = () =>
- getCommitNavButtonsElement().find('.btn-group > *:first-child');
-
const mountComponent = (propsData) => {
wrapper = mount(Component, {
propsData: {
@@ -180,126 +172,4 @@ describe('diffs/components/commit_item', () => {
expect(getCommitPipelineStatus().exists()).toBe(true);
});
});
-
- describe('without neighbor commits', () => {
- beforeEach(() => {
- mountComponent({ commit: { ...commit, prev_commit_id: null, next_commit_id: null } });
- });
-
- it('does not render any navigation buttons', () => {
- expect(getCommitNavButtonsElement().exists()).toEqual(false);
- });
- });
-
- describe('with neighbor commits', () => {
- let mrCommit;
-
- beforeEach(() => {
- mrCommit = {
- ...commit,
- next_commit_id: 'next',
- prev_commit_id: 'prev',
- };
-
- mountComponent({ commit: mrCommit });
- });
-
- it('renders the commit navigation buttons', () => {
- expect(getCommitNavButtonsElement().exists()).toEqual(true);
-
- mountComponent({
- commit: { ...mrCommit, next_commit_id: null },
- });
- expect(getCommitNavButtonsElement().exists()).toEqual(true);
-
- mountComponent({
- commit: { ...mrCommit, prev_commit_id: null },
- });
- expect(getCommitNavButtonsElement().exists()).toEqual(true);
- });
-
- describe('prev commit', () => {
- const { location } = window;
-
- beforeAll(() => {
- delete window.location;
- window.location = { href: `${TEST_HOST}?commit_id=${mrCommit.id}` };
- });
-
- beforeEach(() => {
- jest.spyOn(wrapper.vm, 'moveToNeighboringCommit').mockImplementation(() => {});
- });
-
- afterAll(() => {
- window.location = location;
- });
-
- it('uses the correct href', () => {
- const link = getPrevCommitNavElement();
-
- expect(link.element.getAttribute('href')).toEqual(PREV_COMMIT_URL);
- });
-
- it('triggers the correct Vuex action on click', () => {
- const link = getPrevCommitNavElement();
-
- link.trigger('click');
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.moveToNeighboringCommit).toHaveBeenCalledWith({
- direction: 'previous',
- });
- });
- });
-
- it('renders a disabled button when there is no prev commit', () => {
- mountComponent({ commit: { ...mrCommit, prev_commit_id: null } });
-
- const button = getPrevCommitNavElement();
-
- expect(button.element.tagName).toEqual('BUTTON');
- expect(button.element.hasAttribute('disabled')).toEqual(true);
- });
- });
-
- describe('next commit', () => {
- const { location } = window;
-
- beforeAll(() => {
- delete window.location;
- window.location = { href: `${TEST_HOST}?commit_id=${mrCommit.id}` };
- });
-
- beforeEach(() => {
- jest.spyOn(wrapper.vm, 'moveToNeighboringCommit').mockImplementation(() => {});
- });
-
- afterAll(() => {
- window.location = location;
- });
-
- it('uses the correct href', () => {
- const link = getNextCommitNavElement();
-
- expect(link.element.getAttribute('href')).toEqual(NEXT_COMMIT_URL);
- });
-
- it('triggers the correct Vuex action on click', () => {
- const link = getNextCommitNavElement();
-
- link.trigger('click');
- return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.moveToNeighboringCommit).toHaveBeenCalledWith({ direction: 'next' });
- });
- });
-
- it('renders a disabled button when there is no next commit', () => {
- mountComponent({ commit: { ...mrCommit, next_commit_id: null } });
-
- const button = getNextCommitNavElement();
-
- expect(button.element.tagName).toEqual('BUTTON');
- expect(button.element.hasAttribute('disabled')).toEqual(true);
- });
- });
- });
});
diff --git a/spec/frontend/diffs/components/compare_versions_spec.js b/spec/frontend/diffs/components/compare_versions_spec.js
index c93a3771ec0..a01ec1db35c 100644
--- a/spec/frontend/diffs/components/compare_versions_spec.js
+++ b/spec/frontend/diffs/components/compare_versions_spec.js
@@ -1,5 +1,6 @@
import { mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
+import { TEST_HOST } from 'helpers/test_constants';
import { trimText } from 'helpers/text_helper';
import CompareVersionsComponent from '~/diffs/components/compare_versions.vue';
import { createStore } from '~/mr_notes/stores';
@@ -9,12 +10,17 @@ import diffsMockData from '../mock_data/merge_request_diffs';
const localVue = createLocalVue();
localVue.use(Vuex);
+const NEXT_COMMIT_URL = `${TEST_HOST}/?commit_id=next`;
+const PREV_COMMIT_URL = `${TEST_HOST}/?commit_id=prev`;
+
describe('CompareVersions', () => {
let wrapper;
let store;
const targetBranchName = 'tmp-wine-dev';
+ const { commit } = getDiffWithCommit();
- const createWrapper = (props) => {
+ const createWrapper = (props = {}, commitArgs = {}) => {
+ store.state.diffs.commit = { ...store.state.diffs.commit, ...commitArgs };
wrapper = mount(CompareVersionsComponent, {
localVue,
store,
@@ -28,6 +34,11 @@ describe('CompareVersions', () => {
const findLimitedContainer = () => wrapper.find('.container-limited.limit-container-width');
const findCompareSourceDropdown = () => wrapper.find('.mr-version-dropdown');
const findCompareTargetDropdown = () => wrapper.find('.mr-version-compare-dropdown');
+ const getCommitNavButtonsElement = () => wrapper.find('.commit-nav-buttons');
+ const getNextCommitNavElement = () =>
+ getCommitNavButtonsElement().find('.btn-group > *:last-child');
+ const getPrevCommitNavElement = () =>
+ getCommitNavButtonsElement().find('.btn-group > *:first-child');
beforeEach(() => {
store = createStore();
@@ -161,4 +172,126 @@ describe('CompareVersions', () => {
expect(findCompareTargetDropdown().exists()).toBe(false);
});
});
+
+ describe('without neighbor commits', () => {
+ beforeEach(() => {
+ createWrapper({ commit: { ...commit, prev_commit_id: null, next_commit_id: null } });
+ });
+
+ it('does not render any navigation buttons', () => {
+ expect(getCommitNavButtonsElement().exists()).toEqual(false);
+ });
+ });
+
+ describe('with neighbor commits', () => {
+ let mrCommit;
+
+ beforeEach(() => {
+ mrCommit = {
+ ...commit,
+ next_commit_id: 'next',
+ prev_commit_id: 'prev',
+ };
+
+ createWrapper({}, mrCommit);
+ });
+
+ it('renders the commit navigation buttons', () => {
+ expect(getCommitNavButtonsElement().exists()).toEqual(true);
+
+ createWrapper({
+ commit: { ...mrCommit, next_commit_id: null },
+ });
+ expect(getCommitNavButtonsElement().exists()).toEqual(true);
+
+ createWrapper({
+ commit: { ...mrCommit, prev_commit_id: null },
+ });
+ expect(getCommitNavButtonsElement().exists()).toEqual(true);
+ });
+
+ describe('prev commit', () => {
+ beforeAll(() => {
+ global.jsdom.reconfigure({
+ url: `${TEST_HOST}?commit_id=${mrCommit.id}`,
+ });
+ });
+
+ afterAll(() => {
+ global.jsdom.reconfigure({
+ url: TEST_HOST,
+ });
+ });
+
+ beforeEach(() => {
+ jest.spyOn(wrapper.vm, 'moveToNeighboringCommit').mockImplementation(() => {});
+ });
+
+ it('uses the correct href', () => {
+ const link = getPrevCommitNavElement();
+
+ expect(link.element.getAttribute('href')).toEqual(PREV_COMMIT_URL);
+ });
+
+ it('triggers the correct Vuex action on click', () => {
+ const link = getPrevCommitNavElement();
+
+ link.trigger('click');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.moveToNeighboringCommit).toHaveBeenCalledWith({
+ direction: 'previous',
+ });
+ });
+ });
+
+ it('renders a disabled button when there is no prev commit', () => {
+ createWrapper({}, { ...mrCommit, prev_commit_id: null });
+
+ const button = getPrevCommitNavElement();
+
+ expect(button.element.hasAttribute('disabled')).toEqual(true);
+ });
+ });
+
+ describe('next commit', () => {
+ beforeAll(() => {
+ global.jsdom.reconfigure({
+ url: `${TEST_HOST}?commit_id=${mrCommit.id}`,
+ });
+ });
+
+ afterAll(() => {
+ global.jsdom.reconfigure({
+ url: TEST_HOST,
+ });
+ });
+
+ beforeEach(() => {
+ jest.spyOn(wrapper.vm, 'moveToNeighboringCommit').mockImplementation(() => {});
+ });
+
+ it('uses the correct href', () => {
+ const link = getNextCommitNavElement();
+
+ expect(link.element.getAttribute('href')).toEqual(NEXT_COMMIT_URL);
+ });
+
+ it('triggers the correct Vuex action on click', () => {
+ const link = getNextCommitNavElement();
+
+ link.trigger('click');
+ return wrapper.vm.$nextTick().then(() => {
+ expect(wrapper.vm.moveToNeighboringCommit).toHaveBeenCalledWith({ direction: 'next' });
+ });
+ });
+
+ it('renders a disabled button when there is no next commit', () => {
+ createWrapper({}, { ...mrCommit, next_commit_id: null });
+
+ const button = getNextCommitNavElement();
+
+ expect(button.element.hasAttribute('disabled')).toEqual(true);
+ });
+ });
+ });
});
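
The relocated specs above replace the old `delete window.location` trick with `global.jsdom.reconfigure`, which updates the real jsdom URL instead of swapping the object out. For reference, a minimal sketch of that pattern in isolation, assuming the Jest environment exposes the jsdom instance as `global.jsdom` (as the specs above rely on):

const DEFAULT_URL = 'http://test.host/'; // assumed default, mirroring TEST_HOST above

describe('URL-dependent behaviour', () => {
  beforeAll(() => {
    // Point jsdom at the URL the code under test reads from window.location.
    global.jsdom.reconfigure({ url: `${DEFAULT_URL}?commit_id=abc123` });
  });

  afterAll(() => {
    // Restore the default URL so later specs are unaffected.
    global.jsdom.reconfigure({ url: DEFAULT_URL });
  });

  it('exposes the query string on window.location', () => {
    expect(window.location.search).toContain('commit_id=abc123');
  });
});
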
diff --git a/spec/frontend/diffs/components/diff_row_spec.js b/spec/frontend/diffs/components/diff_row_spec.js
index 5682b29d697..0bc1bd40f06 100644
--- a/spec/frontend/diffs/components/diff_row_spec.js
+++ b/spec/frontend/diffs/components/diff_row_spec.js
@@ -4,6 +4,7 @@ import Vuex from 'vuex';
import DiffRow from '~/diffs/components/diff_row.vue';
import { mapParallel } from '~/diffs/components/diff_row_utils';
import diffsModule from '~/diffs/store/modules';
+import { findInteropAttributes } from '../find_interop_attributes';
import diffFileMockData from '../mock_data/diff_file';
describe('DiffRow', () => {
@@ -211,4 +212,20 @@ describe('DiffRow', () => {
expect(coverage.classes('no-coverage')).toBeFalsy();
});
});
+
+ describe('interoperability', () => {
+ it.each`
+ desc | line | inline | leftSide | rightSide
+ ${'with inline and new_line'} | ${{ left: { old_line: 3, new_line: 5, type: 'new' } }} | ${true} | ${{ type: 'new', line: '5', oldLine: '3', newLine: '5' }} | ${null}
+ ${'with inline and no new_line'} | ${{ left: { old_line: 3, type: 'old' } }} | ${true} | ${{ type: 'old', line: '3', oldLine: '3' }} | ${null}
+ ${'with parallel and no right side'} | ${{ left: { old_line: 3, new_line: 5 } }} | ${false} | ${{ type: 'old', line: '3', oldLine: '3' }} | ${null}
+ ${'with parallel and no left side'} | ${{ right: { old_line: 3, new_line: 5 } }} | ${false} | ${null} | ${{ type: 'new', line: '5', newLine: '5' }}
+ ${'with parallel and right side'} | ${{ left: { old_line: 3 }, right: { new_line: 5 } }} | ${false} | ${{ type: 'old', line: '3', oldLine: '3' }} | ${{ type: 'new', line: '5', newLine: '5' }}
+ `('$desc, sets interop data attributes', ({ line, inline, leftSide, rightSide }) => {
+ const wrapper = createWrapper({ props: { line, inline } });
+
+ expect(findInteropAttributes(wrapper, '[data-testid="left-side"]')).toEqual(leftSide);
+ expect(findInteropAttributes(wrapper, '[data-testid="right-side"]')).toEqual(rightSide);
+ });
+ });
});
diff --git a/spec/frontend/diffs/components/inline_diff_table_row_spec.js b/spec/frontend/diffs/components/inline_diff_table_row_spec.js
index 28b3055b58c..66b63a7a1d0 100644
--- a/spec/frontend/diffs/components/inline_diff_table_row_spec.js
+++ b/spec/frontend/diffs/components/inline_diff_table_row_spec.js
@@ -3,6 +3,7 @@ import DiffGutterAvatars from '~/diffs/components/diff_gutter_avatars.vue';
import { mapInline } from '~/diffs/components/diff_row_utils';
import InlineDiffTableRow from '~/diffs/components/inline_diff_table_row.vue';
import { createStore } from '~/mr_notes/stores';
+import { findInteropAttributes } from '../find_interop_attributes';
import discussionsMockData from '../mock_data/diff_discussions';
import diffFileMockData from '../mock_data/diff_file';
@@ -310,4 +311,16 @@ describe('InlineDiffTableRow', () => {
});
});
});
+
+ describe('interoperability', () => {
+ it.each`
+ desc | line | expectation
+ ${'with type old'} | ${{ ...thisLine, type: 'old', old_line: 3, new_line: 5 }} | ${{ type: 'old', line: '3', oldLine: '3', newLine: '5' }}
+ ${'with type new'} | ${{ ...thisLine, type: 'new', old_line: 3, new_line: 5 }} | ${{ type: 'new', line: '5', oldLine: '3', newLine: '5' }}
+ `('$desc, sets interop data attributes', ({ line, expectation }) => {
+ createComponent({ line });
+
+ expect(findInteropAttributes(wrapper)).toEqual(expectation);
+ });
+ });
});
diff --git a/spec/frontend/diffs/components/parallel_diff_table_row_spec.js b/spec/frontend/diffs/components/parallel_diff_table_row_spec.js
index dbe8303077d..ed191d849fd 100644
--- a/spec/frontend/diffs/components/parallel_diff_table_row_spec.js
+++ b/spec/frontend/diffs/components/parallel_diff_table_row_spec.js
@@ -5,6 +5,7 @@ import DiffGutterAvatars from '~/diffs/components/diff_gutter_avatars.vue';
import { mapParallel } from '~/diffs/components/diff_row_utils';
import ParallelDiffTableRow from '~/diffs/components/parallel_diff_table_row.vue';
import { createStore } from '~/mr_notes/stores';
+import { findInteropAttributes } from '../find_interop_attributes';
import discussionsMockData from '../mock_data/diff_discussions';
import diffFileMockData from '../mock_data/diff_file';
@@ -418,5 +419,27 @@ describe('ParallelDiffTableRow', () => {
});
});
});
+
+ describe('interoperability', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('adds old side interoperability data attributes', () => {
+ expect(findInteropAttributes(wrapper, '.line_content.left-side')).toEqual({
+ type: 'old',
+ line: thisLine.left.old_line.toString(),
+ oldLine: thisLine.left.old_line.toString(),
+ });
+ });
+
+ it('adds new side interoperability data attributes', () => {
+ expect(findInteropAttributes(wrapper, '.line_content.right-side')).toEqual({
+ type: 'new',
+ line: thisLine.right.new_line.toString(),
+ newLine: thisLine.right.new_line.toString(),
+ });
+ });
+ });
});
});
diff --git a/spec/frontend/diffs/create_diffs_store.js b/spec/frontend/diffs/create_diffs_store.js
index aacde99964c..e6a8b7a72ae 100644
--- a/spec/frontend/diffs/create_diffs_store.js
+++ b/spec/frontend/diffs/create_diffs_store.js
@@ -1,5 +1,6 @@
import Vue from 'vue';
import Vuex from 'vuex';
+import batchCommentsModule from '~/batch_comments/stores/modules/batch_comments';
import diffsModule from '~/diffs/store/modules';
import notesModule from '~/notes/stores/modules';
@@ -10,6 +11,7 @@ export default function createDiffsStore() {
modules: {
diffs: diffsModule(),
notes: notesModule(),
+ batchComments: batchCommentsModule(),
},
});
}
diff --git a/spec/frontend/diffs/find_interop_attributes.js b/spec/frontend/diffs/find_interop_attributes.js
new file mode 100644
index 00000000000..d2266b20e16
--- /dev/null
+++ b/spec/frontend/diffs/find_interop_attributes.js
@@ -0,0 +1,20 @@
+export const findInteropAttributes = (parent, sel) => {
+ const target = sel ? parent.find(sel) : parent;
+
+ if (!target.exists()) {
+ return null;
+ }
+
+ const type = target.attributes('data-interop-type');
+
+ if (!type) {
+ return null;
+ }
+
+ return {
+ type,
+ line: target.attributes('data-interop-line'),
+ oldLine: target.attributes('data-interop-old-line'),
+ newLine: target.attributes('data-interop-new-line'),
+ };
+};
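
The new helper returns the `data-interop-*` attributes of the matched element, or `null` when the element is missing or carries no `data-interop-type`. A usage sketch of that contract, with a purely hypothetical `FakeRow` component standing in for the diff row components exercised earlier in this diff:

import { shallowMount } from '@vue/test-utils';
import { findInteropAttributes } from '../find_interop_attributes'; // path as used by the row specs

// Hypothetical component, only to illustrate the attribute contract.
const FakeRow = {
  template: `
    <div>
      <div
        data-testid="left-side"
        data-interop-type="old"
        data-interop-line="3"
        data-interop-old-line="3"
      ></div>
      <div data-testid="right-side"></div>
    </div>
  `,
};

it('collects data-interop-* attributes, or returns null when they are absent', () => {
  const wrapper = shallowMount(FakeRow);

  expect(findInteropAttributes(wrapper, '[data-testid="left-side"]')).toEqual({
    type: 'old',
    line: '3',
    oldLine: '3',
  });
  expect(findInteropAttributes(wrapper, '[data-testid="right-side"]')).toBe(null);
});
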
diff --git a/spec/frontend/diffs/store/actions_spec.js b/spec/frontend/diffs/store/actions_spec.js
index ed3210ecfaf..f46a42fae7a 100644
--- a/spec/frontend/diffs/store/actions_spec.js
+++ b/spec/frontend/diffs/store/actions_spec.js
@@ -80,7 +80,7 @@ describe('DiffsStoreActions', () => {
jest.spyOn(utils, 'idleCallback').mockImplementation(() => null);
['requestAnimationFrame', 'requestIdleCallback'].forEach((method) => {
global[method] = (cb) => {
- cb();
+ cb({ timeRemaining: () => 10 });
};
});
});
@@ -198,7 +198,7 @@ describe('DiffsStoreActions', () => {
{ type: types.VIEW_DIFF_FILE, payload: 'test2' },
{ type: types.SET_RETRIEVING_BATCHES, payload: false },
],
- [],
+ [{ type: 'startRenderDiffsQueue' }, { type: 'startRenderDiffsQueue' }],
done,
);
});
@@ -251,6 +251,8 @@ describe('DiffsStoreActions', () => {
{ type: types.SET_LOADING, payload: false },
{ type: types.SET_MERGE_REQUEST_DIFFS, payload: diffMetadata.merge_request_diffs },
{ type: types.SET_DIFF_METADATA, payload: noFilesData },
+        // Workers are synchronous in the Jest environment (see https://gitlab.com/gitlab-org/gitlab/-/merge_requests/58805)
+ { type: types.SET_TREE_DATA, payload: utils.generateTreeList(diffMetadata.diff_files) },
],
[],
() => {
@@ -1459,19 +1461,42 @@ describe('DiffsStoreActions', () => {
});
describe('setFileByFile', () => {
+ const updateUserEndpoint = 'user/prefs';
+ let putSpy;
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ putSpy = jest.spyOn(axios, 'put');
+
+ mock.onPut(updateUserEndpoint).reply(200, {});
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
it.each`
value
${true}
${false}
- `('commits SET_FILE_BY_FILE with the new value $value', ({ value }) => {
- return testAction(
- setFileByFile,
- { fileByFile: value },
- { viewDiffsFileByFile: null },
- [{ type: types.SET_FILE_BY_FILE, payload: value }],
- [],
- );
- });
+ `(
+ 'commits SET_FILE_BY_FILE and persists the File-by-File user preference with the new value $value',
+ async ({ value }) => {
+ await testAction(
+ setFileByFile,
+ { fileByFile: value },
+ {
+ viewDiffsFileByFile: null,
+ endpointUpdateUser: updateUserEndpoint,
+ },
+ [{ type: types.SET_FILE_BY_FILE, payload: value }],
+ [],
+ );
+
+ expect(putSpy).toHaveBeenCalledWith(updateUserEndpoint, { view_diffs_file_by_file: value });
+ },
+ );
});
describe('reviewFile', () => {
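
The rewritten `setFileByFile` block above expects the action to commit `SET_FILE_BY_FILE` and to persist the preference with a PUT to `state.endpointUpdateUser`. A hedged sketch of an action shape that satisfies exactly those expectations; the production action in `~/diffs/store/actions` may differ in detail:

import axios from '~/lib/utils/axios_utils';
import * as types from '~/diffs/store/mutation_types';

export const setFileByFile = ({ state, commit }, { fileByFile }) => {
  // Update the store first so the UI can react immediately.
  commit(types.SET_FILE_BY_FILE, fileByFile);

  // Persist the File-by-File preference for the current user.
  return axios.put(state.endpointUpdateUser, { view_diffs_file_by_file: fileByFile });
};
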
diff --git a/spec/frontend/diffs/store/getters_spec.js b/spec/frontend/diffs/store/getters_spec.js
index 04606b48662..2e3a66d5b01 100644
--- a/spec/frontend/diffs/store/getters_spec.js
+++ b/spec/frontend/diffs/store/getters_spec.js
@@ -377,32 +377,40 @@ describe('Diffs Module Getters', () => {
});
describe('suggestionCommitMessage', () => {
+ let rootState;
+
beforeEach(() => {
Object.assign(localState, {
defaultSuggestionCommitMessage:
'%{branch_name}%{project_path}%{project_name}%{username}%{user_full_name}%{file_paths}%{suggestions_count}%{files_count}',
- branchName: 'branch',
- projectPath: '/path',
- projectName: 'name',
- username: 'user',
- userFullName: 'user userton',
});
+ rootState = {
+ page: {
+ mrMetadata: {
+ branch_name: 'branch',
+ project_path: '/path',
+ project_name: 'name',
+ username: 'user',
+ user_full_name: 'user userton',
+ },
+ },
+ };
});
it.each`
- specialState | output
- ${{}} | ${'branch/pathnameuseruser userton%{file_paths}%{suggestions_count}%{files_count}'}
- ${{ userFullName: null }} | ${'branch/pathnameuser%{user_full_name}%{file_paths}%{suggestions_count}%{files_count}'}
- ${{ username: null }} | ${'branch/pathname%{username}user userton%{file_paths}%{suggestions_count}%{files_count}'}
- ${{ projectName: null }} | ${'branch/path%{project_name}useruser userton%{file_paths}%{suggestions_count}%{files_count}'}
- ${{ projectPath: null }} | ${'branch%{project_path}nameuseruser userton%{file_paths}%{suggestions_count}%{files_count}'}
- ${{ branchName: null }} | ${'%{branch_name}/pathnameuseruser userton%{file_paths}%{suggestions_count}%{files_count}'}
+ specialState | output
+ ${{}} | ${'branch/pathnameuseruser userton%{file_paths}%{suggestions_count}%{files_count}'}
+ ${{ user_full_name: null }} | ${'branch/pathnameuser%{user_full_name}%{file_paths}%{suggestions_count}%{files_count}'}
+ ${{ username: null }} | ${'branch/pathname%{username}user userton%{file_paths}%{suggestions_count}%{files_count}'}
+ ${{ project_name: null }} | ${'branch/path%{project_name}useruser userton%{file_paths}%{suggestions_count}%{files_count}'}
+ ${{ project_path: null }} | ${'branch%{project_path}nameuseruser userton%{file_paths}%{suggestions_count}%{files_count}'}
+ ${{ branch_name: null }} | ${'%{branch_name}/pathnameuseruser userton%{file_paths}%{suggestions_count}%{files_count}'}
`(
'provides the correct "base" default commit message based on state ($specialState)',
({ specialState, output }) => {
- Object.assign(localState, specialState);
+ Object.assign(rootState.page.mrMetadata, specialState);
- expect(getters.suggestionCommitMessage(localState)()).toBe(output);
+ expect(getters.suggestionCommitMessage(localState, null, rootState)()).toBe(output);
},
);
@@ -417,7 +425,9 @@ describe('Diffs Module Getters', () => {
`(
"properly overrides state values ($stateOverrides) if they're provided",
({ stateOverrides, output }) => {
- expect(getters.suggestionCommitMessage(localState)(stateOverrides)).toBe(output);
+ expect(getters.suggestionCommitMessage(localState, null, rootState)(stateOverrides)).toBe(
+ output,
+ );
},
);
@@ -431,7 +441,9 @@ describe('Diffs Module Getters', () => {
`(
"fills in any missing interpolations ($providedValues) when they're provided at the getter callsite",
({ providedValues, output }) => {
- expect(getters.suggestionCommitMessage(localState)(providedValues)).toBe(output);
+ expect(getters.suggestionCommitMessage(localState, null, rootState)(providedValues)).toBe(
+ output,
+ );
},
);
});
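
The updated calls above pass `(localState, null, rootState)`, reflecting that `suggestionCommitMessage` now reads MR metadata from the root store rather than module-local state. A sketch of a getter with that signature, using a hand-rolled `interpolate` helper so the example stays self-contained (the real getter relies on GitLab's own utilities):

// Replace each %{key} whose value is present; leave the placeholder otherwise,
// which matches the expectations in the table test above.
const interpolate = (template, values) =>
  Object.entries(values).reduce(
    (message, [key, value]) => (value ? message.replace(`%{${key}}`, value) : message),
    template,
  );

export const suggestionCommitMessage = (state, _getters, rootState) => (values = {}) =>
  interpolate(state.defaultSuggestionCommitMessage, {
    ...rootState.page.mrMetadata,
    ...values,
  });
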
diff --git a/spec/frontend/diffs/store/mutations_spec.js b/spec/frontend/diffs/store/mutations_spec.js
index a8ae759e693..b549ca42634 100644
--- a/spec/frontend/diffs/store/mutations_spec.js
+++ b/spec/frontend/diffs/store/mutations_spec.js
@@ -7,15 +7,17 @@ import diffFileMockData from '../mock_data/diff_file';
describe('DiffsStoreMutations', () => {
describe('SET_BASE_CONFIG', () => {
- it('should set endpoint and project path', () => {
+ it.each`
+ prop | value
+ ${'endpoint'} | ${'/diffs/endpoint'}
+ ${'projectPath'} | ${'/root/project'}
+ ${'endpointUpdateUser'} | ${'/user/preferences'}
+ `('should set the $prop property into state', ({ prop, value }) => {
const state = {};
- const endpoint = '/diffs/endpoint';
- const projectPath = '/root/project';
- mutations[types.SET_BASE_CONFIG](state, { endpoint, projectPath });
+ mutations[types.SET_BASE_CONFIG](state, { [prop]: value });
- expect(state.endpoint).toEqual(endpoint);
- expect(state.projectPath).toEqual(projectPath);
+ expect(state[prop]).toEqual(value);
});
});
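
The table-driven `SET_BASE_CONFIG` spec above only requires that each supported key from the payload ends up on the state. One minimal mutation shape that satisfies it, shown here as a sketch (the real mutation enumerates the full set of base-config keys):

import * as types from '~/diffs/store/mutation_types';

export default {
  [types.SET_BASE_CONFIG](state, options) {
    const { endpoint, projectPath, endpointUpdateUser } = options;

    // Copy only the recognised base-config keys onto the module state.
    Object.assign(state, { endpoint, projectPath, endpointUpdateUser });
  },
};
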
diff --git a/spec/frontend/diffs/utils/interoperability_spec.js b/spec/frontend/diffs/utils/interoperability_spec.js
new file mode 100644
index 00000000000..2557e83cb4c
--- /dev/null
+++ b/spec/frontend/diffs/utils/interoperability_spec.js
@@ -0,0 +1,67 @@
+import {
+ getInteropInlineAttributes,
+ getInteropNewSideAttributes,
+ getInteropOldSideAttributes,
+ ATTR_TYPE,
+ ATTR_LINE,
+ ATTR_NEW_LINE,
+ ATTR_OLD_LINE,
+} from '~/diffs/utils/interoperability';
+
+describe('~/diffs/utils/interoperability', () => {
+ describe('getInteropInlineAttributes', () => {
+ it.each([
+ ['with null input', { input: null, output: null }],
+ [
+ 'with type=old input',
+ {
+ input: { type: 'old', old_line: 3, new_line: 5 },
+ output: { [ATTR_TYPE]: 'old', [ATTR_LINE]: 3, [ATTR_OLD_LINE]: 3, [ATTR_NEW_LINE]: 5 },
+ },
+ ],
+ [
+ 'with type=old-nonewline input',
+ {
+ input: { type: 'old-nonewline', old_line: 3, new_line: 5 },
+ output: { [ATTR_TYPE]: 'old', [ATTR_LINE]: 3, [ATTR_OLD_LINE]: 3, [ATTR_NEW_LINE]: 5 },
+ },
+ ],
+ [
+ 'with type=new input',
+ {
+ input: { type: 'new', old_line: 3, new_line: 5 },
+ output: { [ATTR_TYPE]: 'new', [ATTR_LINE]: 5, [ATTR_OLD_LINE]: 3, [ATTR_NEW_LINE]: 5 },
+ },
+ ],
+ [
+ 'with type=bogus input',
+ {
+ input: { type: 'bogus', old_line: 3, new_line: 5 },
+ output: { [ATTR_TYPE]: 'new', [ATTR_LINE]: 5, [ATTR_OLD_LINE]: 3, [ATTR_NEW_LINE]: 5 },
+ },
+ ],
+ ])('%s', (desc, { input, output }) => {
+ expect(getInteropInlineAttributes(input)).toEqual(output);
+ });
+ });
+
+ describe('getInteropOldSideAttributes', () => {
+ it.each`
+ input | output
+ ${null} | ${null}
+ ${{ old_line: 2 }} | ${{ [ATTR_TYPE]: 'old', [ATTR_LINE]: 2, [ATTR_OLD_LINE]: 2 }}
+ `('with input=$input', ({ input, output }) => {
+ expect(getInteropOldSideAttributes(input)).toEqual(output);
+ });
+ });
+
+ describe('getInteropNewSideAttributes', () => {
+ it.each`
+ input | output
+ ${null} | ${null}
+ ${{ new_line: 2 }} | ${{ [ATTR_TYPE]: 'new', [ATTR_LINE]: 2, [ATTR_NEW_LINE]: 2 }}
+ `('with input=$input', ({ input, output }) => {
+ expect(getInteropNewSideAttributes(input)).toEqual(output);
+ });
+ });
+});
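
These utilities pair with the `findInteropAttributes` assertions in the row specs earlier in this diff. A hypothetical cell-component sketch of how the returned map (presumably keyed by the `data-interop-*` attribute names those specs read back) could be consumed, spread onto the element with `v-bind`:

import { getInteropOldSideAttributes } from '~/diffs/utils/interoperability';

// Hypothetical component; the real diff row components bind the same maps.
export default {
  props: {
    line: { type: Object, required: true },
  },
  computed: {
    interopAttrs() {
      // null when the line has no old side, otherwise the data-interop-* map.
      return getInteropOldSideAttributes(this.line);
    },
  },
  template: '<td class="line_content left-side" v-bind="interopAttrs"><slot></slot></td>',
};
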
diff --git a/spec/frontend/editor/editor_lite_extension_base_spec.js b/spec/frontend/editor/editor_lite_extension_base_spec.js
index 5490e9dc7b5..1ae8c70c741 100644
--- a/spec/frontend/editor/editor_lite_extension_base_spec.js
+++ b/spec/frontend/editor/editor_lite_extension_base_spec.js
@@ -1,44 +1,247 @@
-import { ERROR_INSTANCE_REQUIRED_FOR_EXTENSION } from '~/editor/constants';
+import { Range } from 'monaco-editor';
+import { useFakeRequestAnimationFrame } from 'helpers/fake_request_animation_frame';
+import {
+ ERROR_INSTANCE_REQUIRED_FOR_EXTENSION,
+ EDITOR_TYPE_CODE,
+ EDITOR_TYPE_DIFF,
+} from '~/editor/constants';
import { EditorLiteExtension } from '~/editor/extensions/editor_lite_extension_base';
describe('The basis for an Editor Lite extension', () => {
+ const defaultLine = 3;
let ext;
+ let event;
+
const defaultOptions = { foo: 'bar' };
+ const findLine = (num) => {
+ return document.querySelector(`.line-numbers:nth-child(${num})`);
+ };
+ const generateLines = () => {
+ let res = '';
+ for (let line = 1, lines = 5; line <= lines; line += 1) {
+ res += `<div class="line-numbers">${line}</div>`;
+ }
+ return res;
+ };
+ const generateEventMock = ({ line = defaultLine, el = null } = {}) => {
+ return {
+ target: {
+ element: el || findLine(line),
+ position: {
+ lineNumber: line,
+ },
+ },
+ };
+ };
+
+ beforeEach(() => {
+ setFixtures(generateLines());
+ event = generateEventMock();
+ });
+
+ afterEach(() => {
+ jest.clearAllMocks();
+ });
+
+ describe('constructor', () => {
+ it.each`
+ description | instance | options
+ ${'accepts configuration options and instance'} | ${{}} | ${defaultOptions}
+ ${'leaves instance intact if no options are passed'} | ${{}} | ${undefined}
+ ${'does not fail if both instance and the options are omitted'} | ${undefined} | ${undefined}
+ ${'throws if only options are passed'} | ${undefined} | ${defaultOptions}
+ `('$description', ({ instance, options } = {}) => {
+ const originalInstance = { ...instance };
- it.each`
- description | instance | options
- ${'accepts configuration options and instance'} | ${{}} | ${defaultOptions}
- ${'leaves instance intact if no options are passed'} | ${{}} | ${undefined}
- ${'does not fail if both instance and the options are omitted'} | ${undefined} | ${undefined}
- ${'throws if only options are passed'} | ${undefined} | ${defaultOptions}
- `('$description', ({ instance, options } = {}) => {
- const originalInstance = { ...instance };
-
- if (instance) {
- if (options) {
- Object.entries(options).forEach((prop) => {
- expect(instance[prop]).toBeUndefined();
- });
- // Both instance and options are passed
- ext = new EditorLiteExtension({ instance, ...options });
- Object.entries(options).forEach(([prop, value]) => {
- expect(ext[prop]).toBeUndefined();
- expect(instance[prop]).toBe(value);
- });
+ if (instance) {
+ if (options) {
+ Object.entries(options).forEach((prop) => {
+ expect(instance[prop]).toBeUndefined();
+ });
+ // Both instance and options are passed
+ ext = new EditorLiteExtension({ instance, ...options });
+ Object.entries(options).forEach(([prop, value]) => {
+ expect(ext[prop]).toBeUndefined();
+ expect(instance[prop]).toBe(value);
+ });
+ } else {
+ ext = new EditorLiteExtension({ instance });
+ expect(instance).toEqual(originalInstance);
+ }
+ } else if (options) {
+ // Options are passed without instance
+ expect(() => {
+ ext = new EditorLiteExtension({ ...options });
+ }).toThrow(ERROR_INSTANCE_REQUIRED_FOR_EXTENSION);
} else {
- ext = new EditorLiteExtension({ instance });
- expect(instance).toEqual(originalInstance);
+ // Neither options nor instance are passed
+ expect(() => {
+ ext = new EditorLiteExtension();
+ }).not.toThrow();
}
- } else if (options) {
- // Options are passed without instance
- expect(() => {
- ext = new EditorLiteExtension({ ...options });
- }).toThrow(ERROR_INSTANCE_REQUIRED_FOR_EXTENSION);
- } else {
- // Neither options nor instance are passed
- expect(() => {
- ext = new EditorLiteExtension();
- }).not.toThrow();
- }
+ });
+
+ it('initializes the line highlighting', () => {
+ const spy = jest.spyOn(EditorLiteExtension, 'highlightLines');
+ ext = new EditorLiteExtension({ instance: {} });
+ expect(spy).toHaveBeenCalled();
+ });
+
+ it('sets up the line linking for code instance', () => {
+ const spy = jest.spyOn(EditorLiteExtension, 'setupLineLinking');
+ const instance = {
+ getEditorType: jest.fn().mockReturnValue(EDITOR_TYPE_CODE),
+ onMouseMove: jest.fn(),
+ onMouseDown: jest.fn(),
+ };
+ ext = new EditorLiteExtension({ instance });
+ expect(spy).toHaveBeenCalledWith(instance);
+ });
+
+ it('does not set up the line linking for diff instance', () => {
+ const spy = jest.spyOn(EditorLiteExtension, 'setupLineLinking');
+ const instance = {
+ getEditorType: jest.fn().mockReturnValue(EDITOR_TYPE_DIFF),
+ };
+ ext = new EditorLiteExtension({ instance });
+ expect(spy).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('highlightLines', () => {
+ const revealSpy = jest.fn();
+ const decorationsSpy = jest.fn();
+ const instance = {
+ revealLineInCenter: revealSpy,
+ deltaDecorations: decorationsSpy,
+ };
+ const defaultDecorationOptions = { isWholeLine: true, className: 'active-line-text' };
+
+ useFakeRequestAnimationFrame();
+
+ beforeEach(() => {
+ delete window.location;
+ window.location = new URL(`https://localhost`);
+ });
+
+ afterEach(() => {
+ window.location.hash = '';
+ });
+
+ it.each`
+ desc | hash | shouldReveal | expectedRange
+ ${'properly decorates a single line'} | ${'#L10'} | ${true} | ${[10, 1, 10, 1]}
+ ${'properly decorates multiple lines'} | ${'#L7-42'} | ${true} | ${[7, 1, 42, 1]}
+ ${'correctly highlights if lines are reversed'} | ${'#L42-7'} | ${true} | ${[7, 1, 42, 1]}
+ ${'highlights one line if start/end are the same'} | ${'#L7-7'} | ${true} | ${[7, 1, 7, 1]}
+ ${'does not highlight if there is no hash'} | ${''} | ${false} | ${null}
+ ${'does not highlight if the hash is undefined'} | ${undefined} | ${false} | ${null}
+ ${'does not highlight if hash is incomplete 1'} | ${'#L'} | ${false} | ${null}
+ ${'does not highlight if hash is incomplete 2'} | ${'#L-'} | ${false} | ${null}
+ `('$desc', ({ hash, shouldReveal, expectedRange } = {}) => {
+ window.location.hash = hash;
+ EditorLiteExtension.highlightLines(instance);
+ if (!shouldReveal) {
+ expect(revealSpy).not.toHaveBeenCalled();
+ expect(decorationsSpy).not.toHaveBeenCalled();
+ } else {
+ expect(revealSpy).toHaveBeenCalledWith(expectedRange[0]);
+ expect(decorationsSpy).toHaveBeenCalledWith(
+ [],
+ [
+ {
+ range: new Range(...expectedRange),
+ options: defaultDecorationOptions,
+ },
+ ],
+ );
+ }
+ });
+
+ it('stores the line decorations on the instance', () => {
+ decorationsSpy.mockReturnValue('foo');
+ window.location.hash = '#L10';
+ expect(instance.lineDecorations).toBeUndefined();
+ EditorLiteExtension.highlightLines(instance);
+ expect(instance.lineDecorations).toBe('foo');
+ });
+ });
+
+ describe('setupLineLinking', () => {
+ const instance = {
+ onMouseMove: jest.fn(),
+ onMouseDown: jest.fn(),
+ deltaDecorations: jest.fn(),
+ lineDecorations: 'foo',
+ };
+
+ beforeEach(() => {
+ EditorLiteExtension.onMouseMoveHandler(event); // generate the anchor
+ });
+
+ it.each`
+ desc | spy
+ ${'onMouseMove'} | ${instance.onMouseMove}
+ ${'onMouseDown'} | ${instance.onMouseDown}
+ `('sets up the $desc listener', ({ spy } = {}) => {
+ EditorLiteExtension.setupLineLinking(instance);
+ expect(spy).toHaveBeenCalled();
+ });
+
+ it.each`
+ desc | eventTrigger | shouldRemove
+ ${'does not remove the line decorations if the event is triggered on a wrong node'} | ${null} | ${false}
+ ${'removes existing line decorations when clicking a line number'} | ${'.link-anchor'} | ${true}
+ `('$desc', ({ eventTrigger, shouldRemove } = {}) => {
+ event = generateEventMock({ el: eventTrigger ? document.querySelector(eventTrigger) : null });
+ instance.onMouseDown.mockImplementation((fn) => {
+ fn(event);
+ });
+
+ EditorLiteExtension.setupLineLinking(instance);
+ if (shouldRemove) {
+ expect(instance.deltaDecorations).toHaveBeenCalledWith(instance.lineDecorations, []);
+ } else {
+ expect(instance.deltaDecorations).not.toHaveBeenCalled();
+ }
+ });
+ });
+
+ describe('onMouseMoveHandler', () => {
+ it('stops propagation for contextmenu event on the generated anchor', () => {
+ EditorLiteExtension.onMouseMoveHandler(event);
+ const anchor = findLine(defaultLine).querySelector('a');
+ const contextMenuEvent = new Event('contextmenu');
+
+ jest.spyOn(contextMenuEvent, 'stopPropagation');
+ anchor.dispatchEvent(contextMenuEvent);
+
+ expect(contextMenuEvent.stopPropagation).toHaveBeenCalled();
+ });
+
+ it('creates an anchor if it does not exist yet', () => {
+ expect(findLine(defaultLine).querySelector('a')).toBe(null);
+ EditorLiteExtension.onMouseMoveHandler(event);
+ expect(findLine(defaultLine).querySelector('a')).not.toBe(null);
+ });
+
+ it('does not create a new anchor if it exists', () => {
+ EditorLiteExtension.onMouseMoveHandler(event);
+ expect(findLine(defaultLine).querySelector('a')).not.toBe(null);
+
+ EditorLiteExtension.createAnchor = jest.fn();
+ EditorLiteExtension.onMouseMoveHandler(event);
+ expect(EditorLiteExtension.createAnchor).not.toHaveBeenCalled();
+ expect(findLine(defaultLine).querySelectorAll('a')).toHaveLength(1);
+ });
+
+ it('does not create a link if the event is triggered on a wrong node', () => {
+ setFixtures('<div class="wrong-class">3</div>');
+ EditorLiteExtension.createAnchor = jest.fn();
+ const wrongEvent = generateEventMock({ el: document.querySelector('.wrong-class') });
+
+ EditorLiteExtension.onMouseMoveHandler(wrongEvent);
+ expect(EditorLiteExtension.createAnchor).not.toHaveBeenCalled();
+ });
});
});
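
The `highlightLines` table above implies a small amount of hash parsing: `#L10` highlights one line, `#L7-42` a range, and reversed bounds are normalised. A hypothetical `parseLineHash` sketch that would produce the `[startLine, 1, endLine, 1]` ranges the spec expects; the extension itself may structure this differently:

// Hypothetical helper illustrating the hash handling exercised above.
const parseLineHash = (hash = '') => {
  const match = /^#L(\d+)(?:-(\d+))?$/.exec(hash);

  if (!match) {
    return null;
  }

  const start = parseInt(match[1], 10);
  const end = match[2] ? parseInt(match[2], 10) : start;

  // Normalise reversed ranges such as #L42-7.
  return [Math.min(start, end), 1, Math.max(start, end), 1];
};

// parseLineHash('#L10')   => [10, 1, 10, 1]
// parseLineHash('#L42-7') => [7, 1, 42, 1]
// parseLineHash('#L-')    => null
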
diff --git a/spec/frontend/emoji/awards_app/store/actions_spec.js b/spec/frontend/emoji/awards_app/store/actions_spec.js
new file mode 100644
index 00000000000..dac4fded260
--- /dev/null
+++ b/spec/frontend/emoji/awards_app/store/actions_spec.js
@@ -0,0 +1,155 @@
+import * as Sentry from '@sentry/browser';
+import MockAdapter from 'axios-mock-adapter';
+import testAction from 'helpers/vuex_action_helper';
+import * as actions from '~/emoji/awards_app/store/actions';
+import axios from '~/lib/utils/axios_utils';
+
+jest.mock('@sentry/browser');
+
+describe('Awards app actions', () => {
+ describe('setInitialData', () => {
+ it('commits SET_INITIAL_DATA', async () => {
+ await testAction(
+ actions.setInitialData,
+ { path: 'https://gitlab.com' },
+ {},
+ [{ type: 'SET_INITIAL_DATA', payload: { path: 'https://gitlab.com' } }],
+ [],
+ );
+ });
+ });
+
+ describe('fetchAwards', () => {
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ describe('success', () => {
+ beforeEach(() => {
+ mock
+ .onGet('/awards', { params: { per_page: 100, page: '1' } })
+ .reply(200, ['thumbsup'], { 'x-next-page': '2' });
+ mock.onGet('/awards', { params: { per_page: 100, page: '2' } }).reply(200, ['thumbsdown']);
+ });
+
+ it('commits FETCH_AWARDS_SUCCESS', async () => {
+ await testAction(
+ actions.fetchAwards,
+ '1',
+ { path: '/awards' },
+ [{ type: 'FETCH_AWARDS_SUCCESS', payload: ['thumbsup'] }],
+ [{ type: 'fetchAwards', payload: '2' }],
+ );
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onGet('/awards').reply(500);
+ });
+
+ it('calls Sentry.captureException', async () => {
+ await testAction(actions.fetchAwards, null, { path: '/awards' }, [], [], () => {
+ expect(Sentry.captureException).toHaveBeenCalled();
+ });
+ });
+ });
+ });
+
+ describe('toggleAward', () => {
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+    describe('adding a new award', () => {
+ describe('success', () => {
+ beforeEach(() => {
+ mock.onPost('/awards').reply(200, { id: 1 });
+ });
+
+ it('commits ADD_NEW_AWARD', async () => {
+ testAction(actions.toggleAward, null, { path: '/awards', awards: [] }, [
+ { type: 'ADD_NEW_AWARD', payload: { id: 1 } },
+ ]);
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onPost('/awards').reply(500);
+ });
+
+ it('calls Sentry.captureException', async () => {
+ await testAction(
+ actions.toggleAward,
+ null,
+ { path: '/awards', awards: [] },
+ [],
+ [],
+ () => {
+ expect(Sentry.captureException).toHaveBeenCalled();
+ },
+ );
+ });
+ });
+ });
+
+    describe('removing an award', () => {
+ const mockData = { id: 1, name: 'thumbsup', user: { id: 1 } };
+
+ describe('success', () => {
+ beforeEach(() => {
+ mock.onDelete('/awards/1').reply(200);
+ });
+
+ it('commits REMOVE_AWARD', async () => {
+ testAction(
+ actions.toggleAward,
+ 'thumbsup',
+ {
+ path: '/awards',
+ currentUserId: 1,
+ awards: [mockData],
+ },
+ [{ type: 'REMOVE_AWARD', payload: 1 }],
+ );
+ });
+ });
+
+ describe('error', () => {
+ beforeEach(() => {
+ mock.onDelete('/awards/1').reply(500);
+ });
+
+ it('calls Sentry.captureException', async () => {
+ await testAction(
+ actions.toggleAward,
+ 'thumbsup',
+ {
+ path: '/awards',
+ currentUserId: 1,
+ awards: [mockData],
+ },
+ [],
+ [],
+ () => {
+ expect(Sentry.captureException).toHaveBeenCalled();
+ },
+ );
+ });
+ });
+ });
+ });
+});
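
The success case above encodes the pagination contract for `fetchAwards`: commit the current page's results, then dispatch itself with the `x-next-page` header until it is absent. A hedged sketch of an action matching that contract; the production action in `~/emoji/awards_app/store/actions` may differ, for example in how errors are reported:

import * as Sentry from '@sentry/browser';
import axios from '~/lib/utils/axios_utils';

export const fetchAwards = async ({ commit, dispatch, state }, page = '1') => {
  try {
    const { data, headers } = await axios.get(state.path, {
      params: { per_page: 100, page },
    });

    commit('FETCH_AWARDS_SUCCESS', data);

    // Keep fetching while the API reports another page.
    const nextPage = headers['x-next-page'];
    if (nextPage) {
      dispatch('fetchAwards', nextPage);
    }
  } catch (error) {
    Sentry.captureException(error);
  }
};
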
diff --git a/spec/frontend/emoji/awards_app/store/mutations_spec.js b/spec/frontend/emoji/awards_app/store/mutations_spec.js
new file mode 100644
index 00000000000..dd32c3a4445
--- /dev/null
+++ b/spec/frontend/emoji/awards_app/store/mutations_spec.js
@@ -0,0 +1,65 @@
+import {
+ SET_INITIAL_DATA,
+ FETCH_AWARDS_SUCCESS,
+ ADD_NEW_AWARD,
+ REMOVE_AWARD,
+} from '~/emoji/awards_app/store/mutation_types';
+import mutations from '~/emoji/awards_app/store/mutations';
+
+describe('Awards app mutations', () => {
+ describe('SET_INITIAL_DATA', () => {
+ it('sets initial data', () => {
+ const state = {};
+
+ mutations[SET_INITIAL_DATA](state, {
+ path: 'https://gitlab.com',
+ currentUserId: 1,
+ canAwardEmoji: true,
+ });
+
+ expect(state).toEqual({
+ path: 'https://gitlab.com',
+ currentUserId: 1,
+ canAwardEmoji: true,
+ });
+ });
+ });
+
+ describe('FETCH_AWARDS_SUCCESS', () => {
+ it('sets awards', () => {
+ const state = { awards: [] };
+
+ mutations[FETCH_AWARDS_SUCCESS](state, ['thumbsup']);
+
+ expect(state.awards).toEqual(['thumbsup']);
+ });
+
+ it('does not overwrite previously set awards', () => {
+ const state = { awards: ['thumbsup'] };
+
+ mutations[FETCH_AWARDS_SUCCESS](state, ['thumbsdown']);
+
+ expect(state.awards).toEqual(['thumbsup', 'thumbsdown']);
+ });
+ });
+
+ describe('ADD_NEW_AWARD', () => {
+ it('adds new award to array', () => {
+ const state = { awards: ['thumbsup'] };
+
+ mutations[ADD_NEW_AWARD](state, 'thumbsdown');
+
+ expect(state.awards).toEqual(['thumbsup', 'thumbsdown']);
+ });
+ });
+
+ describe('REMOVE_AWARD', () => {
+ it('removes award from array', () => {
+ const state = { awards: [{ id: 1 }, { id: 2 }] };
+
+ mutations[REMOVE_AWARD](state, 1);
+
+ expect(state.awards).toEqual([{ id: 2 }]);
+ });
+ });
+});
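
The second `FETCH_AWARDS_SUCCESS` example above is the interesting one: freshly fetched awards are appended rather than replacing earlier pages. A minimal mutations sketch consistent with all four expectations above:

import {
  SET_INITIAL_DATA,
  FETCH_AWARDS_SUCCESS,
  ADD_NEW_AWARD,
  REMOVE_AWARD,
} from '~/emoji/awards_app/store/mutation_types';

export default {
  [SET_INITIAL_DATA](state, { path, currentUserId, canAwardEmoji }) {
    Object.assign(state, { path, currentUserId, canAwardEmoji });
  },
  [FETCH_AWARDS_SUCCESS](state, awards) {
    // Append so that paginated fetches accumulate instead of overwriting.
    state.awards = [...state.awards, ...awards];
  },
  [ADD_NEW_AWARD](state, award) {
    state.awards = [...state.awards, award];
  },
  [REMOVE_AWARD](state, awardId) {
    state.awards = state.awards.filter(({ id }) => id !== awardId);
  },
};
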
diff --git a/spec/frontend/environments/enable_review_app_modal_spec.js b/spec/frontend/environments/enable_review_app_modal_spec.js
index f5063cff620..9a3f13f19d5 100644
--- a/spec/frontend/environments/enable_review_app_modal_spec.js
+++ b/spec/frontend/environments/enable_review_app_modal_spec.js
@@ -1,4 +1,5 @@
import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import EnableReviewAppButton from '~/environments/components/enable_review_app_modal.vue';
import ModalCopyButton from '~/vue_shared/components/modal_copy_button.vue';
@@ -11,15 +12,25 @@ describe('Enable Review App Button', () => {
describe('renders the modal', () => {
beforeEach(() => {
- wrapper = shallowMount(EnableReviewAppButton, {
- propsData: {
- modalId: 'fake-id',
- },
- });
+ wrapper = extendedWrapper(
+ shallowMount(EnableReviewAppButton, {
+ propsData: {
+ modalId: 'fake-id',
+ },
+ provide: {
+ defaultBranchName: 'main',
+ },
+ }),
+ );
+ });
+
+ it('renders the defaultBranchName copy', () => {
+ const findCopyString = () => wrapper.findByTestId('enable-review-app-copy-string');
+ expect(findCopyString().text()).toContain('- main');
});
it('renders the copyToClipboard button', () => {
- expect(wrapper.find(ModalCopyButton).exists()).toBe(true);
+ expect(wrapper.findComponent(ModalCopyButton).exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index c6ce236af01..c0c542ae587 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -52,7 +52,6 @@ describe('ErrorTrackingList', () => {
beforeEach(() => {
actions = {
- getErrorList: () => {},
startPolling: jest.fn(),
restartPolling: jest.fn().mockName('restartPolling'),
addRecentSearch: jest.fn(),
diff --git a/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js b/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
index 7ebaf0c3f2a..f02a261f323 100644
--- a/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
+++ b/spec/frontend/error_tracking_settings/components/error_tracking_form_spec.js
@@ -44,13 +44,13 @@ describe('error tracking settings form', () => {
const pageText = wrapper.text();
expect(pageText).toContain(
- "If you self-host Sentry, enter the full URL of your Sentry instance. If you're using Sentry's hosted solution, enter https://sentry.io",
+ "If you self-host Sentry, enter your Sentry instance's full URL. If you use Sentry's hosted solution, enter https://sentry.io",
);
expect(pageText).toContain(
- "After adding your Auth Token, use the 'Connect' button to load projects",
+ 'After adding your Auth Token, select the Connect button to load projects.',
);
- expect(pageText).not.toContain('Connection has failed. Re-check Auth Token and try again');
+ expect(pageText).not.toContain('Connection failed. Check Auth Token and try again.');
expect(wrapper.findAll(GlFormInput).at(0).attributes('placeholder')).toContain(
'https://mysentryserver.com',
);
@@ -80,9 +80,7 @@ describe('error tracking settings form', () => {
});
it('does not show an error', () => {
- expect(wrapper.text()).not.toContain(
- 'Connection has failed. Re-check Auth Token and try again',
- );
+ expect(wrapper.text()).not.toContain('Connection failed. Check Auth Token and try again.');
});
});
@@ -96,7 +94,7 @@ describe('error tracking settings form', () => {
});
it('shows an error', () => {
- expect(wrapper.text()).toContain('Connection has failed. Re-check Auth Token and try again');
+ expect(wrapper.text()).toContain('Connection failed. Check Auth Token and try again.');
});
});
});
diff --git a/spec/frontend/error_tracking_settings/store/getters_spec.js b/spec/frontend/error_tracking_settings/store/getters_spec.js
index b135fdee40b..4bb8d38e294 100644
--- a/spec/frontend/error_tracking_settings/store/getters_spec.js
+++ b/spec/frontend/error_tracking_settings/store/getters_spec.js
@@ -78,7 +78,7 @@ describe('Error Tracking Settings - Getters', () => {
describe('projectSelectionLabel', () => {
it('should show the correct message when the token is empty', () => {
expect(getters.projectSelectionLabel(state)).toEqual(
- 'To enable project selection, enter a valid Auth Token',
+ 'To enable project selection, enter a valid Auth Token.',
);
});
@@ -86,7 +86,7 @@ describe('Error Tracking Settings - Getters', () => {
state.token = 'test-token';
expect(getters.projectSelectionLabel(state)).toEqual(
- "Click 'Connect' to re-establish the connection to Sentry and activate the dropdown.",
+ 'Click Connect to reestablish the connection to Sentry and activate the dropdown.',
);
});
});
diff --git a/spec/frontend/experimentation/components/experiment_spec.js b/spec/frontend/experimentation/components/experiment_spec.js
new file mode 100644
index 00000000000..dbc7da5c535
--- /dev/null
+++ b/spec/frontend/experimentation/components/experiment_spec.js
@@ -0,0 +1,72 @@
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import ExperimentComponent from '~/experimentation/components/experiment.vue';
+
+const defaultProps = { name: 'experiment_name' };
+const defaultSlots = {
+ candidate: `<p>Candidate</p>`,
+ control: `<p>Control</p>`,
+};
+
+describe('ExperimentComponent', () => {
+ const oldGon = window.gon;
+ let wrapper;
+
+ const createComponent = (propsData = defaultProps, slots = defaultSlots) => {
+ wrapper = extendedWrapper(shallowMount(ExperimentComponent, { propsData, slots }));
+ };
+
+ const mockVariant = (expectedVariant) => {
+ window.gon = { experiment: { experiment_name: { variant: expectedVariant } } };
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ window.gon = oldGon;
+ });
+
+  describe('when variant and experiment are set', () => {
+ it('renders control when it is the active variant', () => {
+ mockVariant('control');
+
+ createComponent();
+
+ expect(wrapper.text()).toBe('Control');
+ });
+
+ it('renders candidate when it is the active variant', () => {
+ mockVariant('candidate');
+
+ createComponent();
+
+ expect(wrapper.text()).toBe('Candidate');
+ });
+ });
+
+ describe('when variant or experiment is not set', () => {
+ it('renders the control slot when no variant is defined', () => {
+ mockVariant(undefined);
+
+ createComponent();
+
+ expect(wrapper.text()).toBe('Control');
+ });
+
+    it('renders nothing when behavior is not set for the variant', () => {
+ mockVariant('non-existing-variant');
+
+ createComponent(defaultProps, { control: `<p>First</p>`, other: `<p>Other</p>` });
+
+ expect(wrapper.text()).toBe('');
+ });
+
+ it('renders nothing when there are no slots', () => {
+ mockVariant('control');
+
+ createComponent(defaultProps, {});
+
+ expect(wrapper.text()).toBe('');
+ });
+ });
+});
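
The component specs above pin down the lookup chain: the active variant comes from `window.gon.experiment[name].variant`, the matching named slot is rendered, `control` is the fallback when no variant is defined, and nothing renders when no slot matches. A hedged sketch of that resolution logic; the actual component at `~/experimentation/components/experiment.vue` may be implemented differently:

// Sketch only: resolves the variant from gon and renders the matching slot.
export default {
  props: {
    name: { type: String, required: true },
  },
  computed: {
    variant() {
      const experiments = (window.gon && window.gon.experiment) || {};
      const { variant } = experiments[this.name] || {};

      // Control is the fallback when no variant is defined for this experiment.
      return variant || 'control';
    },
  },
  render(h) {
    const slotNodes = this.$slots[this.variant];

    // Render nothing when no slot matches the resolved variant.
    return slotNodes ? h('span', slotNodes) : null;
  },
};
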
diff --git a/spec/frontend/experimentation/utils_spec.js b/spec/frontend/experimentation/utils_spec.js
index 87dd2d595ba..ec09bbab349 100644
--- a/spec/frontend/experimentation/utils_spec.js
+++ b/spec/frontend/experimentation/utils_spec.js
@@ -1,38 +1,97 @@
+import { assignGitlabExperiment } from 'helpers/experimentation_helper';
+import { DEFAULT_VARIANT, CANDIDATE_VARIANT } from '~/experimentation/constants';
import * as experimentUtils from '~/experimentation/utils';
-const TEST_KEY = 'abc';
-
describe('experiment Utilities', () => {
- const oldGon = window.gon;
-
- afterEach(() => {
- window.gon = oldGon;
- });
+ const TEST_KEY = 'abc';
describe('getExperimentData', () => {
- it.each`
- gon | input | output
- ${{ experiment: { [TEST_KEY]: '_data_' } }} | ${[TEST_KEY]} | ${'_data_'}
- ${{}} | ${[TEST_KEY]} | ${undefined}
- `('with input=$input and gon=$gon, returns $output', ({ gon, input, output }) => {
- window.gon = gon;
+ describe.each`
+ gon | input | output
+ ${[TEST_KEY, '_data_']} | ${[TEST_KEY]} | ${{ variant: '_data_' }}
+ ${[]} | ${[TEST_KEY]} | ${undefined}
+ `('with input=$input and gon=$gon', ({ gon, input, output }) => {
+ assignGitlabExperiment(...gon);
- expect(experimentUtils.getExperimentData(...input)).toEqual(output);
+ it(`returns ${output}`, () => {
+ expect(experimentUtils.getExperimentData(...input)).toEqual(output);
+ });
});
});
describe('isExperimentVariant', () => {
+ describe.each`
+ gon | input | output
+ ${[TEST_KEY, DEFAULT_VARIANT]} | ${[TEST_KEY, DEFAULT_VARIANT]} | ${true}
+ ${[TEST_KEY, '_variant_name']} | ${[TEST_KEY, '_variant_name']} | ${true}
+ ${[TEST_KEY, '_variant_name']} | ${[TEST_KEY, '_bogus_name']} | ${false}
+ ${[TEST_KEY, '_variant_name']} | ${['boguskey', '_variant_name']} | ${false}
+ ${[]} | ${[TEST_KEY, '_variant_name']} | ${false}
+ `('with input=$input and gon=$gon', ({ gon, input, output }) => {
+ assignGitlabExperiment(...gon);
+
+ it(`returns ${output}`, () => {
+ expect(experimentUtils.isExperimentVariant(...input)).toEqual(output);
+ });
+ });
+ });
+
+ describe('experiment', () => {
+ const controlSpy = jest.fn();
+ const candidateSpy = jest.fn();
+ const getUpStandUpSpy = jest.fn();
+
+ const variants = {
+ use: controlSpy,
+ try: candidateSpy,
+ get_up_stand_up: getUpStandUpSpy,
+ };
+
+ describe('when there is no experiment data', () => {
+ it('calls control variant', () => {
+ experimentUtils.experiment('marley', variants);
+ expect(controlSpy).toHaveBeenCalled();
+ });
+ });
+
+ describe('when experiment variant is "control"', () => {
+ assignGitlabExperiment('marley', DEFAULT_VARIANT);
+
+ it('calls the control variant', () => {
+ experimentUtils.experiment('marley', variants);
+ expect(controlSpy).toHaveBeenCalled();
+ });
+ });
+
+ describe('when experiment variant is "candidate"', () => {
+ assignGitlabExperiment('marley', CANDIDATE_VARIANT);
+
+ it('calls the candidate variant', () => {
+ experimentUtils.experiment('marley', variants);
+ expect(candidateSpy).toHaveBeenCalled();
+ });
+ });
+
+ describe('when experiment variant is "get_up_stand_up"', () => {
+ assignGitlabExperiment('marley', 'get_up_stand_up');
+
+ it('calls the get-up-stand-up variant', () => {
+ experimentUtils.experiment('marley', variants);
+ expect(getUpStandUpSpy).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('getExperimentVariant', () => {
it.each`
- gon | input | output
- ${{ experiment: { [TEST_KEY]: { variant: 'control' } } }} | ${[TEST_KEY, 'control']} | ${true}
- ${{ experiment: { [TEST_KEY]: { variant: '_variant_name' } } }} | ${[TEST_KEY, '_variant_name']} | ${true}
- ${{ experiment: { [TEST_KEY]: { variant: '_variant_name' } } }} | ${[TEST_KEY, '_bogus_name']} | ${false}
- ${{ experiment: { [TEST_KEY]: { variant: '_variant_name' } } }} | ${['boguskey', '_variant_name']} | ${false}
- ${{}} | ${[TEST_KEY, '_variant_name']} | ${false}
+ gon | input | output
+ ${{ experiment: { [TEST_KEY]: { variant: DEFAULT_VARIANT } } }} | ${[TEST_KEY]} | ${DEFAULT_VARIANT}
+ ${{ experiment: { [TEST_KEY]: { variant: CANDIDATE_VARIANT } } }} | ${[TEST_KEY]} | ${CANDIDATE_VARIANT}
+ ${{}} | ${[TEST_KEY]} | ${DEFAULT_VARIANT}
`('with input=$input and gon=$gon, returns $output', ({ gon, input, output }) => {
window.gon = gon;
- expect(experimentUtils.isExperimentVariant(...input)).toEqual(output);
+ expect(experimentUtils.getExperimentVariant(...input)).toEqual(output);
});
});
});
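
The new `experiment` utility exercised above maps variant names to callbacks, with `use` standing in for the control behaviour and `try` for the candidate; any other key handles a named variant. A short usage sketch based on the spec's own variant table (the callback bodies here are placeholders):

import { experiment } from '~/experimentation/utils';

// Runs the callback matching the assigned variant for the 'marley' experiment.
experiment('marley', {
  use: () => console.log('control behaviour'),
  try: () => console.log('candidate behaviour'),
  get_up_stand_up: () => console.log('named variant behaviour'),
});
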
diff --git a/spec/frontend/feature_flags/components/form_spec.js b/spec/frontend/feature_flags/components/form_spec.js
index a05e23a4250..00d557c11cf 100644
--- a/spec/frontend/feature_flags/components/form_spec.js
+++ b/spec/frontend/feature_flags/components/form_spec.js
@@ -123,6 +123,10 @@ describe('feature flag form', () => {
});
});
+ it('has label', () => {
+ expect(findGlToggle().props('label')).toBe(Form.i18n.statusLabel);
+ });
+
it('should be disabled if the feature flag is not active', (done) => {
wrapper.setProps({ active: false });
wrapper.vm.$nextTick(() => {
diff --git a/spec/frontend/feature_highlight/feature_highlight_popover_spec.js b/spec/frontend/feature_highlight/feature_highlight_popover_spec.js
index 1d558366ce8..e5e3974e103 100644
--- a/spec/frontend/feature_highlight/feature_highlight_popover_spec.js
+++ b/spec/frontend/feature_highlight/feature_highlight_popover_spec.js
@@ -41,7 +41,6 @@ describe('feature_highlight/feature_highlight_popover', () => {
expect(findPopover().props()).toMatchObject({
target: POPOVER_TARGET_ID,
cssClasses: ['feature-highlight-popover'],
- triggers: 'hover',
container: 'body',
placement: 'right',
boundary: 'viewport',
diff --git a/spec/frontend/fixtures/api_markdown.rb b/spec/frontend/fixtures/api_markdown.rb
new file mode 100644
index 00000000000..e012d922aad
--- /dev/null
+++ b/spec/frontend/fixtures/api_markdown.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::MergeRequests, '(JavaScript fixtures)', type: :request do
+ include ApiHelpers
+ include JavaScriptFixturesHelpers
+
+ fixture_subdir = 'api/markdown'
+
+ before(:all) do
+ clean_frontend_fixtures(fixture_subdir)
+ end
+
+ markdown_examples = begin
+ yaml_file_path = File.expand_path('api_markdown.yml', __dir__)
+ yaml = File.read(yaml_file_path)
+ YAML.safe_load(yaml, symbolize_names: true)
+ end
+
+ markdown_examples.each do |markdown_example|
+ name = markdown_example.fetch(:name)
+
+ context "for #{name}" do
+ let(:markdown) { markdown_example.fetch(:markdown) }
+
+ it "#{fixture_subdir}/#{name}.json" do
+ post api("/markdown"), params: { text: markdown }
+
+ expect(response).to be_successful
+ end
+ end
+ end
+end
diff --git a/spec/frontend/fixtures/api_markdown.yml b/spec/frontend/fixtures/api_markdown.yml
new file mode 100644
index 00000000000..a83d5374e2c
--- /dev/null
+++ b/spec/frontend/fixtures/api_markdown.yml
@@ -0,0 +1,50 @@
+# This data file drives the specs in
+# spec/frontend/fixtures/api_markdown.rb and
+# spec/frontend/rich_text_editor/extensions/markdown_processing_spec.js
+---
+- name: bold
+ markdown: '**bold**'
+- name: emphasis
+ markdown: '_emphasized text_'
+- name: inline_code
+ markdown: '`code`'
+- name: link
+ markdown: '[GitLab](https://gitlab.com)'
+- name: code_block
+ markdown: |-
+ ```javascript
+ console.log('hello world')
+ ```
+- name: headings
+ markdown: |-
+ # Heading 1
+
+ ## Heading 2
+
+ ### Heading 3
+
+ #### Heading 4
+
+ ##### Heading 5
+
+ ###### Heading 6
+- name: blockquote
+ markdown: |-
+ > This is a blockquote
+ >
+ > This is another one
+- name: thematic_break
+ markdown: |-
+ ---
+- name: bullet_list
+ markdown: |-
+ * list item 1
+ * list item 2
+ * embedded list item 3
+- name: ordered_list
+ markdown: |-
+ 1. list item 1
+ 2. list item 2
+ 3. list item 3
+- name: image
+ markdown: '![alt text](https://gitlab.com/logo.png)'
diff --git a/spec/frontend/fixtures/autocomplete.rb b/spec/frontend/fixtures/autocomplete.rb
new file mode 100644
index 00000000000..8983e241aa5
--- /dev/null
+++ b/spec/frontend/fixtures/autocomplete.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::AutocompleteController, '(JavaScript fixtures)', type: :controller do
+ include JavaScriptFixturesHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group, name: 'frontend-fixtures') }
+
+ let(:project) { create(:project, namespace: group, path: 'autocomplete-project') }
+ let(:merge_request) { create(:merge_request, source_project: project, author: user) }
+
+ before(:all) do
+ clean_frontend_fixtures('autocomplete/')
+ end
+
+ before do
+ group.add_owner(user)
+ sign_in(user)
+ end
+
+ it 'autocomplete/users.json' do
+ 20.times do
+ user = create(:user)
+ project.add_developer(user)
+ end
+
+ get :users,
+ format: :json,
+ params: {
+ project_id: project.id,
+ active: true,
+ current_user: true,
+ author: merge_request.author.id,
+ merge_request_iid: merge_request.iid
+ }
+
+ expect(response).to be_successful
+ end
+end
diff --git a/spec/frontend/fixtures/issues.rb b/spec/frontend/fixtures/issues.rb
index d6f6ed97626..a027247bd0d 100644
--- a/spec/frontend/fixtures/issues.rb
+++ b/spec/frontend/fixtures/issues.rb
@@ -16,8 +16,6 @@ RSpec.describe Projects::IssuesController, '(JavaScript fixtures)', type: :contr
end
before do
- stub_feature_flags(boards_filtered_search: false)
-
project.add_maintainer(user)
sign_in(user)
end
diff --git a/spec/frontend/fixtures/merge_requests_diffs.rb b/spec/frontend/fixtures/merge_requests_diffs.rb
index 5ad4176f7b8..edf1fcf3c0a 100644
--- a/spec/frontend/fixtures/merge_requests_diffs.rb
+++ b/spec/frontend/fixtures/merge_requests_diffs.rb
@@ -25,6 +25,10 @@ RSpec.describe Projects::MergeRequests::DiffsController, '(JavaScript fixtures)'
end
before do
+ # Create a user that matches the project.commit author
+ # This is so that the "author" information will be populated
+ create(:user, email: project.commit.author_email, name: project.commit.author_name)
+
sign_in(user)
end
@@ -33,17 +37,21 @@ RSpec.describe Projects::MergeRequests::DiffsController, '(JavaScript fixtures)'
end
it 'merge_request_diffs/with_commit.json' do
- # Create a user that matches the project.commit author
- # This is so that the "author" information will be populated
- create(:user, email: project.commit.author_email, name: project.commit.author_name)
-
render_merge_request(merge_request, commit_id: project.commit.sha)
end
+ it 'merge_request_diffs/diffs_metadata.json' do
+ render_merge_request(merge_request, action: :diffs_metadata)
+ end
+
+ it 'merge_request_diffs/diffs_batch.json' do
+ render_merge_request(merge_request, action: :diffs_batch, page: 1, per_page: 30)
+ end
+
private
- def render_merge_request(merge_request, view: 'inline', **extra_params)
- get :show, params: {
+ def render_merge_request(merge_request, action: :show, view: 'inline', **extra_params)
+ get action, params: {
namespace_id: project.namespace.to_param,
project_id: project,
id: merge_request.to_param,
diff --git a/spec/frontend/fixtures/static/mini_dropdown_graph.html b/spec/frontend/fixtures/static/mini_dropdown_graph.html
deleted file mode 100644
index cde811d4f52..00000000000
--- a/spec/frontend/fixtures/static/mini_dropdown_graph.html
+++ /dev/null
@@ -1,13 +0,0 @@
-<div class="js-builds-dropdown-tests dropdown dropdown" data-testid="widget-mini-pipeline-graph">
- <button class="js-builds-dropdown-button" data-toggle="dropdown" data-stage-endpoint="foobar">
- Dropdown
- </button>
- <ul class="dropdown-menu mini-pipeline-graph-dropdown-menu js-builds-dropdown-container">
- <li class="js-builds-dropdown-list scrollable-menu">
- <ul></ul>
- </li>
- <li class="js-builds-dropdown-loading hidden">
- <span class="gl-spinner"></span>
- </li>
- </ul>
-</div>
diff --git a/spec/frontend/fixtures/static/whats_new_notification.html b/spec/frontend/fixtures/static/whats_new_notification.html
index 30d5eea91cc..3b4dbdf7d36 100644
--- a/spec/frontend/fixtures/static/whats_new_notification.html
+++ b/spec/frontend/fixtures/static/whats_new_notification.html
@@ -1,5 +1,5 @@
<div class='whats-new-notification-fixture-root'>
- <div class='app' data-storage-key='storage-key'></div>
+ <div class='app' data-version-digest='version-digest'></div>
<div class='header-help'>
<div class='js-whats-new-notification-count'></div>
</div>
diff --git a/spec/frontend/flash_spec.js b/spec/frontend/flash_spec.js
index 228c897ab00..6d482e5814d 100644
--- a/spec/frontend/flash_spec.js
+++ b/spec/frontend/flash_spec.js
@@ -126,9 +126,17 @@ describe('Flash', () => {
});
describe('deprecatedCreateFlash', () => {
+ const message = 'test';
+ const type = 'alert';
+ const parent = document;
+ const actionConfig = null;
+ const fadeTransition = false;
+ const addBodyClass = true;
+ const defaultParams = [message, type, parent, actionConfig, fadeTransition, addBodyClass];
+
describe('no flash-container', () => {
it('does not add to the DOM', () => {
- const flashEl = deprecatedCreateFlash('testing');
+ const flashEl = deprecatedCreateFlash(message);
expect(flashEl).toBeNull();
@@ -138,11 +146,9 @@ describe('Flash', () => {
describe('with flash-container', () => {
beforeEach(() => {
- document.body.innerHTML += `
- <div class="content-wrapper js-content-wrapper">
- <div class="flash-container"></div>
- </div>
- `;
+ setFixtures(
+ '<div class="content-wrapper js-content-wrapper"><div class="flash-container"></div></div>',
+ );
});
afterEach(() => {
@@ -150,7 +156,7 @@ describe('Flash', () => {
});
it('adds flash element into container', () => {
- deprecatedCreateFlash('test', 'alert', document, null, false, true);
+ deprecatedCreateFlash(...defaultParams);
expect(document.querySelector('.flash-alert')).not.toBeNull();
@@ -158,26 +164,35 @@ describe('Flash', () => {
});
it('adds flash into specified parent', () => {
- deprecatedCreateFlash('test', 'alert', document.querySelector('.content-wrapper'));
+ deprecatedCreateFlash(
+ message,
+ type,
+ document.querySelector('.content-wrapper'),
+ actionConfig,
+ fadeTransition,
+ addBodyClass,
+ );
expect(document.querySelector('.content-wrapper .flash-alert')).not.toBeNull();
+ expect(document.querySelector('.content-wrapper').innerText.trim()).toEqual(message);
});
it('adds container classes when inside content-wrapper', () => {
- deprecatedCreateFlash('test');
+ deprecatedCreateFlash(...defaultParams);
expect(document.querySelector('.flash-text').className).toBe('flash-text');
+ expect(document.querySelector('.content-wrapper').innerText.trim()).toEqual(message);
});
it('does not add container when outside of content-wrapper', () => {
document.querySelector('.content-wrapper').className = 'js-content-wrapper';
- deprecatedCreateFlash('test');
+ deprecatedCreateFlash(...defaultParams);
expect(document.querySelector('.flash-text').className.trim()).toContain('flash-text');
});
it('removes element after clicking', () => {
- deprecatedCreateFlash('test', 'alert', document, null, false, true);
+ deprecatedCreateFlash(...defaultParams);
document.querySelector('.flash-alert .js-close-icon').click();
@@ -188,24 +203,37 @@ describe('Flash', () => {
describe('with actionConfig', () => {
it('adds action link', () => {
- deprecatedCreateFlash('test', 'alert', document, {
- title: 'test',
- });
+ const newActionConfig = { title: 'test' };
+ deprecatedCreateFlash(
+ message,
+ type,
+ parent,
+ newActionConfig,
+ fadeTransition,
+ addBodyClass,
+ );
expect(document.querySelector('.flash-action')).not.toBeNull();
});
it('calls actionConfig clickHandler on click', () => {
- const actionConfig = {
+ const newActionConfig = {
title: 'test',
clickHandler: jest.fn(),
};
- deprecatedCreateFlash('test', 'alert', document, actionConfig);
+ deprecatedCreateFlash(
+ message,
+ type,
+ parent,
+ newActionConfig,
+ fadeTransition,
+ addBodyClass,
+ );
document.querySelector('.flash-action').click();
- expect(actionConfig.clickHandler).toHaveBeenCalled();
+ expect(newActionConfig.clickHandler).toHaveBeenCalled();
});
});
});
diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js
index 13dbda9cf55..5453c93eac3 100644
--- a/spec/frontend/gfm_auto_complete_spec.js
+++ b/spec/frontend/gfm_auto_complete_spec.js
@@ -665,6 +665,41 @@ describe('GfmAutoComplete', () => {
expect(GfmAutoComplete.Members.nameOrUsernameIncludes(member, query)).toBe(result);
});
});
+
+ describe('sorter', () => {
+ const query = 'c';
+
+ const items = [
+ { search: 'DougHackett elayne.krieger' },
+ { search: 'BerylHuel cherie.block' },
+ { search: 'ErlindaMayert nicolle' },
+ { search: 'Administrator root' },
+ { search: 'PhoebeSchaden salina' },
+ { search: 'CatherinTerry tommy.will' },
+ { search: 'AntoineLedner ammie' },
+ { search: 'KinaCummings robena' },
+ { search: 'CharlsieHarber xzbdulia' },
+ ];
+
+ const expected = [
+ // Members whose name/username starts with `c` are grouped first
+ { search: 'BerylHuel cherie.block' },
+ { search: 'CatherinTerry tommy.will' },
+ { search: 'CharlsieHarber xzbdulia' },
+ // Members whose name/username contains `c` are grouped second
+ { search: 'DougHackett elayne.krieger' },
+ { search: 'ErlindaMayert nicolle' },
+ { search: 'PhoebeSchaden salina' },
+ { search: 'KinaCummings robena' },
+ // Remaining members are grouped last
+ { search: 'Administrator root' },
+ { search: 'AntoineLedner ammie' },
+ ];
+
+ it('sorts by match with start of name/username, then match with any part of name/username, and maintains sort order', () => {
+ expect(GfmAutoComplete.Members.sort(query, items)).toMatchObject(expected);
+ });
+ });
});
});
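
The new sorter spec above pins down a three-tier ordering for member autocomplete results: entries whose name or username starts with the query come first, entries that merely contain the query come second, and everything else comes last, with the original order preserved inside each tier. Below is a stand-alone sketch of a sorter with that behaviour; it only illustrates the ordering the spec asserts and is not the actual GfmAutoComplete.Members.sort implementation.

// Stable three-tier sort: "starts with query" first, "contains query" second,
// everything else last; ties keep their original relative order.
function sortMembersByQuery(query, items) {
  const q = query.toLowerCase();
  const startsWith = [];
  const contains = [];
  const rest = [];

  items.forEach((item) => {
    const search = (item.search || '').toLowerCase();
    // Each space-separated token (name, username) is a match candidate.
    const tokens = search.split(/\s+/);

    if (tokens.some((token) => token.startsWith(q))) {
      startsWith.push(item);
    } else if (search.includes(q)) {
      contains.push(item);
    } else {
      rest.push(item);
    }
  });

  return [...startsWith, ...contains, ...rest];
}

Running sortMembersByQuery('c', items) over the fixture members reproduces the expected array in the spec.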
diff --git a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
index 2e02159a20c..1d2a5d636bc 100644
--- a/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
+++ b/spec/frontend/grafana_integration/components/__snapshots__/grafana_integration_spec.js.snap
@@ -9,7 +9,7 @@ exports[`grafana integration component default state to match the default snapsh
class="settings-header"
>
<h4
- class="js-section-header"
+ class="js-section-header settings-title js-settings-toggle js-settings-toggle-trigger-only"
>
Grafana authentication
diff --git a/spec/frontend/ide/components/cannot_push_code_alert_spec.js b/spec/frontend/ide/components/cannot_push_code_alert_spec.js
new file mode 100644
index 00000000000..ff659ecdf3f
--- /dev/null
+++ b/spec/frontend/ide/components/cannot_push_code_alert_spec.js
@@ -0,0 +1,72 @@
+import { GlButton, GlAlert } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { stubComponent } from 'helpers/stub_component';
+import CannotPushCodeAlert from '~/ide/components/cannot_push_code_alert.vue';
+
+const TEST_MESSAGE = 'Hello test message!';
+const TEST_HREF = '/test/path/to/fork';
+const TEST_BUTTON_TEXT = 'Fork text';
+
+describe('ide/components/cannot_push_code_alert', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(CannotPushCodeAlert, {
+ propsData: {
+ message: TEST_MESSAGE,
+ ...props,
+ },
+ stubs: {
+ GlAlert: {
+ ...stubComponent(GlAlert),
+ template: `<div><slot></slot><slot name="actions"></slot></div>`,
+ },
+ },
+ });
+ };
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findButtonData = () => {
+ const button = findAlert().findComponent(GlButton);
+
+ if (!button.exists()) {
+ return null;
+ }
+
+ return {
+ href: button.attributes('href'),
+ method: button.attributes('data-method'),
+ text: button.text(),
+ };
+ };
+
+ describe('without actions', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('shows alert with message', () => {
+ expect(findAlert().props()).toMatchObject({ dismissible: false });
+ expect(findAlert().text()).toBe(TEST_MESSAGE);
+ });
+ });
+
+ describe.each`
+ action | buttonData
+ ${{}} | ${null}
+ ${{ href: TEST_HREF, text: TEST_BUTTON_TEXT }} | ${{ href: TEST_HREF, text: TEST_BUTTON_TEXT }}
+ ${{ href: TEST_HREF, text: TEST_BUTTON_TEXT, isForm: true }} | ${{ href: TEST_HREF, text: TEST_BUTTON_TEXT, method: 'post' }}
+ `('with action=$action', ({ action, buttonData }) => {
+ beforeEach(() => {
+ createComponent({ action });
+ });
+
+ it(`shows button=${JSON.stringify(buttonData)}`, () => {
+ expect(findButtonData()).toEqual(buttonData);
+ });
+ });
+});
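
The new spec above describes the contract of CannotPushCodeAlert: a non-dismissible alert that renders the message prop, plus an optional button whose href and text come from the action prop and which switches to a POST (data-method="post") when action.isForm is set. A minimal component sketch consistent with those assertions follows; the template and prop shapes are assumptions, not the component's real source.

import { GlAlert, GlButton } from '@gitlab/ui';

// Assumed component shape, reconstructed from the spec's expectations only.
export default {
  components: { GlAlert, GlButton },
  props: {
    message: { type: String, required: true },
    action: { type: Object, required: false, default: () => ({}) },
  },
  template: `
    <gl-alert :dismissible="false">
      {{ message }}
      <template #actions>
        <!-- No action.href (for example an empty action object) means no button at all. -->
        <gl-button
          v-if="action.href"
          :href="action.href"
          :data-method="action.isForm ? 'post' : null"
        >
          {{ action.text }}
        </gl-button>
      </template>
    </gl-alert>
  `,
};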
diff --git a/spec/frontend/ide/components/commit_sidebar/form_spec.js b/spec/frontend/ide/components/commit_sidebar/form_spec.js
index 083a2a73b24..f5916b021aa 100644
--- a/spec/frontend/ide/components/commit_sidebar/form_spec.js
+++ b/spec/frontend/ide/components/commit_sidebar/form_spec.js
@@ -14,7 +14,7 @@ import {
createBranchChangedCommitError,
branchAlreadyExistsCommitError,
} from '~/ide/lib/errors';
-import { MSG_CANNOT_PUSH_CODE_SHORT } from '~/ide/messages';
+import { MSG_CANNOT_PUSH_CODE } from '~/ide/messages';
import { createStore } from '~/ide/stores';
import { COMMIT_TO_NEW_BRANCH } from '~/ide/stores/modules/commit/constants';
@@ -85,8 +85,8 @@ describe('IDE commit form', () => {
${'when there are no changes'} | ${[]} | ${{ pushCode: true }} | ${goToEditView} | ${findBeginCommitButtonData} | ${true} | ${''}
${'when there are changes'} | ${['test']} | ${{ pushCode: true }} | ${goToEditView} | ${findBeginCommitButtonData} | ${false} | ${''}
${'when there are changes'} | ${['test']} | ${{ pushCode: true }} | ${goToCommitView} | ${findCommitButtonData} | ${false} | ${''}
- ${'when user cannot push'} | ${['test']} | ${{ pushCode: false }} | ${goToEditView} | ${findBeginCommitButtonData} | ${true} | ${MSG_CANNOT_PUSH_CODE_SHORT}
- ${'when user cannot push'} | ${['test']} | ${{ pushCode: false }} | ${goToCommitView} | ${findCommitButtonData} | ${true} | ${MSG_CANNOT_PUSH_CODE_SHORT}
+ ${'when user cannot push'} | ${['test']} | ${{ pushCode: false }} | ${goToEditView} | ${findBeginCommitButtonData} | ${true} | ${MSG_CANNOT_PUSH_CODE}
+ ${'when user cannot push'} | ${['test']} | ${{ pushCode: false }} | ${goToCommitView} | ${findCommitButtonData} | ${true} | ${MSG_CANNOT_PUSH_CODE}
`('$desc', ({ stagedFiles, userPermissions, viewFn, buttonFn, disabled, tooltip }) => {
beforeEach(async () => {
store.state.stagedFiles = stagedFiles;
diff --git a/spec/frontend/ide/components/ide_spec.js b/spec/frontend/ide/components/ide_spec.js
index bd251f78654..b23a78a035d 100644
--- a/spec/frontend/ide/components/ide_spec.js
+++ b/spec/frontend/ide/components/ide_spec.js
@@ -1,10 +1,10 @@
-import { GlAlert } from '@gitlab/ui';
import { createLocalVue, shallowMount } from '@vue/test-utils';
import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
+import CannotPushCodeAlert from '~/ide/components/cannot_push_code_alert.vue';
import ErrorMessage from '~/ide/components/error_message.vue';
import Ide from '~/ide/components/ide.vue';
-import { MSG_CANNOT_PUSH_CODE } from '~/ide/messages';
+import { MSG_CANNOT_PUSH_CODE_GO_TO_FORK, MSG_GO_TO_FORK } from '~/ide/messages';
import { createStore } from '~/ide/stores';
import { file } from '../helpers';
import { projectData } from '../mock_data';
@@ -12,14 +12,15 @@ import { projectData } from '../mock_data';
const localVue = createLocalVue();
localVue.use(Vuex);
+const TEST_FORK_IDE_PATH = '/test/ide/path';
+
describe('WebIDE', () => {
const emptyProjData = { ...projectData, empty_repo: true, branches: {} };
+ let store;
let wrapper;
const createComponent = ({ projData = emptyProjData, state = {} } = {}) => {
- const store = createStore();
-
store.state.currentProjectId = 'abcproject';
store.state.currentBranchId = 'master';
store.state.projects.abcproject = projData && { ...projData };
@@ -37,7 +38,11 @@ describe('WebIDE', () => {
});
};
- const findAlert = () => wrapper.find(GlAlert);
+ const findAlert = () => wrapper.findComponent(CannotPushCodeAlert);
+
+ beforeEach(() => {
+ store = createStore();
+ });
afterEach(() => {
wrapper.destroy();
@@ -148,6 +153,12 @@ describe('WebIDE', () => {
});
it('when user cannot push code, shows alert', () => {
+ store.state.links = {
+ forkInfo: {
+ ide_path: TEST_FORK_IDE_PATH,
+ },
+ };
+
createComponent({
projData: {
userPermissions: {
@@ -157,9 +168,12 @@ describe('WebIDE', () => {
});
expect(findAlert().props()).toMatchObject({
- dismissible: false,
+ message: MSG_CANNOT_PUSH_CODE_GO_TO_FORK,
+ action: {
+ href: TEST_FORK_IDE_PATH,
+ text: MSG_GO_TO_FORK,
+ },
});
- expect(findAlert().text()).toBe(MSG_CANNOT_PUSH_CODE);
});
it.each`
diff --git a/spec/frontend/ide/stores/getters_spec.js b/spec/frontend/ide/stores/getters_spec.js
index 6b66c87e205..06456cdb12a 100644
--- a/spec/frontend/ide/stores/getters_spec.js
+++ b/spec/frontend/ide/stores/getters_spec.js
@@ -6,25 +6,40 @@ import {
} from '~/ide/constants';
import {
MSG_CANNOT_PUSH_CODE,
- MSG_CANNOT_PUSH_CODE_SHORT,
+ MSG_CANNOT_PUSH_CODE_GO_TO_FORK,
+ MSG_CANNOT_PUSH_CODE_SHOULD_FORK,
MSG_CANNOT_PUSH_UNSIGNED,
MSG_CANNOT_PUSH_UNSIGNED_SHORT,
+ MSG_FORK,
+ MSG_GO_TO_FORK,
} from '~/ide/messages';
import { createStore } from '~/ide/stores';
import * as getters from '~/ide/stores/getters';
import { file } from '../helpers';
const TEST_PROJECT_ID = 'test_project';
+const TEST_IDE_PATH = '/test/ide/path';
+const TEST_FORK_PATH = '/test/fork/path';
describe('IDE store getters', () => {
let localState;
let localStore;
+ let origGon;
beforeEach(() => {
+ origGon = window.gon;
+
+ // The feature flag defaults to on in production
+ window.gon = { features: { rejectUnsignedCommitsByGitlab: true } };
+
localStore = createStore();
localState = localStore.state;
});
+ afterEach(() => {
+ window.gon = origGon;
+ });
+
describe('activeFile', () => {
it('returns the current active file', () => {
localState.openFiles.push(file());
@@ -433,27 +448,100 @@ describe('IDE store getters', () => {
});
describe('canPushCodeStatus', () => {
- it.each`
- pushCode | rejectUnsignedCommits | expected
- ${true} | ${false} | ${{ isAllowed: true, message: '', messageShort: '' }}
- ${false} | ${false} | ${{ isAllowed: false, message: MSG_CANNOT_PUSH_CODE, messageShort: MSG_CANNOT_PUSH_CODE_SHORT }}
- ${false} | ${true} | ${{ isAllowed: false, message: MSG_CANNOT_PUSH_UNSIGNED, messageShort: MSG_CANNOT_PUSH_UNSIGNED_SHORT }}
- `(
- 'with pushCode="$pushCode" and rejectUnsignedCommits="$rejectUnsignedCommits"',
- ({ pushCode, rejectUnsignedCommits, expected }) => {
- localState.projects[TEST_PROJECT_ID] = {
- pushRules: {
- [PUSH_RULE_REJECT_UNSIGNED_COMMITS]: rejectUnsignedCommits,
+ it.each([
+ [
+ 'when can push code, and can push unsigned commits',
+ {
+ input: { pushCode: true, rejectUnsignedCommits: false },
+ output: { isAllowed: true, message: '', messageShort: '' },
+ },
+ ],
+ [
+ 'when cannot push code, and can push unsigned commits',
+ {
+ input: { pushCode: false, rejectUnsignedCommits: false },
+ output: {
+ isAllowed: false,
+ message: MSG_CANNOT_PUSH_CODE,
+ messageShort: MSG_CANNOT_PUSH_CODE,
},
- userPermissions: {
- [PERMISSION_PUSH_CODE]: pushCode,
+ },
+ ],
+ [
+ 'when cannot push code, and has ide_path in forkInfo',
+ {
+ input: {
+ pushCode: false,
+ rejectUnsignedCommits: false,
+ forkInfo: { ide_path: TEST_IDE_PATH },
},
- };
- localState.currentProjectId = TEST_PROJECT_ID;
+ output: {
+ isAllowed: false,
+ message: MSG_CANNOT_PUSH_CODE_GO_TO_FORK,
+ messageShort: MSG_CANNOT_PUSH_CODE,
+ action: { href: TEST_IDE_PATH, text: MSG_GO_TO_FORK },
+ },
+ },
+ ],
+ [
+ 'when cannot push code, and has fork_path in forkInfo',
+ {
+ input: {
+ pushCode: false,
+ rejectUnsignedCommits: false,
+ forkInfo: { fork_path: TEST_FORK_PATH },
+ },
+ output: {
+ isAllowed: false,
+ message: MSG_CANNOT_PUSH_CODE_SHOULD_FORK,
+ messageShort: MSG_CANNOT_PUSH_CODE,
+ action: { href: TEST_FORK_PATH, text: MSG_FORK, isForm: true },
+ },
+ },
+ ],
+ [
+ 'when can push code, but cannot push unsigned commits',
+ {
+ input: { pushCode: true, rejectUnsignedCommits: true },
+ output: {
+ isAllowed: false,
+ message: MSG_CANNOT_PUSH_UNSIGNED,
+ messageShort: MSG_CANNOT_PUSH_UNSIGNED_SHORT,
+ },
+ },
+ ],
+ [
+ 'when can push code, but cannot push unsigned commits, with reject_unsigned_commits_by_gitlab feature off',
+ {
+ input: {
+ pushCode: true,
+ rejectUnsignedCommits: true,
+ features: { rejectUnsignedCommitsByGitlab: false },
+ },
+ output: {
+ isAllowed: true,
+ message: '',
+ messageShort: '',
+ },
+ },
+ ],
+ ])('%s', (testName, { input, output }) => {
+ const { forkInfo, rejectUnsignedCommits, pushCode, features = {} } = input;
- expect(localStore.getters.canPushCodeStatus).toEqual(expected);
- },
- );
+ Object.assign(window.gon.features, features);
+ localState.links = { forkInfo };
+ localState.projects[TEST_PROJECT_ID] = {
+ pushRules: {
+ [PUSH_RULE_REJECT_UNSIGNED_COMMITS]: rejectUnsignedCommits,
+ },
+ userPermissions: {
+ [PERMISSION_PUSH_CODE]: pushCode,
+ },
+ };
+ localState.currentProjectId = TEST_PROJECT_ID;
+
+ expect(localStore.getters.canPushCodeStatus).toEqual(output);
+ });
});
describe('canPushCode', () => {
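
The rewritten canPushCodeStatus cases above amount to a small decision table: the unsigned-commit push rule blocks the push (only while the rejectUnsignedCommitsByGitlab flag is on); otherwise a missing push permission yields the cannot-push message, upgraded to a go-to-fork or create-fork variant with an action link when fork links are available. A plain-function sketch of that table follows; the message constants are shown as string placeholders and the function shape is an assumption, not the real getter in ~/ide/stores/getters.

// Assumed reconstruction of the decision table exercised above.
function canPushCodeStatus({ pushCode, rejectUnsignedCommits, featureOn, forkInfo = {} }) {
  // Unsigned-commit push rules apply only while the feature flag is enabled.
  if (featureOn && rejectUnsignedCommits) {
    return {
      isAllowed: false,
      message: 'MSG_CANNOT_PUSH_UNSIGNED',
      messageShort: 'MSG_CANNOT_PUSH_UNSIGNED_SHORT',
    };
  }

  if (!pushCode) {
    if (forkInfo.ide_path) {
      // A fork already exists: point the user at its Web IDE.
      return {
        isAllowed: false,
        message: 'MSG_CANNOT_PUSH_CODE_GO_TO_FORK',
        messageShort: 'MSG_CANNOT_PUSH_CODE',
        action: { href: forkInfo.ide_path, text: 'MSG_GO_TO_FORK' },
      };
    }

    if (forkInfo.fork_path) {
      // No fork yet: offer to create one (a POST, hence isForm).
      return {
        isAllowed: false,
        message: 'MSG_CANNOT_PUSH_CODE_SHOULD_FORK',
        messageShort: 'MSG_CANNOT_PUSH_CODE',
        action: { href: forkInfo.fork_path, text: 'MSG_FORK', isForm: true },
      };
    }

    return {
      isAllowed: false,
      message: 'MSG_CANNOT_PUSH_CODE',
      messageShort: 'MSG_CANNOT_PUSH_CODE',
    };
  }

  return { isAllowed: true, message: '', messageShort: '' };
}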
diff --git a/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap b/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
index 4398d568501..07f90a12f0f 100644
--- a/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
+++ b/spec/frontend/incidents_settings/components/__snapshots__/incidents_settings_tabs_spec.js.snap
@@ -9,7 +9,9 @@ exports[`IncidentsSettingTabs should render the component 1`] = `
<div
class="settings-header"
>
- <h4>
+ <h4
+ class="settings-title js-settings-toggle js-settings-toggle-trigger-only"
+ >
Incidents
diff --git a/spec/frontend/integrations/edit/components/dynamic_field_spec.js b/spec/frontend/integrations/edit/components/dynamic_field_spec.js
index aaca9fc4e62..2ebb3333c0f 100644
--- a/spec/frontend/integrations/edit/components/dynamic_field_spec.js
+++ b/spec/frontend/integrations/edit/components/dynamic_field_spec.js
@@ -192,17 +192,6 @@ describe('DynamicField', () => {
expect(findGlFormGroup().find('label').text()).toBe(defaultProps.title);
});
-
- describe('for password field with some value (hidden by backend)', () => {
- it('renders label with new password title', () => {
- createComponent({
- type: 'password',
- value: 'true',
- });
-
- expect(findGlFormGroup().find('label').text()).toBe(`Enter new ${defaultProps.title}`);
- });
- });
});
describe('validations', () => {
diff --git a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
index 3938e7c7c22..d08a1904e06 100644
--- a/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/jira_issues_fields_spec.js
@@ -1,7 +1,7 @@
import { GlFormCheckbox, GlFormInput } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-
import JiraIssuesFields from '~/integrations/edit/components/jira_issues_fields.vue';
+import JiraUpgradeCta from '~/integrations/edit/components/jira_upgrade_cta.vue';
import eventHub from '~/integrations/edit/event_hub';
describe('JiraIssuesFields', () => {
@@ -28,23 +28,46 @@ describe('JiraIssuesFields', () => {
}
});
- const findEnableCheckbox = () => wrapper.find(GlFormCheckbox);
- const findProjectKey = () => wrapper.find(GlFormInput);
- const expectedBannerText = 'This is a Premium feature';
+ const findEnableCheckbox = () => wrapper.findComponent(GlFormCheckbox);
+ const findProjectKey = () => wrapper.findComponent(GlFormInput);
+ const findJiraUpgradeCta = () => wrapper.findComponent(JiraUpgradeCta);
const findJiraForVulnerabilities = () => wrapper.find('[data-testid="jira-for-vulnerabilities"]');
const setEnableCheckbox = async (isEnabled = true) =>
findEnableCheckbox().vm.$emit('input', isEnabled);
+ describe('jira issues call to action', () => {
+ it('shows the premium message', () => {
+ createComponent({
+ props: { showJiraIssuesIntegration: false },
+ });
+
+ expect(findJiraUpgradeCta().props()).toMatchObject({
+ showPremiumMessage: true,
+ showUltimateMessage: false,
+ });
+ });
+
+ it('shows the ultimate message', () => {
+ createComponent({
+ props: {
+ showJiraIssuesIntegration: true,
+ showJiraVulnerabilitiesIntegration: false,
+ },
+ });
+
+ expect(findJiraUpgradeCta().props()).toMatchObject({
+ showPremiumMessage: false,
+ showUltimateMessage: true,
+ });
+ });
+ });
+
describe('template', () => {
describe('upgrade banner for non-Premium user', () => {
beforeEach(() => {
createComponent({ props: { initialProjectKey: '', showJiraIssuesIntegration: false } });
});
- it('shows upgrade banner', () => {
- expect(wrapper.text()).toContain(expectedBannerText);
- });
-
it('does not show checkbox and input field', () => {
expect(findEnableCheckbox().exists()).toBe(false);
expect(findProjectKey().exists()).toBe(false);
@@ -57,7 +80,7 @@ describe('JiraIssuesFields', () => {
});
it('does not show upgrade banner', () => {
- expect(wrapper.text()).not.toContain(expectedBannerText);
+ expect(findJiraUpgradeCta().exists()).toBe(false);
});
// As per https://vuejs.org/v2/guide/forms.html#Checkbox-1,
@@ -125,6 +148,14 @@ describe('JiraIssuesFields', () => {
},
);
+ it('passes down the correct show-full-feature property', async () => {
+ await setEnableCheckbox(true);
+ expect(findJiraForVulnerabilities().attributes('show-full-feature')).toBe('true');
+ wrapper.setProps({ showJiraVulnerabilitiesIntegration: false });
+ await wrapper.vm.$nextTick();
+ expect(findJiraForVulnerabilities().attributes('show-full-feature')).toBeUndefined();
+ });
+
it('passes down the correct initial-issue-type-id value when value is empty', async () => {
await setEnableCheckbox(true);
expect(findJiraForVulnerabilities().attributes('initial-issue-type-id')).toBeUndefined();
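
The two "jira issues call to action" cases above fix the mapping from the availability props onto the JiraUpgradeCta props: no Jira issues integration shows the Premium message, while issues without the vulnerabilities integration shows the Ultimate message (a separate test checks that the CTA is absent entirely once the banner is not needed). A tiny sketch of that mapping, covering only the two asserted cases and assuming these are the only inputs involved:

// Assumed prop mapping, limited to the cases the spec asserts.
function jiraUpgradeCtaProps({ showJiraIssuesIntegration, showJiraVulnerabilitiesIntegration }) {
  return {
    showPremiumMessage: !showJiraIssuesIntegration,
    showUltimateMessage: showJiraIssuesIntegration && !showJiraVulnerabilitiesIntegration,
  };
}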
diff --git a/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js b/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js
index c6e7ee44355..5c04add61a1 100644
--- a/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/jira_trigger_fields_spec.js
@@ -30,14 +30,23 @@ describe('JiraTriggerFields', () => {
const findCommentSettings = () => wrapper.find('[data-testid="comment-settings"]');
const findCommentDetail = () => wrapper.find('[data-testid="comment-detail"]');
const findCommentSettingsCheckbox = () => findCommentSettings().find(GlFormCheckbox);
+ const findIssueTransitionEnabled = () =>
+ wrapper.find('[data-testid="issue-transition-enabled"] input[type="checkbox"]');
+ const findIssueTransitionMode = () => wrapper.find('[data-testid="issue-transition-mode"]');
+ const findIssueTransitionModeRadios = () =>
+ findIssueTransitionMode().findAll('input[type="radio"]');
+ const findIssueTransitionIdsField = () =>
+ wrapper.find('input[type="text"][name="service[jira_issue_transition_id]"]');
describe('template', () => {
describe('initialTriggerCommit and initialTriggerMergeRequest are false', () => {
- it('does not show comment settings', () => {
+ it('does not show trigger settings', () => {
createComponent();
expect(findCommentSettings().isVisible()).toBe(false);
expect(findCommentDetail().isVisible()).toBe(false);
+ expect(findIssueTransitionEnabled().exists()).toBe(false);
+ expect(findIssueTransitionMode().exists()).toBe(false);
});
});
@@ -48,9 +57,11 @@ describe('JiraTriggerFields', () => {
});
});
- it('shows comment settings', () => {
+ it('shows trigger settings', () => {
expect(findCommentSettings().isVisible()).toBe(true);
expect(findCommentDetail().isVisible()).toBe(false);
+ expect(findIssueTransitionEnabled().isVisible()).toBe(true);
+ expect(findIssueTransitionMode().exists()).toBe(false);
});
// As per https://vuejs.org/v2/guide/forms.html#Checkbox-1,
@@ -73,13 +84,15 @@ describe('JiraTriggerFields', () => {
});
describe('initialTriggerMergeRequest is true', () => {
- it('shows comment settings', () => {
+ it('shows trigger settings', () => {
createComponent({
initialTriggerMergeRequest: true,
});
expect(findCommentSettings().isVisible()).toBe(true);
expect(findCommentDetail().isVisible()).toBe(false);
+ expect(findIssueTransitionEnabled().isVisible()).toBe(true);
+ expect(findIssueTransitionMode().exists()).toBe(false);
});
});
@@ -95,21 +108,94 @@ describe('JiraTriggerFields', () => {
});
});
- it('disables checkboxes and radios if inheriting', () => {
+ describe('initialJiraIssueTransitionAutomatic is false, initialJiraIssueTransitionId is not set', () => {
+ it('selects automatic transitions when enabling transitions', () => {
+ createComponent({
+ initialTriggerCommit: true,
+ initialEnableComments: true,
+ });
+
+ const checkbox = findIssueTransitionEnabled();
+ expect(checkbox.element.checked).toBe(false);
+ checkbox.trigger('click');
+
+ return wrapper.vm.$nextTick().then(() => {
+ const [radio1, radio2] = findIssueTransitionModeRadios().wrappers;
+ expect(radio1.element.checked).toBe(true);
+ expect(radio2.element.checked).toBe(false);
+ });
+ });
+ });
+
+ describe('initialJiraIssueTransitionAutomatic is true', () => {
+ it('uses automatic transitions', () => {
+ createComponent({
+ initialTriggerCommit: true,
+ initialJiraIssueTransitionAutomatic: true,
+ });
+
+ expect(findIssueTransitionEnabled().element.checked).toBe(true);
+
+ const [radio1, radio2] = findIssueTransitionModeRadios().wrappers;
+ expect(radio1.element.checked).toBe(true);
+ expect(radio2.element.checked).toBe(false);
+
+ expect(findIssueTransitionIdsField().exists()).toBe(false);
+ });
+ });
+
+ describe('initialJiraIssueTransitionId is set', () => {
+ it('uses custom transitions', () => {
+ createComponent({
+ initialTriggerCommit: true,
+ initialJiraIssueTransitionId: '1, 2, 3',
+ });
+
+ expect(findIssueTransitionEnabled().element.checked).toBe(true);
+
+ const [radio1, radio2] = findIssueTransitionModeRadios().wrappers;
+ expect(radio1.element.checked).toBe(false);
+ expect(radio2.element.checked).toBe(true);
+
+ const field = findIssueTransitionIdsField();
+ expect(field.isVisible()).toBe(true);
+ expect(field.element).toMatchObject({
+ type: 'text',
+ value: '1, 2, 3',
+ });
+ });
+ });
+
+ describe('initialJiraIssueTransitionAutomatic is true, initialJiraIssueTransitionId is set', () => {
+ it('uses automatic transitions', () => {
+ createComponent({
+ initialTriggerCommit: true,
+ initialJiraIssueTransitionAutomatic: true,
+ initialJiraIssueTransitionId: '1, 2, 3',
+ });
+
+ expect(findIssueTransitionEnabled().element.checked).toBe(true);
+
+ const [radio1, radio2] = findIssueTransitionModeRadios().wrappers;
+ expect(radio1.element.checked).toBe(true);
+ expect(radio2.element.checked).toBe(false);
+
+ expect(findIssueTransitionIdsField().exists()).toBe(false);
+ });
+ });
+
+ it('disables input fields if inheriting', () => {
createComponent(
{
initialTriggerCommit: true,
initialEnableComments: true,
+ initialJiraIssueTransitionId: '1, 2, 3',
},
true,
);
- wrapper.findAll('[type=checkbox]').wrappers.forEach((checkbox) => {
- expect(checkbox.attributes('disabled')).toBe('disabled');
- });
-
- wrapper.findAll('[type=radio]').wrappers.forEach((radio) => {
- expect(radio.attributes('disabled')).toBe('disabled');
+ wrapper.findAll('[type=text], [type=checkbox], [type=radio]').wrappers.forEach((input) => {
+ expect(input.attributes('disabled')).toBe('disabled');
});
});
});
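
The new transition-mode cases above encode a small rule: the "enable transitions" checkbox starts checked when either initialJiraIssueTransitionAutomatic is true or initialJiraIssueTransitionId is set; the automatic radio wins whenever the automatic flag is true (and is also the default when a user enables transitions fresh), while a transition ID alone selects the custom radio and exposes the ID text field. A small helper sketch of that derivation, under the assumption that these two props are the only inputs:

// Assumed derivation of the initial transition state exercised above.
function initialTransitionState({ automatic = false, transitionId = '' } = {}) {
  const enabled = automatic || transitionId !== '';
  // The automatic flag takes precedence over a configured ID list.
  const mode = automatic || !transitionId ? 'automatic' : 'custom';
  return {
    enabled,
    mode,
    // The ID text field is only rendered (and populated) in custom mode.
    transitionIds: mode === 'custom' ? transitionId : '',
  };
}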
diff --git a/spec/frontend/integrations/edit/components/jira_upgrade_cta_spec.js b/spec/frontend/integrations/edit/components/jira_upgrade_cta_spec.js
new file mode 100644
index 00000000000..e49a1619627
--- /dev/null
+++ b/spec/frontend/integrations/edit/components/jira_upgrade_cta_spec.js
@@ -0,0 +1,30 @@
+import { shallowMount } from '@vue/test-utils';
+import JiraUpgradeCta from '~/integrations/edit/components/jira_upgrade_cta.vue';
+
+describe('JiraUpgradeCta', () => {
+ let wrapper;
+
+ const contentMessage = 'Upgrade your plan to enable this feature of the Jira Integration.';
+
+ const createComponent = (propsData) => {
+ wrapper = shallowMount(JiraUpgradeCta, {
+ propsData,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays the correct message for premium and lower users', () => {
+ createComponent({ showPremiumMessage: true });
+ expect(wrapper.html()).toContain('This is a Premium feature');
+ expect(wrapper.html()).toContain(contentMessage);
+ });
+
+ it('displays the correct message for ultimate and lower users', () => {
+ createComponent({ showUltimateMessage: true });
+ expect(wrapper.html()).toContain('This is an Ultimate feature');
+ expect(wrapper.html()).toContain(contentMessage);
+ });
+});
diff --git a/spec/frontend/integrations/edit/components/trigger_fields_spec.js b/spec/frontend/integrations/edit/components/trigger_fields_spec.js
index 3e5326812b1..b9d16464e72 100644
--- a/spec/frontend/integrations/edit/components/trigger_fields_spec.js
+++ b/spec/frontend/integrations/edit/components/trigger_fields_spec.js
@@ -138,11 +138,11 @@ describe('TriggerFields', () => {
const expectedResults = [
{
name: 'service[push_channel]',
- placeholder: 'Slack channels (e.g. general, development)',
+ placeholder: 'general, development',
},
{
name: 'service[merge_request_channel]',
- placeholder: 'Slack channels (e.g. general, development)',
+ placeholder: 'general, development',
},
];
diff --git a/spec/frontend/integrations/index/components/integrations_list_spec.js b/spec/frontend/integrations/index/components/integrations_list_spec.js
new file mode 100644
index 00000000000..94fd7fc84ee
--- /dev/null
+++ b/spec/frontend/integrations/index/components/integrations_list_spec.js
@@ -0,0 +1,26 @@
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import IntegrationsList from '~/integrations/index/components/integrations_list.vue';
+import { mockActiveIntegrations, mockInactiveIntegrations } from '../mock_data';
+
+describe('IntegrationsList', () => {
+ let wrapper;
+
+ const findActiveIntegrationsTable = () => wrapper.findByTestId('active-integrations-table');
+ const findInactiveIntegrationsTable = () => wrapper.findByTestId('inactive-integrations-table');
+
+ const createComponent = (propsData = {}) => {
+ wrapper = extendedWrapper(shallowMount(IntegrationsList, { propsData }));
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('provides correct `integrations` prop to the IntegrationsTable instance', () => {
+ createComponent({ integrations: [...mockInactiveIntegrations, ...mockActiveIntegrations] });
+
+ expect(findActiveIntegrationsTable().props('integrations')).toEqual(mockActiveIntegrations);
+ expect(findInactiveIntegrationsTable().props('integrations')).toEqual(mockInactiveIntegrations);
+ });
+});
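
The single assertion above implies that IntegrationsList partitions the incoming integrations prop by the active flag before handing each half to an IntegrationsTable. A short sketch of that split (assumed; the real component presumably does the equivalent with computed properties):

// Assumed partition matching the spec's active/inactive table expectations.
const partitionIntegrations = (integrations) => ({
  active: integrations.filter((integration) => integration.active),
  inactive: integrations.filter((integration) => !integration.active),
});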
diff --git a/spec/frontend/integrations/index/components/integrations_table_spec.js b/spec/frontend/integrations/index/components/integrations_table_spec.js
new file mode 100644
index 00000000000..bfe0a5987b4
--- /dev/null
+++ b/spec/frontend/integrations/index/components/integrations_table_spec.js
@@ -0,0 +1,53 @@
+import { GlTable, GlIcon } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import IntegrationsTable from '~/integrations/index/components/integrations_table.vue';
+import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
+
+import { mockActiveIntegrations, mockInactiveIntegrations } from '../mock_data';
+
+describe('IntegrationsTable', () => {
+ let wrapper;
+
+ const findTable = () => wrapper.findComponent(GlTable);
+
+ const createComponent = (propsData = {}) => {
+ wrapper = mount(IntegrationsTable, {
+ propsData: {
+ integrations: mockActiveIntegrations,
+ ...propsData,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe.each([true, false])('when `showUpdatedAt` is %p', (showUpdatedAt) => {
+ beforeEach(() => {
+ createComponent({ showUpdatedAt });
+ });
+
+ it(`${showUpdatedAt ? 'renders' : 'does not render'} content in "Last updated" column`, () => {
+ const headers = findTable().findAll('th');
+ expect(headers.wrappers.some((header) => header.text() === 'Last updated')).toBe(
+ showUpdatedAt,
+ );
+ expect(wrapper.findComponent(TimeAgoTooltip).exists()).toBe(showUpdatedAt);
+ });
+ });
+
+ describe.each`
+ scenario | integrations | shouldRenderActiveIcon
+ ${'when integration is active'} | ${[mockActiveIntegrations[0]]} | ${true}
+ ${'when integration is inactive'} | ${[mockInactiveIntegrations[0]]} | ${false}
+ `('$scenario', ({ shouldRenderActiveIcon, integrations }) => {
+ beforeEach(() => {
+ createComponent({ integrations });
+ });
+
+ it(`${shouldRenderActiveIcon ? 'renders' : 'does not render'} icon in first column`, () => {
+ expect(findTable().findComponent(GlIcon).exists()).toBe(shouldRenderActiveIcon);
+ });
+ });
+});
diff --git a/spec/frontend/integrations/index/mock_data.js b/spec/frontend/integrations/index/mock_data.js
new file mode 100644
index 00000000000..2231687d255
--- /dev/null
+++ b/spec/frontend/integrations/index/mock_data.js
@@ -0,0 +1,50 @@
+export const mockActiveIntegrations = [
+ {
+ active: true,
+ title: 'Asana',
+ description: 'Asana - Teamwork without email',
+ updated_at: '2021-03-18T00:27:09.634Z',
+ edit_path:
+ '/gitlab-qa-sandbox-group/project_with_jenkins_6a55a67c-57c6ed0597c9319a/-/services/asana/edit',
+ name: 'asana',
+ },
+ {
+ active: true,
+ title: 'Jira',
+ description: 'Jira issue tracker',
+ updated_at: '2021-01-29T06:41:25.806Z',
+ edit_path:
+ '/gitlab-qa-sandbox-group/project_with_jenkins_6a55a67c-57c6ed0597c9319a/-/services/jira/edit',
+ name: 'jira',
+ },
+];
+
+export const mockInactiveIntegrations = [
+ {
+ active: false,
+ title: 'Webex Teams',
+ description: 'Receive event notifications in Webex Teams',
+ updated_at: null,
+ edit_path:
+ '/gitlab-qa-sandbox-group/project_with_jenkins_6a55a67c-57c6ed0597c9319a/-/services/webex_teams/edit',
+ name: 'webex_teams',
+ },
+ {
+ active: false,
+ title: 'YouTrack',
+ description: 'YouTrack issue tracker',
+ updated_at: null,
+ edit_path:
+ '/gitlab-qa-sandbox-group/project_with_jenkins_6a55a67c-57c6ed0597c9319a/-/services/youtrack/edit',
+ name: 'youtrack',
+ },
+ {
+ active: false,
+ title: 'Atlassian Bamboo CI',
+ description: 'A continuous integration and build server',
+ updated_at: null,
+ edit_path:
+ '/gitlab-qa-sandbox-group/project_with_jenkins_6a55a67c-57c6ed0597c9319a/-/services/bamboo/edit',
+ name: 'bamboo',
+ },
+];
diff --git a/spec/frontend/invite_member/components/invite_member_modal_spec.js b/spec/frontend/invite_member/components/invite_member_modal_spec.js
index 4eff19402a8..03e3da2d5ef 100644
--- a/spec/frontend/invite_member/components/invite_member_modal_spec.js
+++ b/spec/frontend/invite_member/components/invite_member_modal_spec.js
@@ -9,7 +9,7 @@ const memberPath = 'member_path';
const GlEmoji = { template: '<img />' };
const createComponent = () => {
return shallowMount(InviteMemberModal, {
- provide: {
+ propsData: {
membersPath: memberPath,
},
stubs: {
diff --git a/spec/frontend/invite_member/components/invite_member_trigger_spec.js b/spec/frontend/invite_member/components/invite_member_trigger_spec.js
index 67c312fd155..630e2dbfc16 100644
--- a/spec/frontend/invite_member/components/invite_member_trigger_spec.js
+++ b/spec/frontend/invite_member/components/invite_member_trigger_spec.js
@@ -5,7 +5,7 @@ import InviteMemberTrigger from '~/invite_member/components/invite_member_trigge
import triggerProvides from './invite_member_trigger_mock_data';
const createComponent = () => {
- return shallowMount(InviteMemberTrigger, { provide: triggerProvides });
+ return shallowMount(InviteMemberTrigger, { propsData: triggerProvides });
};
describe('InviteMemberTrigger', () => {
diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js
index 5ca5d855038..7ed18775693 100644
--- a/spec/frontend/invite_members/components/invite_members_modal_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js
@@ -3,7 +3,11 @@ import { shallowMount } from '@vue/test-utils';
import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
+import ExperimentTracking from '~/experimentation/experiment_tracking';
import InviteMembersModal from '~/invite_members/components/invite_members_modal.vue';
+import { INVITE_MEMBERS_IN_COMMENT } from '~/invite_members/constants';
+
+jest.mock('~/experimentation/experiment_tracking');
const id = '1';
const name = 'test name';
@@ -89,7 +93,7 @@ describe('InviteMembersModal', () => {
});
it('renders the modal with the correct title', () => {
- expect(wrapper.findComponent(GlModal).props('title')).toBe('Invite team members');
+ expect(wrapper.findComponent(GlModal).props('title')).toBe('Invite members');
});
it('renders the Cancel button text correctly', () => {
@@ -303,6 +307,7 @@ describe('InviteMembersModal', () => {
jest.spyOn(Api, 'inviteGroupMembersByEmail').mockResolvedValue({ data: postData });
jest.spyOn(Api, 'addGroupMembersByUserId').mockResolvedValue({ data: postData });
jest.spyOn(wrapper.vm, 'showToastMessageSuccess');
+ jest.spyOn(wrapper.vm, 'trackInvite');
clickInviteButton();
});
@@ -396,5 +401,46 @@ describe('InviteMembersModal', () => {
});
});
});
+
+ describe('tracking', () => {
+ const postData = {
+ user_id: '1',
+ access_level: defaultAccessLevel,
+ expires_at: undefined,
+ format: 'json',
+ };
+
+ beforeEach(() => {
+ wrapper = createComponent({ newUsersToInvite: [user3] });
+
+ wrapper.vm.$toast = { show: jest.fn() };
+ jest.spyOn(Api, 'inviteGroupMembersByEmail').mockResolvedValue({ data: postData });
+ });
+
+ it('tracks the invite', () => {
+ wrapper.vm.openModal({ inviteeType: 'members', source: INVITE_MEMBERS_IN_COMMENT });
+
+ clickInviteButton();
+
+ expect(ExperimentTracking).toHaveBeenCalledWith(INVITE_MEMBERS_IN_COMMENT);
+ expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith('comment_invite_success');
+ });
+
+ it('does not track invite for unknown source', () => {
+ wrapper.vm.openModal({ inviteeType: 'members', source: 'unknown' });
+
+ clickInviteButton();
+
+ expect(ExperimentTracking).not.toHaveBeenCalled();
+ });
+
+ it('does not track invite for undefined source', () => {
+ wrapper.vm.openModal({ inviteeType: 'members' });
+
+ clickInviteButton();
+
+ expect(ExperimentTracking).not.toHaveBeenCalled();
+ });
+ });
});
});
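
The new tracking block above defines when an invite is counted against the INVITE_MEMBERS_IN_COMMENT experiment: only when the modal was opened with that source; an unknown or missing source must not construct an ExperimentTracking instance at all. A sketch of a trackInvite helper consistent with those expectations (an assumption about the component's internals, not its actual code):

import ExperimentTracking from '~/experimentation/experiment_tracking';
import { INVITE_MEMBERS_IN_COMMENT } from '~/invite_members/constants';

// Assumed helper: track a successful invite only for the comment experiment,
// mirroring the constructor and event calls the spec spies on.
function trackInvite(source) {
  if (source !== INVITE_MEMBERS_IN_COMMENT) {
    return;
  }
  new ExperimentTracking(source).event('comment_invite_success');
}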
diff --git a/spec/frontend/invite_members/components/invite_members_trigger_spec.js b/spec/frontend/invite_members/components/invite_members_trigger_spec.js
index f362aace1df..b569b6286e0 100644
--- a/spec/frontend/invite_members/components/invite_members_trigger_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_trigger_spec.js
@@ -1,35 +1,99 @@
-import { GlButton } from '@gitlab/ui';
+import { GlButton, GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import ExperimentTracking from '~/experimentation/experiment_tracking';
import InviteMembersTrigger from '~/invite_members/components/invite_members_trigger.vue';
+import eventHub from '~/invite_members/event_hub';
+
+jest.mock('~/experimentation/experiment_tracking');
const displayText = 'Invite team members';
+let wrapper;
+let triggerProps;
+let findButton;
+const triggerComponent = {
+ button: GlButton,
+ anchor: GlLink,
+};
const createComponent = (props = {}) => {
- return shallowMount(InviteMembersTrigger, {
+ wrapper = shallowMount(InviteMembersTrigger, {
propsData: {
displayText,
+ ...triggerProps,
...props,
},
});
};
-describe('InviteMembersTrigger', () => {
- let wrapper;
+describe.each(['button', 'anchor'])('with triggerElement as %s', (triggerElement) => {
+ triggerProps = { triggerElement };
+ findButton = () => wrapper.findComponent(triggerComponent[triggerElement]);
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
describe('displayText', () => {
- const findButton = () => wrapper.findComponent(GlButton);
+ it('includes the correct displayText for the button', () => {
+ createComponent();
+
+ expect(findButton().text()).toBe(displayText);
+ });
+ });
+
+ describe('clicking the link', () => {
+ let spy;
beforeEach(() => {
- wrapper = createComponent();
+ spy = jest.spyOn(eventHub, '$emit');
});
- it('includes the correct displayText for the button', () => {
- expect(findButton().text()).toBe(displayText);
+ it('emits openModal from an unknown source', () => {
+ createComponent();
+
+ findButton().vm.$emit('click');
+
+ expect(spy).toHaveBeenCalledWith('openModal', { inviteeType: 'members', source: 'unknown' });
+ });
+
+ it('emits openModal from a named source', () => {
+ createComponent({ triggerSource: '_trigger_source_' });
+
+ findButton().vm.$emit('click');
+
+ expect(spy).toHaveBeenCalledWith('openModal', {
+ inviteeType: 'members',
+ source: '_trigger_source_',
+ });
+ });
+ });
+
+ describe('tracking', () => {
+ it('tracks on mounting', () => {
+ createComponent({ trackExperiment: '_track_experiment_' });
+
+ expect(ExperimentTracking).toHaveBeenCalledWith('_track_experiment_');
+ expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith('comment_invite_shown');
+ });
+
+ it('does not track on mounting', () => {
+ createComponent();
+
+ expect(ExperimentTracking).not.toHaveBeenCalledWith('_track_experiment_');
+ });
+
+ it('does not add tracking attributes', () => {
+ createComponent();
+
+ expect(findButton().attributes('data-track-event')).toBeUndefined();
+ expect(findButton().attributes('data-track-label')).toBeUndefined();
+ });
+
+ it('adds tracking attributes', () => {
+ createComponent({ label: '_label_', event: '_event_' });
+
+ expect(findButton().attributes('data-track-event')).toBe('_event_');
+ expect(findButton().attributes('data-track-label')).toBe('_label_');
});
});
});
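
Taken together, the trigger tests above describe the component's observable surface: it renders a GlButton or GlLink depending on triggerElement, emits openModal on the event hub with inviteeType 'members' and the given triggerSource (defaulting to 'unknown'), tracks comment_invite_shown on mount only when trackExperiment is set, and forwards optional label/event props as data-track-* attributes. A condensed options-object sketch of that behaviour follows; the template and any prop defaults beyond what the spec shows are assumptions.

import { GlButton, GlLink } from '@gitlab/ui';
import ExperimentTracking from '~/experimentation/experiment_tracking';
import eventHub from '~/invite_members/event_hub';

// Assumed component outline matching the spec's observable behaviour.
export default {
  components: { GlButton, GlLink },
  props: {
    displayText: { type: String, required: true },
    triggerElement: { type: String, required: false, default: 'button' },
    triggerSource: { type: String, required: false, default: 'unknown' },
    trackExperiment: { type: String, required: false, default: null },
    label: { type: String, required: false, default: null },
    event: { type: String, required: false, default: null },
  },
  computed: {
    triggerComponent() {
      return this.triggerElement === 'anchor' ? GlLink : GlButton;
    },
  },
  mounted() {
    // Tracking happens once, and only when an experiment name is supplied.
    if (this.trackExperiment) {
      new ExperimentTracking(this.trackExperiment).event('comment_invite_shown');
    }
  },
  methods: {
    openModal() {
      eventHub.$emit('openModal', { inviteeType: 'members', source: this.triggerSource });
    },
  },
  template: `
    <component
      :is="triggerComponent"
      :data-track-event="event"
      :data-track-label="label"
      @click="openModal"
    >
      {{ displayText }}
    </component>
  `,
};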
diff --git a/spec/frontend/issuable/components/csv_export_modal_spec.js b/spec/frontend/issuable/components/csv_export_modal_spec.js
index f46b6f72f05..a327da2d63a 100644
--- a/spec/frontend/issuable/components/csv_export_modal_spec.js
+++ b/spec/frontend/issuable/components/csv_export_modal_spec.js
@@ -58,14 +58,14 @@ describe('CsvExportModal', () => {
describe('issuable count info text', () => {
it('displays the info text when issuableCount is > -1', () => {
- wrapper = createComponent({ injectedProperties: { issuableCount: 10 } });
+ wrapper = createComponent({ props: { issuableCount: 10 } });
expect(wrapper.findByTestId('issuable-count-note').exists()).toBe(true);
expect(wrapper.findByTestId('issuable-count-note').text()).toContain('10 issues selected');
expect(findIcon().exists()).toBe(true);
});
it("doesn't display the info text when issuableCount is -1", () => {
- wrapper = createComponent({ injectedProperties: { issuableCount: -1 } });
+ wrapper = createComponent({ props: { issuableCount: -1 } });
expect(wrapper.findByTestId('issuable-count-note').exists()).toBe(false);
});
});
@@ -83,7 +83,7 @@ describe('CsvExportModal', () => {
describe('primary button', () => {
it('passes the exportCsvPath to the button', () => {
const exportCsvPath = '/gitlab-org/gitlab-test/-/issues/export_csv';
- wrapper = createComponent({ injectedProperties: { exportCsvPath } });
+ wrapper = createComponent({ props: { exportCsvPath } });
expect(findButton().attributes('href')).toBe(exportCsvPath);
});
});
diff --git a/spec/frontend/issuable/components/csv_import_export_buttons_spec.js b/spec/frontend/issuable/components/csv_import_export_buttons_spec.js
index e32bf35b13a..2fe8d28a333 100644
--- a/spec/frontend/issuable/components/csv_import_export_buttons_spec.js
+++ b/spec/frontend/issuable/components/csv_import_export_buttons_spec.js
@@ -9,6 +9,9 @@ describe('CsvImportExportButtons', () => {
let wrapper;
let glModalDirective;
+ const exportCsvPath = '/gitlab-org/gitlab-test/-/issues/export_csv';
+ const issuableCount = 10;
+
function createComponent(injectedProperties = {}) {
glModalDirective = jest.fn();
return extendedWrapper(
@@ -24,6 +27,10 @@ describe('CsvImportExportButtons', () => {
provide: {
...injectedProperties,
},
+ propsData: {
+ exportCsvPath,
+ issuableCount,
+ },
}),
);
}
@@ -57,7 +64,7 @@ describe('CsvImportExportButtons', () => {
});
it('renders the export modal', () => {
- expect(findExportCsvModal().exists()).toBe(true);
+ expect(findExportCsvModal().props()).toMatchObject({ exportCsvPath, issuableCount });
});
it('opens the export modal', () => {
diff --git a/spec/frontend/issuable_list/components/issuable_list_root_spec.js b/spec/frontend/issuable_list/components/issuable_list_root_spec.js
index 9c57233548c..38d6d6d86bc 100644
--- a/spec/frontend/issuable_list/components/issuable_list_root_spec.js
+++ b/spec/frontend/issuable_list/components/issuable_list_root_spec.js
@@ -1,5 +1,6 @@
import { GlSkeletonLoading, GlPagination } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
+import VueDraggable from 'vuedraggable';
import { TEST_HOST } from 'helpers/test_constants';
@@ -11,7 +12,7 @@ import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filte
import { mockIssuableListProps, mockIssuables } from '../mock_data';
const createComponent = ({ props = mockIssuableListProps, data = {} } = {}) =>
- mount(IssuableListRoot, {
+ shallowMount(IssuableListRoot, {
propsData: props,
data() {
return data;
@@ -24,20 +25,29 @@ const createComponent = ({ props = mockIssuableListProps, data = {} } = {}) =>
<p class="js-issuable-empty-state">Issuable empty state</p>
`,
},
+ stubs: {
+ IssuableTabs,
+ },
});
describe('IssuableListRoot', () => {
let wrapper;
- beforeEach(() => {
- wrapper = createComponent();
- });
+ const findFilteredSearchBar = () => wrapper.findComponent(FilteredSearchBar);
+ const findGlPagination = () => wrapper.findComponent(GlPagination);
+ const findIssuableItem = () => wrapper.findComponent(IssuableItem);
+ const findIssuableTabs = () => wrapper.findComponent(IssuableTabs);
+ const findVueDraggable = () => wrapper.findComponent(VueDraggable);
afterEach(() => {
wrapper.destroy();
});
describe('computed', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
const mockCheckedIssuables = {
[mockIssuables[0].iid]: { checked: true, issuable: mockIssuables[0] },
[mockIssuables[1].iid]: { checked: true, issuable: mockIssuables[1] },
@@ -108,6 +118,10 @@ describe('IssuableListRoot', () => {
});
describe('watch', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
describe('issuables', () => {
it('populates `checkedIssuables` prop with all issuables', async () => {
wrapper.setProps({
@@ -147,6 +161,10 @@ describe('IssuableListRoot', () => {
});
describe('methods', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
describe('issuableId', () => {
it('returns id value from provided issuable object', () => {
expect(wrapper.vm.issuableId({ id: 1 })).toBe(1);
@@ -171,12 +189,16 @@ describe('IssuableListRoot', () => {
});
describe('template', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
it('renders component container element with class "issuable-list-container"', () => {
expect(wrapper.classes()).toContain('issuable-list-container');
});
it('renders issuable-tabs component', () => {
- const tabsEl = wrapper.find(IssuableTabs);
+ const tabsEl = findIssuableTabs();
expect(tabsEl.exists()).toBe(true);
expect(tabsEl.props()).toMatchObject({
@@ -187,14 +209,14 @@ describe('IssuableListRoot', () => {
});
it('renders contents for slot "nav-actions" within issuable-tab component', () => {
- const buttonEl = wrapper.find(IssuableTabs).find('button.js-new-issuable');
+ const buttonEl = findIssuableTabs().find('button.js-new-issuable');
expect(buttonEl.exists()).toBe(true);
expect(buttonEl.text()).toBe('New issuable');
});
it('renders filtered-search-bar component', () => {
- const searchEl = wrapper.find(FilteredSearchBar);
+ const searchEl = findFilteredSearchBar();
const {
namespace,
recentSearchesStorageKey,
@@ -224,11 +246,13 @@ describe('IssuableListRoot', () => {
await wrapper.vm.$nextTick();
- expect(wrapper.findAll(GlSkeletonLoading)).toHaveLength(wrapper.vm.skeletonItemCount);
+ expect(wrapper.findAllComponents(GlSkeletonLoading)).toHaveLength(
+ wrapper.vm.skeletonItemCount,
+ );
});
it('renders issuable-item component for each item within `issuables` array', () => {
- const itemsEl = wrapper.findAll(IssuableItem);
+ const itemsEl = wrapper.findAllComponents(IssuableItem);
const mockIssuable = mockIssuableListProps.issuables[0];
expect(itemsEl).toHaveLength(mockIssuableListProps.issuables.length);
@@ -257,7 +281,7 @@ describe('IssuableListRoot', () => {
await wrapper.vm.$nextTick();
- const paginationEl = wrapper.find(GlPagination);
+ const paginationEl = findGlPagination();
expect(paginationEl.exists()).toBe(true);
expect(paginationEl.props()).toMatchObject({
perPage: 20,
@@ -271,10 +295,8 @@ describe('IssuableListRoot', () => {
});
describe('events', () => {
- let wrapperChecked;
-
beforeEach(() => {
- wrapperChecked = createComponent({
+ wrapper = createComponent({
data: {
checkedIssuables: {
[mockIssuables[0].iid]: { checked: true, issuable: mockIssuables[0] },
@@ -283,34 +305,30 @@ describe('IssuableListRoot', () => {
});
});
- afterEach(() => {
- wrapperChecked.destroy();
- });
-
it('issuable-tabs component emits `click-tab` event on `click-tab` event', () => {
- wrapper.find(IssuableTabs).vm.$emit('click');
+ findIssuableTabs().vm.$emit('click');
expect(wrapper.emitted('click-tab')).toBeTruthy();
});
it('sets all issuables as checked when filtered-search-bar component emits `checked-input` event', async () => {
- const searchEl = wrapperChecked.find(FilteredSearchBar);
+ const searchEl = findFilteredSearchBar();
searchEl.vm.$emit('checked-input', true);
- await wrapperChecked.vm.$nextTick();
+ await wrapper.vm.$nextTick();
expect(searchEl.emitted('checked-input')).toBeTruthy();
expect(searchEl.emitted('checked-input').length).toBe(1);
- expect(wrapperChecked.vm.checkedIssuables[mockIssuables[0].iid]).toEqual({
+ expect(wrapper.vm.checkedIssuables[mockIssuables[0].iid]).toEqual({
checked: true,
issuable: mockIssuables[0],
});
});
it('filtered-search-bar component emits `filter` event on `onFilter` & `sort` event on `onSort` events', () => {
- const searchEl = wrapper.find(FilteredSearchBar);
+ const searchEl = findFilteredSearchBar();
searchEl.vm.$emit('onFilter');
expect(wrapper.emitted('filter')).toBeTruthy();
@@ -319,21 +337,33 @@ describe('IssuableListRoot', () => {
});
it('sets an issuable as checked when issuable-item component emits `checked-input` event', async () => {
- const issuableItem = wrapperChecked.findAll(IssuableItem).at(0);
+ const issuableItem = wrapper.findAllComponents(IssuableItem).at(0);
issuableItem.vm.$emit('checked-input', true);
- await wrapperChecked.vm.$nextTick();
+ await wrapper.vm.$nextTick();
expect(issuableItem.emitted('checked-input')).toBeTruthy();
expect(issuableItem.emitted('checked-input').length).toBe(1);
- expect(wrapperChecked.vm.checkedIssuables[mockIssuables[0].iid]).toEqual({
+ expect(wrapper.vm.checkedIssuables[mockIssuables[0].iid]).toEqual({
checked: true,
issuable: mockIssuables[0],
});
});
+ it('emits `update-legacy-bulk-edit` when filtered-search-bar checkbox is checked', () => {
+ findFilteredSearchBar().vm.$emit('checked-input');
+
+ expect(wrapper.emitted('update-legacy-bulk-edit')).toEqual([[]]);
+ });
+
+ it('emits `update-legacy-bulk-edit` when issuable-item checkbox is checked', () => {
+ findIssuableItem().vm.$emit('checked-input');
+
+ expect(wrapper.emitted('update-legacy-bulk-edit')).toEqual([[]]);
+ });
+
it('gl-pagination component emits `page-change` event on `input` event', async () => {
wrapper.setProps({
showPaginationControls: true,
@@ -341,8 +371,48 @@ describe('IssuableListRoot', () => {
await wrapper.vm.$nextTick();
- wrapper.find(GlPagination).vm.$emit('input');
+ findGlPagination().vm.$emit('input');
expect(wrapper.emitted('page-change')).toBeTruthy();
});
});
+
+ describe('manual sorting', () => {
+ describe('when enabled', () => {
+ beforeEach(() => {
+ wrapper = createComponent({
+ props: {
+ ...mockIssuableListProps,
+ isManualOrdering: true,
+ },
+ });
+ });
+
+ it('renders VueDraggable component', () => {
+ expect(findVueDraggable().exists()).toBe(true);
+ });
+
+ it('IssuableItem has grab cursor', () => {
+ expect(findIssuableItem().classes()).toContain('gl-cursor-grab');
+ });
+
+ it('emits a "reorder" event when user updates the issue order', () => {
+ const oldIndex = 4;
+ const newIndex = 6;
+
+ findVueDraggable().vm.$emit('update', { oldIndex, newIndex });
+
+ expect(wrapper.emitted('reorder')).toEqual([[{ oldIndex, newIndex }]]);
+ });
+ });
+
+ describe('when disabled', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ it('does not render VueDraggable component', () => {
+ expect(findVueDraggable().exists()).toBe(false);
+ });
+ });
+ });
});
diff --git a/spec/frontend/issuable_list/components/issuable_tabs_spec.js b/spec/frontend/issuable_list/components/issuable_tabs_spec.js
index 3cc237b9ce9..cbf5765078a 100644
--- a/spec/frontend/issuable_list/components/issuable_tabs_spec.js
+++ b/spec/frontend/issuable_list/components/issuable_tabs_spec.js
@@ -34,6 +34,9 @@ describe('IssuableTabs', () => {
wrapper.destroy();
});
+ const findAllGlBadges = () => wrapper.findAllComponents(GlBadge);
+ const findAllGlTabs = () => wrapper.findAllComponents(GlTab);
+
describe('methods', () => {
describe('isTabActive', () => {
it.each`
@@ -57,17 +60,19 @@ describe('IssuableTabs', () => {
describe('template', () => {
it('renders gl-tab for each tab within `tabs` array', () => {
- const tabsEl = wrapper.findAll(GlTab);
+ const tabsEl = findAllGlTabs();
expect(tabsEl.exists()).toBe(true);
expect(tabsEl).toHaveLength(mockIssuableListProps.tabs.length);
});
it('renders gl-badge component within a tab', () => {
- const badgeEl = wrapper.findAll(GlBadge).at(0);
+ const badges = findAllGlBadges();
- expect(badgeEl.exists()).toBe(true);
- expect(badgeEl.text()).toBe(`${mockIssuableListProps.tabCounts.opened}`);
+ // Does not render `All` badge since it has an undefined count
+ expect(badges).toHaveLength(2);
+ expect(badges.at(0).text()).toBe(`${mockIssuableListProps.tabCounts.opened}`);
+ expect(badges.at(1).text()).toBe(`${mockIssuableListProps.tabCounts.closed}`);
});
it('renders contents for slot "nav-actions"', () => {
@@ -80,7 +85,7 @@ describe('IssuableTabs', () => {
describe('events', () => {
it('gl-tab component emits `click` event on `click` event', () => {
- const tabEl = wrapper.findAll(GlTab).at(0);
+ const tabEl = findAllGlTabs().at(0);
tabEl.vm.$emit('click', 'opened');
diff --git a/spec/frontend/issuable_list/mock_data.js b/spec/frontend/issuable_list/mock_data.js
index 33ffd60bf95..e2fa99f7cc9 100644
--- a/spec/frontend/issuable_list/mock_data.js
+++ b/spec/frontend/issuable_list/mock_data.js
@@ -135,7 +135,7 @@ export const mockTabs = [
export const mockTabCounts = {
opened: 5,
closed: 0,
- all: 5,
+ all: undefined,
};
export const mockIssuableListProps = {
diff --git a/spec/frontend/issuable_show/mock_data.js b/spec/frontend/issuable_show/mock_data.js
index 9ecff705617..986d32b4982 100644
--- a/spec/frontend/issuable_show/mock_data.js
+++ b/spec/frontend/issuable_show/mock_data.js
@@ -32,6 +32,7 @@ export const mockIssuableShowProps = {
editFormVisible: false,
enableAutocomplete: true,
enableAutosave: true,
+ enableZenMode: true,
enableTaskList: true,
enableEdit: true,
showFieldTitle: false,
diff --git a/spec/frontend/issuable_type_selector/components/__snapshots__/info_popover_spec.js.snap b/spec/frontend/issuable_type_selector/components/__snapshots__/info_popover_spec.js.snap
new file mode 100644
index 00000000000..196fbb8a643
--- /dev/null
+++ b/spec/frontend/issuable_type_selector/components/__snapshots__/info_popover_spec.js.snap
@@ -0,0 +1,52 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Issuable type info popover renders 1`] = `
+<span
+ id="popovercontainer"
+>
+ <gl-icon-stub
+ class="gl-ml-5 gl-text-gray-500"
+ id="issuable-type-info"
+ name="question-o"
+ size="16"
+ />
+
+ <gl-popover-stub
+ container="popovercontainer"
+ cssclasses=""
+ target="issuable-type-info"
+ title="Issue types"
+ triggers="focus hover"
+ >
+ <ul
+ class="gl-list-style-none gl-p-0 gl-m-0"
+ >
+ <li
+ class="gl-mb-3"
+ >
+ <div
+ class="gl-font-weight-bold"
+ >
+ Issue
+ </div>
+
+ <span>
+ For general work
+ </span>
+ </li>
+
+ <li>
+ <div
+ class="gl-font-weight-bold"
+ >
+ Incident
+ </div>
+
+ <span>
+ For investigating IT service disruptions or outages
+ </span>
+ </li>
+ </ul>
+ </gl-popover-stub>
+</span>
+`;
diff --git a/spec/frontend/issuable_type_selector/components/info_popover_spec.js b/spec/frontend/issuable_type_selector/components/info_popover_spec.js
new file mode 100644
index 00000000000..975977ffeb3
--- /dev/null
+++ b/spec/frontend/issuable_type_selector/components/info_popover_spec.js
@@ -0,0 +1,20 @@
+import { shallowMount } from '@vue/test-utils';
+import InfoPopover from '~/issuable_type_selector/components/info_popover.vue';
+
+describe('Issuable type info popover', () => {
+ let wrapper;
+
+ function createComponent() {
+ wrapper = shallowMount(InfoPopover);
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders', () => {
+ createComponent();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+});
diff --git a/spec/frontend/issue_show/components/edit_actions_spec.js b/spec/frontend/issue_show/components/edit_actions_spec.js
index 6a00eec4b1f..54707879f63 100644
--- a/spec/frontend/issue_show/components/edit_actions_spec.js
+++ b/spec/frontend/issue_show/components/edit_actions_spec.js
@@ -48,7 +48,7 @@ describe('Edit Actions components', () => {
vm.formState.title = '';
Vue.nextTick(() => {
- expect(vm.$el.querySelector('.btn-success').getAttribute('disabled')).toBe('disabled');
+ expect(vm.$el.querySelector('.btn-confirm').getAttribute('disabled')).toBe('disabled');
done();
});
@@ -65,16 +65,16 @@ describe('Edit Actions components', () => {
describe('updateIssuable', () => {
it('sends update.issuable event when clicking save button', () => {
- vm.$el.querySelector('.btn-success').click();
+ vm.$el.querySelector('.btn-confirm').click();
expect(eventHub.$emit).toHaveBeenCalledWith('update.issuable');
});
it('disables button after clicking save button', (done) => {
- vm.$el.querySelector('.btn-success').click();
+ vm.$el.querySelector('.btn-confirm').click();
Vue.nextTick(() => {
- expect(vm.$el.querySelector('.btn-success').getAttribute('disabled')).toBe('disabled');
+ expect(vm.$el.querySelector('.btn-confirm').getAttribute('disabled')).toBe('disabled');
done();
});
diff --git a/spec/frontend/issues_list/components/issues_list_app_spec.js b/spec/frontend/issues_list/components/issues_list_app_spec.js
index 1053e8934c9..476804bda12 100644
--- a/spec/frontend/issues_list/components/issues_list_app_spec.js
+++ b/spec/frontend/issues_list/components/issues_list_app_spec.js
@@ -1,19 +1,59 @@
-import { shallowMount } from '@vue/test-utils';
+import { GlButton, GlEmptyState, GlLink } from '@gitlab/ui';
+import { mount, shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
+import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import CsvImportExportButtons from '~/issuable/components/csv_import_export_buttons.vue';
import IssuableList from '~/issuable_list/components/issuable_list_root.vue';
+import { IssuableListTabs, IssuableStates } from '~/issuable_list/constants';
import IssuesListApp from '~/issues_list/components/issues_list_app.vue';
+
+import {
+ CREATED_DESC,
+ PAGE_SIZE,
+ PAGE_SIZE_MANUAL,
+ RELATIVE_POSITION_ASC,
+ sortOptions,
+ sortParams,
+} from '~/issues_list/constants';
+import eventHub from '~/issues_list/eventhub';
import axios from '~/lib/utils/axios_utils';
+import { setUrlParams } from '~/lib/utils/url_utility';
+
+jest.mock('~/flash');
describe('IssuesListApp component', () => {
+ const originalWindowLocation = window.location;
let axiosMock;
let wrapper;
- const fullPath = 'path/to/project';
- const endpoint = 'api/endpoint';
+ const defaultProvide = {
+ calendarPath: 'calendar/path',
+ canBulkUpdate: false,
+ emptyStateSvgPath: 'empty-state.svg',
+ endpoint: 'api/endpoint',
+ exportCsvPath: 'export/csv/path',
+ fullPath: 'path/to/project',
+ hasIssues: true,
+ isSignedIn: false,
+ issuesPath: 'path/to/issues',
+ jiraIntegrationPath: 'jira/integration/path',
+ newIssuePath: 'new/issue/path',
+ rssPath: 'rss/path',
+ showImportButton: true,
+ showNewIssueLink: true,
+ signInPath: 'sign/in/path',
+ };
+
const state = 'opened';
const xPage = 1;
const xTotal = 25;
+ const tabCounts = {
+ opened: xTotal,
+ closed: undefined,
+ all: undefined,
+ };
const fetchIssuesResponse = {
data: [],
headers: {
@@ -22,76 +62,484 @@ describe('IssuesListApp component', () => {
},
};
+ const findCsvImportExportButtons = () => wrapper.findComponent(CsvImportExportButtons);
+ const findGlButton = () => wrapper.findComponent(GlButton);
+ const findGlButtons = () => wrapper.findAllComponents(GlButton);
+ const findGlButtonAt = (index) => findGlButtons().at(index);
+ const findGlEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findGlLink = () => wrapper.findComponent(GlLink);
const findIssuableList = () => wrapper.findComponent(IssuableList);
- const mountComponent = () =>
- shallowMount(IssuesListApp, {
+ const mountComponent = ({ provide = {}, mountFn = shallowMount } = {}) =>
+ mountFn(IssuesListApp, {
provide: {
- endpoint,
- fullPath,
+ ...defaultProvide,
+ ...provide,
},
});
- beforeEach(async () => {
+ beforeEach(() => {
axiosMock = new AxiosMockAdapter(axios);
- axiosMock.onGet(endpoint).reply(200, fetchIssuesResponse.data, fetchIssuesResponse.headers);
- wrapper = mountComponent();
- await waitForPromises();
+ axiosMock
+ .onGet(defaultProvide.endpoint)
+ .reply(200, fetchIssuesResponse.data, fetchIssuesResponse.headers);
});
afterEach(() => {
+ window.location = originalWindowLocation;
axiosMock.reset();
wrapper.destroy();
});
- it('renders IssuableList', () => {
- expect(findIssuableList().props()).toMatchObject({
- namespace: fullPath,
- recentSearchesStorageKey: 'issues',
- searchInputPlaceholder: 'Search or filter results…',
- showPaginationControls: true,
- issuables: [],
- totalItems: xTotal,
- currentPage: xPage,
- previousPage: xPage - 1,
- nextPage: xPage + 1,
- urlParams: { page: xPage, state },
+ describe('IssuableList', () => {
+ beforeEach(async () => {
+ wrapper = mountComponent();
+ await waitForPromises();
+ });
+
+ it('renders', () => {
+ expect(findIssuableList().props()).toMatchObject({
+ namespace: defaultProvide.fullPath,
+ recentSearchesStorageKey: 'issues',
+ searchInputPlaceholder: 'Search or filter results…',
+ sortOptions,
+ initialSortBy: CREATED_DESC,
+ tabs: IssuableListTabs,
+ currentTab: IssuableStates.Opened,
+ tabCounts,
+ showPaginationControls: false,
+ issuables: [],
+ totalItems: xTotal,
+ currentPage: xPage,
+ previousPage: xPage - 1,
+ nextPage: xPage + 1,
+ urlParams: { page: xPage, state },
+ });
});
});
- describe('when "page-change" event is emitted', () => {
- const data = [{ id: 10, title: 'title', state }];
- const page = 2;
- const totalItems = 21;
+ describe('header action buttons', () => {
+ it('renders rss button', () => {
+ wrapper = mountComponent();
- beforeEach(async () => {
- axiosMock.onGet(endpoint).reply(200, data, {
- 'x-page': page,
- 'x-total': totalItems,
+ expect(findGlButtonAt(0).attributes()).toMatchObject({
+ href: defaultProvide.rssPath,
+ icon: 'rss',
+ 'aria-label': IssuesListApp.i18n.rssLabel,
+ });
+ });
+
+ it('renders calendar button', () => {
+ wrapper = mountComponent();
+
+ expect(findGlButtonAt(1).attributes()).toMatchObject({
+ href: defaultProvide.calendarPath,
+ icon: 'calendar',
+ 'aria-label': IssuesListApp.i18n.calendarLabel,
+ });
+ });
+
+ it('renders csv import/export component', async () => {
+ const search = '?page=1&search=refactor';
+
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value: { search },
});
- findIssuableList().vm.$emit('page-change', page);
+ wrapper = mountComponent();
await waitForPromises();
+
+ expect(findCsvImportExportButtons().props()).toMatchObject({
+ exportCsvPath: `${defaultProvide.exportCsvPath}${search}`,
+ issuableCount: xTotal,
+ });
});
- it('fetches issues with expected params', async () => {
- expect(axiosMock.history.get[1].params).toEqual({
- page,
- per_page: 20,
- state,
- with_labels_details: true,
+ describe('bulk edit button', () => {
+ it('renders when user has permissions', () => {
+ wrapper = mountComponent({ provide: { canBulkUpdate: true } });
+
+ expect(findGlButtonAt(2).text()).toBe('Edit issues');
+ });
+
+ it('does not render when user does not have permissions', () => {
+ wrapper = mountComponent({ provide: { canBulkUpdate: false } });
+
+ expect(findGlButtons().filter((button) => button.text() === 'Edit issues')).toHaveLength(0);
+ });
+
+ it('emits "issuables:enableBulkEdit" event to legacy bulk edit class', () => {
+ wrapper = mountComponent({ provide: { canBulkUpdate: true } });
+
+ jest.spyOn(eventHub, '$emit');
+
+ findGlButtonAt(2).vm.$emit('click');
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('issuables:enableBulkEdit');
});
});
- it('updates IssuableList with response data', () => {
- expect(findIssuableList().props()).toMatchObject({
- issuables: data,
- totalItems,
- currentPage: page,
- previousPage: page - 1,
- nextPage: page + 1,
- urlParams: { page, state },
+ describe('new issue button', () => {
+ it('renders when user has permissions', () => {
+ wrapper = mountComponent({ provide: { showNewIssueLink: true } });
+
+ expect(findGlButtonAt(2).text()).toBe('New issue');
+ expect(findGlButtonAt(2).attributes('href')).toBe(defaultProvide.newIssuePath);
+ });
+
+ it('does not render when user does not have permissions', () => {
+ wrapper = mountComponent({ provide: { showNewIssueLink: false } });
+
+ expect(findGlButtons().filter((button) => button.text() === 'New issue')).toHaveLength(0);
+ });
+ });
+ });
+
+ describe('initial url params', () => {
+ describe('page', () => {
+ it('is set from the url params', () => {
+ const page = 5;
+
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value: { href: setUrlParams({ page }, TEST_HOST) },
+ });
+
+ wrapper = mountComponent();
+
+ expect(findIssuableList().props('currentPage')).toBe(page);
+ });
+ });
+
+ describe('sort', () => {
+ it.each(Object.keys(sortParams))('is set as %s from the url params', (sortKey) => {
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value: { href: setUrlParams(sortParams[sortKey], TEST_HOST) },
+ });
+
+ wrapper = mountComponent();
+
+ expect(findIssuableList().props()).toMatchObject({
+ initialSortBy: sortKey,
+ urlParams: sortParams[sortKey],
+ });
+ });
+ });
+
+ describe('state', () => {
+ it('is set from the url params', () => {
+ const initialState = IssuableStates.All;
+
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value: { href: setUrlParams({ state: initialState }, TEST_HOST) },
+ });
+
+ wrapper = mountComponent();
+
+ expect(findIssuableList().props('currentTab')).toBe(initialState);
+ });
+ });
+ });
+
+ describe('bulk edit', () => {
+ describe.each([true, false])(
+ 'when "issuables:toggleBulkEdit" event is received with payload `%s`',
+ (isBulkEdit) => {
+ beforeEach(() => {
+ wrapper = mountComponent();
+
+ eventHub.$emit('issuables:toggleBulkEdit', isBulkEdit);
+ });
+
+ it(`${isBulkEdit ? 'enables' : 'disables'} bulk edit`, () => {
+ expect(findIssuableList().props('showBulkEditSidebar')).toBe(isBulkEdit);
+ });
+ },
+ );
+ });
+
+ describe('empty states', () => {
+ describe('when there are issues', () => {
+ describe('when search returns no results', () => {
+ beforeEach(async () => {
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value: { href: setUrlParams({ search: 'no results' }, TEST_HOST) },
+ });
+
+ wrapper = mountComponent({ provide: { hasIssues: true } });
+
+ await waitForPromises();
+ });
+
+ it('shows empty state', () => {
+ expect(findGlEmptyState().props()).toMatchObject({
+ description: IssuesListApp.i18n.noSearchResultsDescription,
+ title: IssuesListApp.i18n.noSearchResultsTitle,
+ svgPath: defaultProvide.emptyStateSvgPath,
+ });
+ });
+ });
+
+ describe('when "Open" tab has no issues', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({ provide: { hasIssues: true } });
+ });
+
+ it('shows empty state', () => {
+ expect(findGlEmptyState().props()).toMatchObject({
+ description: IssuesListApp.i18n.noOpenIssuesDescription,
+ title: IssuesListApp.i18n.noOpenIssuesTitle,
+ svgPath: defaultProvide.emptyStateSvgPath,
+ });
+ });
+ });
+
+ describe('when "Closed" tab has no issues', () => {
+ beforeEach(async () => {
+ Object.defineProperty(window, 'location', {
+ writable: true,
+ value: { href: setUrlParams({ state: IssuableStates.Closed }, TEST_HOST) },
+ });
+
+ wrapper = mountComponent({ provide: { hasIssues: true } });
+ });
+
+ it('shows empty state', () => {
+ expect(findGlEmptyState().props()).toMatchObject({
+ title: IssuesListApp.i18n.noClosedIssuesTitle,
+ svgPath: defaultProvide.emptyStateSvgPath,
+ });
+ });
+ });
+ });
+
+ describe('when there are no issues', () => {
+ describe('when user is logged in', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({
+ provide: { hasIssues: false, isSignedIn: true },
+ mountFn: mount,
+ });
+ });
+
+ it('shows empty state', () => {
+ expect(findGlEmptyState().props()).toMatchObject({
+ description: IssuesListApp.i18n.noIssuesSignedInDescription,
+ title: IssuesListApp.i18n.noIssuesSignedInTitle,
+ svgPath: defaultProvide.emptyStateSvgPath,
+ });
+ });
+
+ it('shows "New issue" and import/export buttons', () => {
+ expect(findGlButton().text()).toBe(IssuesListApp.i18n.newIssueLabel);
+ expect(findGlButton().attributes('href')).toBe(defaultProvide.newIssuePath);
+ expect(findCsvImportExportButtons().props()).toMatchObject({
+ exportCsvPath: defaultProvide.exportCsvPath,
+ issuableCount: 0,
+ });
+ });
+
+ it('shows Jira integration information', () => {
+ const paragraphs = wrapper.findAll('p');
+ expect(paragraphs.at(2).text()).toContain(IssuesListApp.i18n.jiraIntegrationTitle);
+ expect(paragraphs.at(3).text()).toContain(
+ 'Enable the Jira integration to view your Jira issues in GitLab.',
+ );
+ expect(paragraphs.at(4).text()).toContain(
+ IssuesListApp.i18n.jiraIntegrationSecondaryMessage,
+ );
+ expect(findGlLink().text()).toBe('Enable the Jira integration');
+ expect(findGlLink().attributes('href')).toBe(defaultProvide.jiraIntegrationPath);
+ });
+ });
+
+ describe('when user is logged out', () => {
+ beforeEach(() => {
+ wrapper = mountComponent({
+ provide: { hasIssues: false, isSignedIn: false },
+ });
+ });
+
+ it('shows empty state', () => {
+ expect(findGlEmptyState().props()).toMatchObject({
+ description: IssuesListApp.i18n.noIssuesSignedOutDescription,
+ title: IssuesListApp.i18n.noIssuesSignedOutTitle,
+ svgPath: defaultProvide.emptyStateSvgPath,
+ primaryButtonText: IssuesListApp.i18n.noIssuesSignedOutButtonText,
+ primaryButtonLink: defaultProvide.signInPath,
+ });
+ });
+ });
+ });
+ });
+
+ describe('events', () => {
+ describe('when "click-tab" event is emitted by IssuableList', () => {
+ beforeEach(() => {
+ axiosMock.onGet(defaultProvide.endpoint).reply(200, fetchIssuesResponse.data, {
+ 'x-page': 2,
+ 'x-total': xTotal,
+ });
+
+ wrapper = mountComponent();
+
+ findIssuableList().vm.$emit('click-tab', IssuableStates.Closed);
+ });
+
+ it('makes API call to filter the list by the new state and resets the page to 1', () => {
+ expect(axiosMock.history.get[1].params).toMatchObject({
+ page: 1,
+ state: IssuableStates.Closed,
+ });
+ });
+ });
+
+ describe('when "page-change" event is emitted by IssuableList', () => {
+ const data = [{ id: 10, title: 'title', state }];
+ const page = 2;
+ const totalItems = 21;
+
+ beforeEach(async () => {
+ axiosMock.onGet(defaultProvide.endpoint).reply(200, data, {
+ 'x-page': page,
+ 'x-total': totalItems,
+ });
+
+ wrapper = mountComponent();
+
+ findIssuableList().vm.$emit('page-change', page);
+
+ await waitForPromises();
+ });
+
+ it('fetches issues with expected params', () => {
+ expect(axiosMock.history.get[1].params).toEqual({
+ page,
+ per_page: PAGE_SIZE,
+ state,
+ with_labels_details: true,
+ });
+ });
+
+ it('updates IssuableList with response data', () => {
+ expect(findIssuableList().props()).toMatchObject({
+ issuables: data,
+ totalItems,
+ currentPage: page,
+ previousPage: page - 1,
+ nextPage: page + 1,
+ urlParams: { page, state },
+ });
+ });
+ });
+
+ describe('when "reorder" event is emitted by IssuableList', () => {
+ const issueOne = { id: 1, iid: 101, title: 'Issue one' };
+ const issueTwo = { id: 2, iid: 102, title: 'Issue two' };
+ const issueThree = { id: 3, iid: 103, title: 'Issue three' };
+ const issueFour = { id: 4, iid: 104, title: 'Issue four' };
+ const issues = [issueOne, issueTwo, issueThree, issueFour];
+
+ beforeEach(async () => {
+ axiosMock.onGet(defaultProvide.endpoint).reply(200, issues, fetchIssuesResponse.headers);
+ wrapper = mountComponent();
+ await waitForPromises();
+ });
+
+ describe('when successful', () => {
+ describe.each`
+ description | issueToMove | oldIndex | newIndex | moveBeforeId | moveAfterId
+ ${'to the beginning of the list'} | ${issueThree} | ${2} | ${0} | ${null} | ${issueOne.id}
+ ${'down the list'} | ${issueOne} | ${0} | ${1} | ${issueTwo.id} | ${issueThree.id}
+ ${'up the list'} | ${issueThree} | ${2} | ${1} | ${issueOne.id} | ${issueTwo.id}
+ ${'to the end of the list'} | ${issueTwo} | ${1} | ${3} | ${issueFour.id} | ${null}
+ `(
+ 'when moving issue $description',
+ ({ issueToMove, oldIndex, newIndex, moveBeforeId, moveAfterId }) => {
+ it('makes API call to reorder the issue', async () => {
+ findIssuableList().vm.$emit('reorder', { oldIndex, newIndex });
+
+ await waitForPromises();
+
+ expect(axiosMock.history.put[0]).toMatchObject({
+ url: `${defaultProvide.issuesPath}/${issueToMove.iid}/reorder`,
+ data: JSON.stringify({ move_before_id: moveBeforeId, move_after_id: moveAfterId }),
+ });
+ });
+ },
+ );
+ });
+
+ describe('when unsuccessful', () => {
+ it('displays an error message', async () => {
+ axiosMock.onPut(`${defaultProvide.issuesPath}/${issueOne.iid}/reorder`).reply(500);
+
+ findIssuableList().vm.$emit('reorder', { oldIndex: 0, newIndex: 1 });
+
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalledWith({ message: IssuesListApp.i18n.reorderError });
+ });
+ });
+ });
+
+ describe('when "sort" event is emitted by IssuableList', () => {
+ it.each(Object.keys(sortParams))(
+ 'fetches issues with correct params with payload `%s`',
+ async (sortKey) => {
+ wrapper = mountComponent();
+
+ findIssuableList().vm.$emit('sort', sortKey);
+
+ await waitForPromises();
+
+ expect(axiosMock.history.get[1].params).toEqual({
+ page: xPage,
+ per_page: sortKey === RELATIVE_POSITION_ASC ? PAGE_SIZE_MANUAL : PAGE_SIZE,
+ state,
+ with_labels_details: true,
+ ...sortParams[sortKey],
+ });
+ },
+ );
+ });
+
+ describe('when "update-legacy-bulk-edit" event is emitted by IssuableList', () => {
+ beforeEach(() => {
+ wrapper = mountComponent();
+ jest.spyOn(eventHub, '$emit');
+ });
+
+ it('emits an "issuables:updateBulkEdit" event to the legacy bulk edit class', async () => {
+ findIssuableList().vm.$emit('update-legacy-bulk-edit');
+
+ await waitForPromises();
+
+ expect(eventHub.$emit).toHaveBeenCalledWith('issuables:updateBulkEdit');
+ });
+ });
+
+ describe('when "filter" event is emitted by IssuableList', () => {
+ beforeEach(async () => {
+ wrapper = mountComponent();
+
+ const payload = [
+ { type: 'filtered-search-term', value: { data: 'no' } },
+ { type: 'filtered-search-term', value: { data: 'issues' } },
+ ];
+
+ findIssuableList().vm.$emit('filter', payload);
+
+ await waitForPromises();
+ });
+
+ it('makes an API call to search for issues with the search term', () => {
+ expect(axiosMock.history.get[1].params).toMatchObject({ search: 'no issues' });
});
});
});
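
A recurring pattern in the spec above is swapping window.location for a plain object with Object.defineProperty and restoring the real object in afterEach. A minimal, self-contained sketch of that technique in isolation (the query string is taken from the spec; the test name is illustrative):

// Sketch: replacing window.location in Jest and restoring it afterwards.
describe('window.location stubbing (illustrative)', () => {
  const originalWindowLocation = window.location;

  afterEach(() => {
    // Put the real Location object back so later tests are unaffected.
    window.location = originalWindowLocation;
  });

  it('exposes the stubbed search string', () => {
    Object.defineProperty(window, 'location', {
      writable: true,
      value: { search: '?page=1&search=refactor' },
    });

    expect(window.location.search).toBe('?page=1&search=refactor');
  });
});
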
diff --git a/spec/frontend/jira_connect/api_spec.js b/spec/frontend/jira_connect/api_spec.js
index 240a57c7917..88922999715 100644
--- a/spec/frontend/jira_connect/api_spec.js
+++ b/spec/frontend/jira_connect/api_spec.js
@@ -1,8 +1,13 @@
import MockAdapter from 'axios-mock-adapter';
import { addSubscription, removeSubscription, fetchGroups } from '~/jira_connect/api';
+import { getJwt } from '~/jira_connect/utils';
import axios from '~/lib/utils/axios_utils';
import httpStatus from '~/lib/utils/http_status';
+jest.mock('~/jira_connect/utils', () => ({
+ getJwt: jest.fn().mockResolvedValue('jwt'),
+}));
+
describe('JiraConnect API', () => {
let mock;
let response;
@@ -13,14 +18,6 @@ describe('JiraConnect API', () => {
const mockJwt = 'jwt';
const mockResponse = { success: true };
- const tokenSpy = jest.fn((callback) => callback(mockJwt));
-
- window.AP = {
- context: {
- getToken: tokenSpy,
- },
- };
-
beforeEach(() => {
mock = new MockAdapter(axios);
});
@@ -44,7 +41,7 @@ describe('JiraConnect API', () => {
response = await makeRequest();
- expect(tokenSpy).toHaveBeenCalled();
+ expect(getJwt).toHaveBeenCalled();
expect(axios.post).toHaveBeenCalledWith(mockAddPath, {
jwt: mockJwt,
namespace_path: mockNamespace,
@@ -62,7 +59,7 @@ describe('JiraConnect API', () => {
response = await makeRequest();
- expect(tokenSpy).toHaveBeenCalled();
+ expect(getJwt).toHaveBeenCalled();
expect(axios.delete).toHaveBeenCalledWith(mockRemovePath, {
params: {
jwt: mockJwt,
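
The change above replaces the old global AP token spy with a module mock: jest.mock takes a factory whose functions resolve deterministically. A stripped-down sketch of the same idea (module path and token value mirror the spec; the test body is illustrative):

import { getJwt } from '~/jira_connect/utils';

// jest.mock calls are hoisted above the imports, so the factory below is
// what the import above resolves to inside this test file.
jest.mock('~/jira_connect/utils', () => ({
  getJwt: jest.fn().mockResolvedValue('jwt'),
}));

describe('module mock for getJwt (illustrative)', () => {
  it('resolves with the mocked token and records the call', async () => {
    await expect(getJwt()).resolves.toBe('jwt');

    expect(getJwt).toHaveBeenCalledTimes(1);
  });
});
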
diff --git a/spec/frontend/jira_connect/components/__snapshots__/group_item_name_spec.js.snap b/spec/frontend/jira_connect/components/__snapshots__/group_item_name_spec.js.snap
new file mode 100644
index 00000000000..21c903f064d
--- /dev/null
+++ b/spec/frontend/jira_connect/components/__snapshots__/group_item_name_spec.js.snap
@@ -0,0 +1,44 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`GroupItemName template matches the snapshot 1`] = `
+<div
+ class="gl-display-flex gl-align-items-center"
+>
+ <gl-icon-stub
+ class="gl-mr-3"
+ name="folder-o"
+ size="16"
+ />
+
+ <div
+ class="gl-display-none gl-flex-shrink-0 gl-sm-display-flex gl-mr-3"
+ >
+ <gl-avatar-stub
+ alt="avatar"
+ entityid="0"
+ entityname="Gitlab Org"
+ shape="rect"
+ size="32"
+ src="avatar.png"
+ />
+ </div>
+
+ <div>
+ <span
+ class="gl-mr-3 gl-text-gray-900! gl-font-weight-bold"
+ >
+
+ Gitlab Org
+
+ </span>
+
+ <div>
+ <p
+ class="gl-mt-2! gl-mb-0 gl-text-gray-600"
+ >
+ Open source software to collaborate on code
+ </p>
+ </div>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/jira_connect/components/app_spec.js b/spec/frontend/jira_connect/components/app_spec.js
index e2a5cd1be9d..e0d61d8209b 100644
--- a/spec/frontend/jira_connect/components/app_spec.js
+++ b/spec/frontend/jira_connect/components/app_spec.js
@@ -1,50 +1,39 @@
import { GlAlert, GlButton, GlModal, GlLink } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import JiraConnectApp from '~/jira_connect/components/app.vue';
import createStore from '~/jira_connect/store';
import { SET_ALERT } from '~/jira_connect/store/mutation_types';
-import { persistAlert } from '~/jira_connect/utils';
import { __ } from '~/locale';
-jest.mock('~/jira_connect/api');
+jest.mock('~/jira_connect/utils', () => ({
+ retrieveAlert: jest.fn().mockReturnValue({ message: 'error message' }),
+ getLocation: jest.fn(),
+}));
describe('JiraConnectApp', () => {
let wrapper;
let store;
const findAlert = () => wrapper.findComponent(GlAlert);
- const findAlertLink = () => findAlert().find(GlLink);
+ const findAlertLink = () => findAlert().findComponent(GlLink);
const findGlButton = () => wrapper.findComponent(GlButton);
const findGlModal = () => wrapper.findComponent(GlModal);
- const findHeader = () => wrapper.findByTestId('new-jira-connect-ui-heading');
- const findHeaderText = () => findHeader().text();
const createComponent = ({ provide, mountFn = shallowMount } = {}) => {
store = createStore();
- wrapper = extendedWrapper(
- mountFn(JiraConnectApp, {
- store,
- provide,
- }),
- );
+ wrapper = mountFn(JiraConnectApp, {
+ store,
+ provide,
+ });
};
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
describe('template', () => {
- it('renders new UI', () => {
- createComponent();
-
- expect(findHeader().exists()).toBe(true);
- expect(findHeaderText()).toBe('Linked namespaces');
- });
-
describe('when user is not logged in', () => {
beforeEach(() => {
createComponent({
@@ -128,7 +117,6 @@ describe('JiraConnectApp', () => {
describe('when alert is set in localStoage', () => {
it('renders alert on mount', () => {
- persistAlert({ message: 'error message' });
createComponent();
const alert = findAlert();
diff --git a/spec/frontend/jira_connect/components/group_item_name_spec.js b/spec/frontend/jira_connect/components/group_item_name_spec.js
new file mode 100644
index 00000000000..ea0067f8ed1
--- /dev/null
+++ b/spec/frontend/jira_connect/components/group_item_name_spec.js
@@ -0,0 +1,28 @@
+import { shallowMount } from '@vue/test-utils';
+
+import GroupItemName from '~/jira_connect/components/group_item_name.vue';
+import { mockGroup1 } from '../mock_data';
+
+describe('GroupItemName', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(GroupItemName, {
+ propsData: {
+ group: mockGroup1,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('template', () => {
+ it('matches the snapshot', () => {
+ createComponent();
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+ });
+});
diff --git a/spec/frontend/jira_connect/components/groups_list_item_spec.js b/spec/frontend/jira_connect/components/groups_list_item_spec.js
index da16223255c..bcc27cc2898 100644
--- a/spec/frontend/jira_connect/components/groups_list_item_spec.js
+++ b/spec/frontend/jira_connect/components/groups_list_item_spec.js
@@ -1,11 +1,11 @@
-import { GlAvatar, GlButton } from '@gitlab/ui';
+import { GlButton } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
-import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import * as JiraConnectApi from '~/jira_connect/api';
+import GroupItemName from '~/jira_connect/components/group_item_name.vue';
import GroupsListItem from '~/jira_connect/components/groups_list_item.vue';
-import { persistAlert } from '~/jira_connect/utils';
+import { persistAlert, reloadPage } from '~/jira_connect/utils';
import { mockGroup1 } from '../mock_data';
jest.mock('~/jira_connect/utils');
@@ -14,36 +14,23 @@ describe('GroupsListItem', () => {
let wrapper;
const mockSubscriptionPath = 'subscriptionPath';
- const reloadSpy = jest.fn();
-
- global.AP = {
- navigator: {
- reload: reloadSpy,
- },
- };
-
const createComponent = ({ mountFn = shallowMount } = {}) => {
- wrapper = extendedWrapper(
- mountFn(GroupsListItem, {
- propsData: {
- group: mockGroup1,
- },
- provide: {
- subscriptionsPath: mockSubscriptionPath,
- },
- }),
- );
+ wrapper = mountFn(GroupsListItem, {
+ propsData: {
+ group: mockGroup1,
+ },
+ provide: {
+ subscriptionsPath: mockSubscriptionPath,
+ },
+ });
};
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
- const findGlAvatar = () => wrapper.find(GlAvatar);
- const findGroupName = () => wrapper.findByTestId('group-list-item-name');
- const findGroupDescription = () => wrapper.findByTestId('group-list-item-description');
- const findLinkButton = () => wrapper.find(GlButton);
+ const findGroupItemName = () => wrapper.findComponent(GroupItemName);
+ const findLinkButton = () => wrapper.findComponent(GlButton);
const clickLinkButton = () => findLinkButton().trigger('click');
describe('template', () => {
@@ -51,17 +38,9 @@ describe('GroupsListItem', () => {
createComponent();
});
- it('renders group avatar', () => {
- expect(findGlAvatar().exists()).toBe(true);
- expect(findGlAvatar().props('src')).toBe(mockGroup1.avatar_url);
- });
-
- it('renders group name', () => {
- expect(findGroupName().text()).toBe(mockGroup1.full_name);
- });
-
- it('renders group description', () => {
- expect(findGroupDescription().text()).toBe(mockGroup1.description);
+ it('renders GroupItemName', () => {
+ expect(findGroupItemName().exists()).toBe(true);
+ expect(findGroupItemName().props('group')).toBe(mockGroup1);
});
it('renders Link button', () => {
@@ -106,7 +85,7 @@ describe('GroupsListItem', () => {
await waitForPromises();
- expect(reloadSpy).toHaveBeenCalled();
+ expect(reloadPage).toHaveBeenCalled();
});
});
@@ -125,7 +104,7 @@ describe('GroupsListItem', () => {
await waitForPromises();
- expect(reloadSpy).not.toHaveBeenCalled();
+ expect(reloadPage).not.toHaveBeenCalled();
expect(wrapper.emitted('error')[0][0]).toBe(mockErrorMessage);
});
});
diff --git a/spec/frontend/jira_connect/components/groups_list_spec.js b/spec/frontend/jira_connect/components/groups_list_spec.js
index 5c645eccc0e..f354cfe6a9b 100644
--- a/spec/frontend/jira_connect/components/groups_list_spec.js
+++ b/spec/frontend/jira_connect/components/groups_list_spec.js
@@ -1,7 +1,7 @@
-import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
+import { GlAlert, GlLoadingIcon, GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-
import { fetchGroups } from '~/jira_connect/api';
import GroupsList from '~/jira_connect/components/groups_list.vue';
import GroupsListItem from '~/jira_connect/components/groups_list_item.vue';
@@ -12,77 +12,100 @@ jest.mock('~/jira_connect/api', () => {
fetchGroups: jest.fn(),
};
});
+
+const mockGroupsPath = '/groups';
+
describe('GroupsList', () => {
let wrapper;
const mockEmptyResponse = { data: [] };
const createComponent = (options = {}) => {
- wrapper = shallowMount(GroupsList, {
- ...options,
- });
+ wrapper = extendedWrapper(
+ shallowMount(GroupsList, {
+ provide: {
+ groupsPath: mockGroupsPath,
+ },
+ ...options,
+ }),
+ );
};
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
- const findGlAlert = () => wrapper.find(GlAlert);
- const findGlLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findGlAlert = () => wrapper.findComponent(GlAlert);
+ const findGlLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findAllItems = () => wrapper.findAll(GroupsListItem);
const findFirstItem = () => findAllItems().at(0);
const findSecondItem = () => findAllItems().at(1);
+ const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
+ const findGroupsList = () => wrapper.findByTestId('groups-list');
- describe('isLoading is true', () => {
+ describe('when groups are loading', () => {
it('renders loading icon', async () => {
- fetchGroups.mockResolvedValue(mockEmptyResponse);
+ fetchGroups.mockReturnValue(new Promise(() => {}));
createComponent();
- wrapper.setData({ isLoading: true });
await wrapper.vm.$nextTick();
expect(findGlLoadingIcon().exists()).toBe(true);
});
});
- describe('error fetching groups', () => {
+ describe('when groups fetch fails', () => {
it('renders error message', async () => {
fetchGroups.mockRejectedValue();
createComponent();
await waitForPromises();
+ expect(findGlLoadingIcon().exists()).toBe(false);
expect(findGlAlert().exists()).toBe(true);
expect(findGlAlert().text()).toBe('Failed to load namespaces. Please try again.');
});
});
- describe('no groups returned', () => {
+ describe('with no groups returned', () => {
it('renders empty state', async () => {
fetchGroups.mockResolvedValue(mockEmptyResponse);
createComponent();
await waitForPromises();
+ expect(findGlLoadingIcon().exists()).toBe(false);
expect(wrapper.text()).toContain('No available namespaces');
});
});
describe('with groups returned', () => {
beforeEach(async () => {
- fetchGroups.mockResolvedValue({ data: [mockGroup1, mockGroup2] });
+ fetchGroups.mockResolvedValue({
+ headers: { 'X-PAGE': 1, 'X-TOTAL': 2 },
+ data: [mockGroup1, mockGroup2],
+ });
createComponent();
await waitForPromises();
});
it('renders groups list', () => {
- expect(findAllItems().length).toBe(2);
+ expect(findAllItems()).toHaveLength(2);
expect(findFirstItem().props('group')).toBe(mockGroup1);
expect(findSecondItem().props('group')).toBe(mockGroup2);
});
+ it('sets GroupListItem `disabled` prop to `false`', () => {
+ findAllItems().wrappers.forEach((groupListItem) => {
+ expect(groupListItem.props('disabled')).toBe(false);
+ });
+ });
+
+ it('does not set opacity of the groups list', () => {
+ expect(findGroupsList().classes()).not.toContain('gl-opacity-5');
+ });
+
it('shows error message on $emit from item', async () => {
const errorMessage = 'error message';
@@ -93,5 +116,55 @@ describe('GroupsList', () => {
expect(findGlAlert().exists()).toBe(true);
expect(findGlAlert().text()).toContain(errorMessage);
});
+
+ describe('when searching groups', () => {
+      const mockSearchTerm = 'mock search term';
+
+ describe('while groups are loading', () => {
+ beforeEach(async () => {
+ fetchGroups.mockClear();
+ fetchGroups.mockReturnValue(new Promise(() => {}));
+
+          findSearchBox().vm.$emit('input', mockSearchTerm);
+ await wrapper.vm.$nextTick();
+ });
+
+ it('calls `fetchGroups` with search term', () => {
+ expect(fetchGroups).toHaveBeenCalledWith(mockGroupsPath, {
+ page: 1,
+ perPage: 10,
+            search: mockSearchTerm,
+ });
+ });
+
+ it('disables GroupListItems', async () => {
+ findAllItems().wrappers.forEach((groupListItem) => {
+ expect(groupListItem.props('disabled')).toBe(true);
+ });
+ });
+
+ it('sets opacity of the groups list', () => {
+ expect(findGroupsList().classes()).toContain('gl-opacity-5');
+ });
+
+        it('sets loading prop of the search box', () => {
+ expect(findSearchBox().props('isLoading')).toBe(true);
+ });
+ });
+
+ describe('when group search finishes loading', () => {
+ beforeEach(async () => {
+ fetchGroups.mockResolvedValue({ data: [mockGroup1] });
+ findSearchBox().vm.$emit('input');
+
+ await waitForPromises();
+ });
+
+ it('renders new groups list', () => {
+ expect(findAllItems()).toHaveLength(1);
+ expect(findFirstItem().props('group')).toBe(mockGroup1);
+ });
+ });
+ });
});
});
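
Worth noting in the loading tests above: instead of toggling an isLoading flag by hand, the mocked fetchGroups returns a promise that never settles, so the component stays in its loading state naturally. A self-contained sketch of that technique with a stand-in component (everything here is illustrative, not the real GroupsList):

import { shallowMount } from '@vue/test-utils';

// Stand-in component: fetches on mount and clears its loading flag afterwards.
const FakeList = {
  props: ['fetch'],
  data() {
    return { isLoading: true };
  },
  async mounted() {
    await this.fetch();
    this.isLoading = false;
  },
  render(h) {
    return h('div', this.isLoading ? 'loading' : 'done');
  },
};

describe('never-settling promise keeps the loading state (illustrative)', () => {
  it('still shows the loading text after the next tick', async () => {
    // A promise that never resolves: the component cannot leave its loading state.
    const fetch = jest.fn().mockReturnValue(new Promise(() => {}));
    const wrapper = shallowMount(FakeList, { propsData: { fetch } });

    await wrapper.vm.$nextTick();

    expect(fetch).toHaveBeenCalled();
    expect(wrapper.text()).toBe('loading');
  });
});
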
diff --git a/spec/frontend/jira_connect/components/subscriptions_list_spec.js b/spec/frontend/jira_connect/components/subscriptions_list_spec.js
new file mode 100644
index 00000000000..ff86969367d
--- /dev/null
+++ b/spec/frontend/jira_connect/components/subscriptions_list_spec.js
@@ -0,0 +1,122 @@
+import { GlButton, GlEmptyState, GlTable } from '@gitlab/ui';
+import { mount, shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
+
+import * as JiraConnectApi from '~/jira_connect/api';
+import SubscriptionsList from '~/jira_connect/components/subscriptions_list.vue';
+import createStore from '~/jira_connect/store';
+import { SET_ALERT } from '~/jira_connect/store/mutation_types';
+import { reloadPage } from '~/jira_connect/utils';
+import { mockSubscription } from '../mock_data';
+
+jest.mock('~/jira_connect/utils');
+
+describe('SubscriptionsList', () => {
+ let wrapper;
+ let store;
+
+ const createComponent = ({ mountFn = shallowMount, provide = {} } = {}) => {
+ store = createStore();
+
+ wrapper = mountFn(SubscriptionsList, {
+ provide,
+ store,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findGlEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findGlTable = () => wrapper.findComponent(GlTable);
+ const findUnlinkButton = () => findGlTable().findComponent(GlButton);
+ const clickUnlinkButton = () => findUnlinkButton().trigger('click');
+
+ describe('template', () => {
+ it('renders GlEmptyState when subscriptions is empty', () => {
+ createComponent();
+
+ expect(findGlEmptyState().exists()).toBe(true);
+ expect(findGlTable().exists()).toBe(false);
+ });
+
+ it('renders GlTable when subscriptions are present', () => {
+ createComponent({
+ provide: {
+ subscriptions: [mockSubscription],
+ },
+ });
+
+ expect(findGlEmptyState().exists()).toBe(false);
+ expect(findGlTable().exists()).toBe(true);
+ });
+ });
+
+ describe('on "Unlink" button click', () => {
+ let removeSubscriptionSpy;
+
+ beforeEach(() => {
+ createComponent({
+ mountFn: mount,
+ provide: {
+ subscriptions: [mockSubscription],
+ },
+ });
+ removeSubscriptionSpy = jest.spyOn(JiraConnectApi, 'removeSubscription').mockResolvedValue();
+ });
+
+ it('sets button to loading and sends request', async () => {
+ expect(findUnlinkButton().props('loading')).toBe(false);
+
+ clickUnlinkButton();
+
+ await wrapper.vm.$nextTick();
+
+ expect(findUnlinkButton().props('loading')).toBe(true);
+
+ await waitForPromises();
+
+ expect(removeSubscriptionSpy).toHaveBeenCalledWith(mockSubscription.unlink_path);
+ });
+
+ describe('when request is successful', () => {
+ it('reloads the page', async () => {
+ clickUnlinkButton();
+
+ await waitForPromises();
+
+ expect(reloadPage).toHaveBeenCalled();
+ });
+ });
+
+ describe('when request has errors', () => {
+ const mockErrorMessage = 'error message';
+ const mockError = { response: { data: { error: mockErrorMessage } } };
+
+ beforeEach(() => {
+ jest.spyOn(JiraConnectApi, 'removeSubscription').mockRejectedValue(mockError);
+ jest.spyOn(store, 'commit');
+ });
+
+ it('sets alert', async () => {
+ clickUnlinkButton();
+
+ await waitForPromises();
+
+ expect(reloadPage).not.toHaveBeenCalled();
+ expect(store.commit.mock.calls).toEqual(
+ expect.arrayContaining([
+ [
+ SET_ALERT,
+ {
+ message: mockErrorMessage,
+ variant: 'danger',
+ },
+ ],
+ ]),
+ );
+ });
+ });
+ });
+});
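
The error-path test above leans on jest.spyOn(store, 'commit') plus expect.arrayContaining to assert that the component committed SET_ALERT. A reduced sketch of that assertion style against a bare store (the mutation body is a stand-in, not the real jira_connect store module):

import Vue from 'vue';
import Vuex from 'vuex';

Vue.use(Vuex);

describe('asserting commits on a Vuex store (illustrative)', () => {
  it('records SET_ALERT with its payload', () => {
    const store = new Vuex.Store({
      state: { alert: null },
      mutations: {
        // Stand-in for the real SET_ALERT mutation.
        SET_ALERT(state, payload) {
          state.alert = payload;
        },
      },
    });

    jest.spyOn(store, 'commit');

    store.commit('SET_ALERT', { message: 'error message', variant: 'danger' });

    expect(store.commit.mock.calls).toEqual(
      expect.arrayContaining([['SET_ALERT', { message: 'error message', variant: 'danger' }]]),
    );
  });
});
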
diff --git a/spec/frontend/jira_connect/index_spec.js b/spec/frontend/jira_connect/index_spec.js
index eb54fe6476f..0161cfa0273 100644
--- a/spec/frontend/jira_connect/index_spec.js
+++ b/spec/frontend/jira_connect/index_spec.js
@@ -1,27 +1,14 @@
-import waitForPromises from 'helpers/wait_for_promises';
import { initJiraConnect } from '~/jira_connect';
-import { removeSubscription } from '~/jira_connect/api';
-jest.mock('~/jira_connect/api', () => ({
- removeSubscription: jest.fn().mockResolvedValue(),
+jest.mock('~/jira_connect/utils', () => ({
getLocation: jest.fn().mockResolvedValue('test/location'),
}));
describe('initJiraConnect', () => {
- window.AP = {
- navigator: {
- reload: jest.fn(),
- },
- };
-
beforeEach(async () => {
setFixtures(`
<a class="js-jira-connect-sign-in" href="https://gitlab.com">Sign In</a>
<a class="js-jira-connect-sign-in" href="https://gitlab.com">Another Sign In</a>
-
- <a href="https://gitlab.com/sub1" class="js-jira-connect-remove-subscription">Remove</a>
- <a href="https://gitlab.com/sub2" class="js-jira-connect-remove-subscription">Remove</a>
- <a href="https://gitlab.com/sub3" class="js-jira-connect-remove-subscription">Remove</a>
`);
await initJiraConnect();
@@ -34,23 +21,4 @@ describe('initJiraConnect', () => {
});
});
});
-
- describe('`remove subscription` buttons', () => {
- describe('on click', () => {
- it('calls `removeSubscription`', () => {
- Array.from(document.querySelectorAll('.js-jira-connect-remove-subscription')).forEach(
- (removeSubscriptionButton) => {
- removeSubscriptionButton.dispatchEvent(new Event('click'));
-
- waitForPromises();
-
- expect(removeSubscription).toHaveBeenCalledWith(removeSubscriptionButton.href);
- expect(removeSubscription).toHaveBeenCalledTimes(1);
-
- removeSubscription.mockClear();
- },
- );
- });
- });
- });
});
diff --git a/spec/frontend/jira_connect/mock_data.js b/spec/frontend/jira_connect/mock_data.js
index 22255fabc3d..5247a3dc522 100644
--- a/spec/frontend/jira_connect/mock_data.js
+++ b/spec/frontend/jira_connect/mock_data.js
@@ -15,3 +15,9 @@ export const mockGroup2 = {
full_path: 'gitlab-com',
description: 'For GitLab company related projects',
};
+
+export const mockSubscription = {
+ group: mockGroup1,
+ created_at: '2021-04-14T08:52:23.115Z',
+ unlink_path: '/-/jira_connect/subscriptions/1',
+};
diff --git a/spec/frontend/jira_connect/utils_spec.js b/spec/frontend/jira_connect/utils_spec.js
index 5310bce384b..7eae870478d 100644
--- a/spec/frontend/jira_connect/utils_spec.js
+++ b/spec/frontend/jira_connect/utils_spec.js
@@ -1,11 +1,19 @@
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import { ALERT_LOCALSTORAGE_KEY } from '~/jira_connect/constants';
-import { persistAlert, retrieveAlert } from '~/jira_connect/utils';
-
-useLocalStorageSpy();
+import {
+ persistAlert,
+ retrieveAlert,
+ getJwt,
+ getLocation,
+ reloadPage,
+ sizeToParent,
+} from '~/jira_connect/utils';
describe('JiraConnect utils', () => {
describe('alert utils', () => {
+ useLocalStorageSpy();
+
it.each`
arg | expectedRetrievedValue
${{ title: 'error' }} | ${{ title: 'error' }}
@@ -29,4 +37,104 @@ describe('JiraConnect utils', () => {
},
);
});
+
+ describe('AP object utils', () => {
+ afterEach(() => {
+ global.AP = null;
+ });
+
+ describe('getJwt', () => {
+ const mockJwt = 'jwt';
+ const getTokenSpy = jest.fn((callback) => callback(mockJwt));
+
+ it('resolves to the function call when AP.context.getToken is a function', async () => {
+ global.AP = {
+ context: {
+ getToken: getTokenSpy,
+ },
+ };
+
+ const jwt = await getJwt();
+
+ expect(getTokenSpy).toHaveBeenCalled();
+ expect(jwt).toBe(mockJwt);
+ });
+
+ it('resolves to undefined when AP.context.getToken is not a function', async () => {
+ const jwt = await getJwt();
+
+ expect(getTokenSpy).not.toHaveBeenCalled();
+ expect(jwt).toBeUndefined();
+ });
+ });
+
+ describe('getLocation', () => {
+ const mockLocation = 'test/location';
+ const getLocationSpy = jest.fn((callback) => callback(mockLocation));
+
+ it('resolves to the function call when AP.getLocation is a function', async () => {
+ global.AP = {
+ getLocation: getLocationSpy,
+ };
+
+ const location = await getLocation();
+
+ expect(getLocationSpy).toHaveBeenCalled();
+ expect(location).toBe(mockLocation);
+ });
+
+ it('resolves to undefined when AP.getLocation is not a function', async () => {
+ const location = await getLocation();
+
+ expect(getLocationSpy).not.toHaveBeenCalled();
+ expect(location).toBeUndefined();
+ });
+ });
+
+ describe('reloadPage', () => {
+ const reloadSpy = jest.fn();
+
+ useMockLocationHelper();
+
+ it('calls the function when AP.navigator.reload is a function', async () => {
+ global.AP = {
+ navigator: {
+ reload: reloadSpy,
+ },
+ };
+
+ await reloadPage();
+
+ expect(reloadSpy).toHaveBeenCalled();
+ expect(window.location.reload).not.toHaveBeenCalled();
+ });
+
+ it('calls window.location.reload when AP.navigator.reload is not a function', async () => {
+ await reloadPage();
+
+ expect(reloadSpy).not.toHaveBeenCalled();
+ expect(window.location.reload).toHaveBeenCalled();
+ });
+ });
+
+ describe('sizeToParent', () => {
+ const sizeToParentSpy = jest.fn();
+
+ it('calls the function when AP.sizeToParent is a function', async () => {
+ global.AP = {
+ sizeToParent: sizeToParentSpy,
+ };
+
+ await sizeToParent();
+
+ expect(sizeToParentSpy).toHaveBeenCalled();
+ });
+
+      it('does nothing when AP.sizeToParent is not a function', async () => {
+ await sizeToParent();
+
+ expect(sizeToParentSpy).not.toHaveBeenCalled();
+ });
+ });
+ });
});
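
Taken together, these specs pin down the contract of the AP helpers: call the matching method on the Jira Connect AP object when it is a function, otherwise fall back (or resolve to undefined). A rough sketch of a reloadPage-shaped helper that satisfies those expectations, as an illustration of the contract rather than the actual ~/jira_connect/utils source:

// Sketch: prefer the Jira Connect AP bridge, fall back to the browser API.
export async function reloadPage() {
  // typeof guards against AP not being declared at all; optional chaining
  // guards against AP being null or missing a navigator.
  if (typeof AP !== 'undefined' && typeof AP?.navigator?.reload === 'function') {
    AP.navigator.reload();
    return;
  }

  // No usable AP object: reload through the regular browser API instead.
  window.location.reload();
}
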
diff --git a/spec/frontend/jobs/components/commit_block_spec.js b/spec/frontend/jobs/components/commit_block_spec.js
index 13261317b48..8a6d48cecb8 100644
--- a/spec/frontend/jobs/components/commit_block_spec.js
+++ b/spec/frontend/jobs/components/commit_block_spec.js
@@ -1,89 +1,70 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import component from '~/jobs/components/commit_block.vue';
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import CommitBlock from '~/jobs/components/commit_block.vue';
+import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
describe('Commit block', () => {
- const Component = Vue.extend(component);
- let vm;
+ let wrapper;
- const props = {
- commit: {
- short_id: '1f0fb84f',
- id: '1f0fb84fb6770d74d97eee58118fd3909cd4f48c',
- commit_path: 'commit/1f0fb84fb6770d74d97eee58118fd3909cd4f48c',
- title: 'Update README.md',
- },
- mergeRequest: {
- iid: '!21244',
- path: 'merge_requests/21244',
- },
- isLastBlock: true,
+ const commit = {
+ short_id: '1f0fb84f',
+ id: '1f0fb84fb6770d74d97eee58118fd3909cd4f48c',
+ commit_path: 'commit/1f0fb84fb6770d74d97eee58118fd3909cd4f48c',
+ title: 'Update README.md',
+ };
+
+ const mergeRequest = {
+ iid: '!21244',
+ path: 'merge_requests/21244',
+ };
+
+ const findCommitSha = () => wrapper.findByTestId('commit-sha');
+ const findLinkSha = () => wrapper.findByTestId('link-commit');
+
+ const mountComponent = (props) => {
+ wrapper = extendedWrapper(
+ shallowMount(CommitBlock, {
+ propsData: {
+ commit,
+ ...props,
+ },
+ }),
+ );
};
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
- describe('pipeline short sha', () => {
+ describe('without merge request', () => {
beforeEach(() => {
- vm = mountComponent(Component, {
- ...props,
- });
+ mountComponent();
});
it('renders pipeline short sha link', () => {
- expect(vm.$el.querySelector('.js-commit-sha').getAttribute('href')).toEqual(
- props.commit.commit_path,
- );
-
- expect(vm.$el.querySelector('.js-commit-sha').textContent.trim()).toEqual(
- props.commit.short_id,
- );
+ expect(findCommitSha().attributes('href')).toBe(commit.commit_path);
+ expect(findCommitSha().text()).toBe(commit.short_id);
});
it('renders clipboard button', () => {
- expect(vm.$el.querySelector('button').getAttribute('data-clipboard-text')).toEqual(
- props.commit.id,
- );
+ expect(wrapper.findComponent(ClipboardButton).attributes('text')).toBe(commit.id);
});
- });
-
- describe('with merge request', () => {
- it('renders merge request link and reference', () => {
- vm = mountComponent(Component, {
- ...props,
- });
-
- expect(vm.$el.querySelector('.js-link-commit').getAttribute('href')).toEqual(
- props.mergeRequest.path,
- );
- expect(vm.$el.querySelector('.js-link-commit').textContent.trim()).toEqual(
- `!${props.mergeRequest.iid}`,
- );
+ it('renders git commit title', () => {
+ expect(wrapper.text()).toContain(commit.title);
});
- });
- describe('without merge request', () => {
it('does not render merge request', () => {
- const copyProps = { ...props };
- delete copyProps.mergeRequest;
-
- vm = mountComponent(Component, {
- ...copyProps,
- });
-
- expect(vm.$el.querySelector('.js-link-commit')).toBeNull();
+ expect(findLinkSha().exists()).toBe(false);
});
});
- describe('git commit title', () => {
- it('renders git commit title', () => {
- vm = mountComponent(Component, {
- ...props,
- });
+ describe('with merge request', () => {
+ it('renders merge request link and reference', () => {
+ mountComponent({ mergeRequest });
- expect(vm.$el.textContent).toContain(props.commit.title);
+ expect(findLinkSha().attributes('href')).toBe(mergeRequest.path);
+ expect(findLinkSha().text()).toBe(`!${mergeRequest.iid}`);
});
});
});
diff --git a/spec/frontend/jobs/components/job_sidebar_details_container_spec.js b/spec/frontend/jobs/components/job_sidebar_details_container_spec.js
index 2b56bd2d558..ad0368555fa 100644
--- a/spec/frontend/jobs/components/job_sidebar_details_container_spec.js
+++ b/spec/frontend/jobs/components/job_sidebar_details_container_spec.js
@@ -34,11 +34,22 @@ describe('Job Sidebar Details Container', () => {
});
describe('when no details are available', () => {
- it('should render an empty container', () => {
+ beforeEach(() => {
createWrapper();
+ });
+ it('should render an empty container', () => {
expect(wrapper.html()).toBe('');
});
+
+ it.each(['duration', 'erased_at', 'finished_at', 'queued', 'runner', 'coverage'])(
+ 'should not render %s details when missing',
+ async (detail) => {
+ await store.dispatch('receiveJobSuccess', { [detail]: undefined });
+
+ expect(findAllDetailsRow()).toHaveLength(0);
+ },
+ );
});
describe('when some of the details are available', () => {
@@ -49,7 +60,7 @@ describe('Job Sidebar Details Container', () => {
['erased_at', 'Erased: 3 weeks ago'],
['finished_at', 'Finished: 3 weeks ago'],
['queued', 'Queued: 9 seconds'],
- ['runner', 'Runner: local ci runner (#1)'],
+ ['runner', 'Runner: #1 (ABCDEFGH) local ci runner'],
['coverage', 'Coverage: 20%'],
])('uses %s to render job-%s', async (detail, value) => {
await store.dispatch('receiveJobSuccess', { [detail]: job[detail] });
diff --git a/spec/frontend/jobs/components/manual_variables_form_spec.js b/spec/frontend/jobs/components/manual_variables_form_spec.js
index 7172a319876..376a822dde5 100644
--- a/spec/frontend/jobs/components/manual_variables_form_spec.js
+++ b/spec/frontend/jobs/components/manual_variables_form_spec.js
@@ -1,11 +1,16 @@
-import { GlButton } from '@gitlab/ui';
-import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { createLocalVue, mount, shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import Form from '~/jobs/components/manual_variables_form.vue';
const localVue = createLocalVue();
+Vue.use(Vuex);
+
describe('Manual Variables Form', () => {
let wrapper;
+ let store;
const requiredProps = {
action: {
@@ -16,88 +21,104 @@ describe('Manual Variables Form', () => {
variablesSettingsUrl: '/settings',
};
- const factory = (props = {}) => {
- wrapper = shallowMount(localVue.extend(Form), {
- propsData: props,
- localVue,
+ const createComponent = ({ props = {}, mountFn = shallowMount } = {}) => {
+ store = new Vuex.Store({
+ actions: {
+ triggerManualJob: jest.fn(),
+ },
});
+
+ wrapper = extendedWrapper(
+ mountFn(localVue.extend(Form), {
+ propsData: { ...requiredProps, ...props },
+ localVue,
+ store,
+ }),
+ );
};
- beforeEach(() => {
- factory(requiredProps);
- });
+ const findInputKey = () => wrapper.findComponent({ ref: 'inputKey' });
+ const findInputValue = () => wrapper.findComponent({ ref: 'inputSecretValue' });
- afterEach((done) => {
- // The component has a `nextTick` callback after some events so we need
- // to wait for those to finish before destroying.
- setImmediate(() => {
- wrapper.destroy();
- wrapper = null;
+ const findTriggerBtn = () => wrapper.findByTestId('trigger-manual-job-btn');
+ const findHelpText = () => wrapper.findByTestId('form-help-text');
+ const findDeleteVarBtn = () => wrapper.findByTestId('delete-variable-btn');
+ const findCiVariableKey = () => wrapper.findByTestId('ci-variable-key');
+ const findCiVariableValue = () => wrapper.findByTestId('ci-variable-value');
+ const findAllVariables = () => wrapper.findAllByTestId('ci-variable-row');
- done();
- });
+ afterEach(() => {
+ wrapper.destroy();
});
- it('renders empty form with correct placeholders', () => {
- expect(wrapper.find({ ref: 'inputKey' }).attributes('placeholder')).toBe('Input variable key');
- expect(wrapper.find({ ref: 'inputSecretValue' }).attributes('placeholder')).toBe(
- 'Input variable value',
- );
- });
+ describe('shallowMount', () => {
+ beforeEach(() => {
+ createComponent();
+ });
- it('renders help text with provided link', () => {
- expect(wrapper.find('p').text()).toBe(
- 'Specify variable values to be used in this run. The values specified in CI/CD settings will be used as default',
- );
+ it('renders empty form with correct placeholders', () => {
+ expect(findInputKey().attributes('placeholder')).toBe('Input variable key');
+ expect(findInputValue().attributes('placeholder')).toBe('Input variable value');
+ });
- expect(wrapper.find('a').attributes('href')).toBe(requiredProps.variablesSettingsUrl);
- });
+ it('renders help text with provided link', () => {
+ expect(findHelpText().text()).toBe(
+ 'Specify variable values to be used in this run. The values specified in CI/CD settings will be used as default',
+ );
- describe('when adding a new variable', () => {
- it('creates a new variable when user types a new key and resets the form', (done) => {
- wrapper.vm
- .$nextTick()
- .then(() => wrapper.find({ ref: 'inputKey' }).setValue('new key'))
- .then(() => {
- expect(wrapper.vm.variables.length).toBe(1);
- expect(wrapper.vm.variables[0].key).toBe('new key');
- expect(wrapper.find({ ref: 'inputKey' }).attributes('value')).toBe(undefined);
- })
- .then(done)
- .catch(done.fail);
+ expect(wrapper.find('a').attributes('href')).toBe(requiredProps.variablesSettingsUrl);
});
- it('creates a new variable when user types a new value and resets the form', (done) => {
- wrapper.vm
- .$nextTick()
- .then(() => wrapper.find({ ref: 'inputSecretValue' }).setValue('new value'))
- .then(() => {
- expect(wrapper.vm.variables.length).toBe(1);
- expect(wrapper.vm.variables[0].secret_value).toBe('new value');
- expect(wrapper.find({ ref: 'inputSecretValue' }).attributes('value')).toBe(undefined);
- })
- .then(done)
- .catch(done.fail);
+ describe('when adding a new variable', () => {
+ it('creates a new variable when user types a new key and resets the form', async () => {
+ await findInputKey().setValue('new key');
+
+ expect(findAllVariables()).toHaveLength(1);
+ expect(findCiVariableKey().element.value).toBe('new key');
+ expect(findInputKey().attributes('value')).toBe(undefined);
+ });
+
+ it('creates a new variable when user types a new value and resets the form', async () => {
+ await findInputValue().setValue('new value');
+
+ expect(findAllVariables()).toHaveLength(1);
+ expect(findCiVariableValue().element.value).toBe('new value');
+ expect(findInputValue().attributes('value')).toBe(undefined);
+ });
});
});
- describe('when deleting a variable', () => {
- beforeEach((done) => {
- wrapper.vm.variables = [
- {
- key: 'new key',
- secret_value: 'value',
- id: '1',
- },
- ];
-
- wrapper.vm.$nextTick(done);
+ describe('mount', () => {
+ beforeEach(() => {
+ createComponent({ mountFn: mount });
+ });
+
+ describe('when deleting a variable', () => {
+ it('removes the variable row', async () => {
+ await wrapper.setData({
+ variables: [
+ {
+ key: 'new key',
+ secret_value: 'value',
+ id: '1',
+ },
+ ],
+ });
+
+ findDeleteVarBtn().trigger('click');
+
+ await wrapper.vm.$nextTick();
+
+ expect(findAllVariables()).toHaveLength(0);
+ });
});
- it('removes the variable row', () => {
- wrapper.find(GlButton).vm.$emit('click');
+ it('trigger button is disabled after trigger action', async () => {
+ expect(findTriggerBtn().props('disabled')).toBe(false);
+
+ await findTriggerBtn().trigger('click');
- expect(wrapper.vm.variables.length).toBe(0);
+ expect(findTriggerBtn().props('disabled')).toBe(true);
});
});
});
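
The rewritten form spec builds its store with actions that are plain jest.fn() mocks, so the component can dispatch triggerManualJob without issuing a real request. A compact sketch of that setup on its own (the payload shape is illustrative):

import Vue from 'vue';
import Vuex from 'vuex';

Vue.use(Vuex);

describe('store with mocked actions (illustrative)', () => {
  it('records the dispatched action and its payload', () => {
    const triggerManualJob = jest.fn();

    const store = new Vuex.Store({
      actions: {
        // The mock stands in for the real action, so no request is made.
        triggerManualJob,
      },
    });

    store.dispatch('triggerManualJob', [{ key: 'VAR_ONE', secret_value: 'value' }]);

    expect(triggerManualJob).toHaveBeenCalledWith(
      expect.anything(), // the Vuex action context
      [{ key: 'VAR_ONE', secret_value: 'value' }],
    );
  });
});
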
diff --git a/spec/frontend/jobs/components/sidebar_spec.js b/spec/frontend/jobs/components/sidebar_spec.js
index 5a2e699137d..500a1b48950 100644
--- a/spec/frontend/jobs/components/sidebar_spec.js
+++ b/spec/frontend/jobs/components/sidebar_spec.js
@@ -1,5 +1,6 @@
import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import ArtifactsBlock from '~/jobs/components/artifacts_block.vue';
import JobRetryForwardDeploymentModal from '~/jobs/components/job_retry_forward_deployment_modal.vue';
import JobRetryButton from '~/jobs/components/job_sidebar_retry_button.vue';
import JobsContainer from '~/jobs/components/jobs_container.vue';
@@ -14,6 +15,7 @@ describe('Sidebar details block', () => {
const forwardDeploymentFailure = 'forward_deployment_failure';
const findModal = () => wrapper.find(JobRetryForwardDeploymentModal);
+ const findArtifactsBlock = () => wrapper.findComponent(ArtifactsBlock);
const findCancelButton = () => wrapper.findByTestId('cancel-button');
const findNewIssueButton = () => wrapper.findByTestId('job-new-issue');
const findRetryButton = () => wrapper.find(JobRetryButton);
@@ -21,6 +23,9 @@ describe('Sidebar details block', () => {
const createWrapper = ({ props = {} } = {}) => {
store = createStore();
+
+ store.state.job = job;
+
wrapper = extendedWrapper(
shallowMount(Sidebar, {
...props,
@@ -164,4 +169,29 @@ describe('Sidebar details block', () => {
});
});
});
+
+ describe('artifacts', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('artifacts are not shown if there are no properties other than locked', () => {
+ expect(findArtifactsBlock().exists()).toBe(false);
+ });
+
+ it('artifacts are shown if present', async () => {
+ store.state.job.artifact = {
+ download_path: '/root/ci-project/-/jobs/1960/artifacts/download',
+ browse_path: '/root/ci-project/-/jobs/1960/artifacts/browse',
+ keep_path: '/root/ci-project/-/jobs/1960/artifacts/keep',
+ expire_at: '2021-03-23T17:57:11.211Z',
+ expired: false,
+ locked: false,
+ };
+
+ await wrapper.vm.$nextTick();
+
+ expect(findArtifactsBlock().exists()).toBe(true);
+ });
+ });
});
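
The artifacts test above mutates store.state.job.artifact directly and then awaits wrapper.vm.$nextTick() before asserting, because Vue batches DOM updates. The same timing rule in its smallest form (the component below is a throwaway illustration):

import Vue from 'vue';

describe('waiting for the next tick after a state change (illustrative)', () => {
  it('sees the updated DOM only after $nextTick', async () => {
    const vm = new Vue({
      data() {
        return { artifact: null };
      },
      render(h) {
        return h('div', this.artifact ? 'artifacts' : 'empty');
      },
    }).$mount();

    vm.artifact = { download_path: '/download' };

    // The rendered output is still the old one until the update queue flushes.
    expect(vm.$el.textContent).toBe('empty');

    await vm.$nextTick();

    expect(vm.$el.textContent).toBe('artifacts');
  });
});
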
diff --git a/spec/frontend/jobs/components/stages_dropdown_spec.js b/spec/frontend/jobs/components/stages_dropdown_spec.js
index 72d5d0f9d44..b75d1707a8d 100644
--- a/spec/frontend/jobs/components/stages_dropdown_spec.js
+++ b/spec/frontend/jobs/components/stages_dropdown_spec.js
@@ -1,163 +1,134 @@
-import Vue from 'vue';
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
import { trimText } from 'helpers/text_helper';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import component from '~/jobs/components/stages_dropdown.vue';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import StagesDropdown from '~/jobs/components/stages_dropdown.vue';
+import CiIcon from '~/vue_shared/components/ci_icon.vue';
+import {
+ mockPipelineWithoutMR,
+ mockPipelineWithAttachedMR,
+ mockPipelineDetached,
+} from '../mock_data';
describe('Stages Dropdown', () => {
- const Component = Vue.extend(component);
- let vm;
-
- const mockPipelineData = {
- id: 28029444,
- details: {
- status: {
- details_path: '/gitlab-org/gitlab-foss/pipelines/28029444',
- group: 'success',
- has_details: true,
- icon: 'status_success',
- label: 'passed',
- text: 'passed',
- tooltip: 'passed',
- },
- },
- path: 'pipeline/28029444',
- flags: {
- merge_request_pipeline: true,
- detached_merge_request_pipeline: false,
- },
- merge_request: {
- iid: 1234,
- path: '/root/detached-merge-request-pipelines/-/merge_requests/1',
- title: 'Update README.md',
- source_branch: 'feature-1234',
- source_branch_path: '/root/detached-merge-request-pipelines/branches/feature-1234',
- target_branch: 'master',
- target_branch_path: '/root/detached-merge-request-pipelines/branches/master',
- },
- ref: {
- name: 'test-branch',
- },
+ let wrapper;
+
+ const findStatus = () => wrapper.findComponent(CiIcon);
+ const findSelectedStageText = () => wrapper.findComponent(GlDropdown).props('text');
+ const findStageItem = (index) => wrapper.findAllComponents(GlDropdownItem).at(index);
+
+ const findPipelineInfoText = () => wrapper.findByTestId('pipeline-info').text();
+ const findPipelinePath = () => wrapper.findByTestId('pipeline-path').attributes('href');
+ const findMRLinkPath = () => wrapper.findByTestId('mr-link').attributes('href');
+ const findSourceBranchLinkPath = () =>
+ wrapper.findByTestId('source-branch-link').attributes('href');
+ const findTargetBranchLinkPath = () =>
+ wrapper.findByTestId('target-branch-link').attributes('href');
+
+ const createComponent = (props) => {
+ wrapper = extendedWrapper(
+ shallowMount(StagesDropdown, {
+ propsData: {
+ ...props,
+ },
+ }),
+ );
};
- describe('without a merge request pipeline', () => {
- let pipeline;
+ afterEach(() => {
+ wrapper.destroy();
+ });
+ describe('without a merge request pipeline', () => {
beforeEach(() => {
- pipeline = JSON.parse(JSON.stringify(mockPipelineData));
- delete pipeline.merge_request;
- delete pipeline.flags.merge_request_pipeline;
- delete pipeline.flags.detached_merge_request_pipeline;
-
- vm = mountComponent(Component, {
- pipeline,
+ createComponent({
+ pipeline: mockPipelineWithoutMR,
stages: [{ name: 'build' }, { name: 'test' }],
selectedStage: 'deploy',
});
});
- afterEach(() => {
- vm.$destroy();
- });
-
it('renders pipeline status', () => {
- expect(vm.$el.querySelector('.js-ci-status-icon-success')).not.toBeNull();
+ expect(findStatus().exists()).toBe(true);
});
it('renders pipeline link', () => {
- expect(vm.$el.querySelector('.js-pipeline-path').getAttribute('href')).toEqual(
- 'pipeline/28029444',
- );
+ expect(findPipelinePath()).toBe('pipeline/28029444');
});
it('renders dropdown with stages', () => {
- expect(vm.$el.querySelector('.dropdown .js-stage-item').textContent).toContain('build');
+ expect(findStageItem(0).text()).toBe('build');
});
it('renders selected stage', () => {
- expect(vm.$el.querySelector('.dropdown .js-selected-stage').textContent).toContain('deploy');
+ expect(findSelectedStageText()).toBe('deploy');
});
it(`renders the pipeline info text like "Pipeline #123 for source_branch"`, () => {
- const expected = `Pipeline #${pipeline.id} for ${pipeline.ref.name}`;
- const actual = trimText(vm.$el.querySelector('.js-pipeline-info').innerText);
+ const expected = `Pipeline #${mockPipelineWithoutMR.id} for ${mockPipelineWithoutMR.ref.name}`;
+ const actual = trimText(findPipelineInfoText());
expect(actual).toBe(expected);
});
});
describe('with an "attached" merge request pipeline', () => {
- let pipeline;
-
beforeEach(() => {
- pipeline = JSON.parse(JSON.stringify(mockPipelineData));
- pipeline.flags.merge_request_pipeline = true;
- pipeline.flags.detached_merge_request_pipeline = false;
-
- vm = mountComponent(Component, {
- pipeline,
+ createComponent({
+ pipeline: mockPipelineWithAttachedMR,
stages: [],
selectedStage: 'deploy',
});
});
it(`renders the pipeline info text like "Pipeline #123 for !456 with source_branch into target_branch"`, () => {
- const expected = `Pipeline #${pipeline.id} for !${pipeline.merge_request.iid} with ${pipeline.merge_request.source_branch} into ${pipeline.merge_request.target_branch}`;
- const actual = trimText(vm.$el.querySelector('.js-pipeline-info').innerText);
+ const expected = `Pipeline #${mockPipelineWithAttachedMR.id} for !${mockPipelineWithAttachedMR.merge_request.iid} with ${mockPipelineWithAttachedMR.merge_request.source_branch} into ${mockPipelineWithAttachedMR.merge_request.target_branch}`;
+ const actual = trimText(findPipelineInfoText());
expect(actual).toBe(expected);
});
it(`renders the correct merge request link`, () => {
- const actual = vm.$el.querySelector('.js-mr-link').href;
-
- expect(actual).toContain(pipeline.merge_request.path);
+ expect(findMRLinkPath()).toBe(mockPipelineWithAttachedMR.merge_request.path);
});
it(`renders the correct source branch link`, () => {
- const actual = vm.$el.querySelector('.js-source-branch-link').href;
-
- expect(actual).toContain(pipeline.merge_request.source_branch_path);
+ expect(findSourceBranchLinkPath()).toBe(
+ mockPipelineWithAttachedMR.merge_request.source_branch_path,
+ );
});
it(`renders the correct target branch link`, () => {
- const actual = vm.$el.querySelector('.js-target-branch-link').href;
-
- expect(actual).toContain(pipeline.merge_request.target_branch_path);
+ expect(findTargetBranchLinkPath()).toBe(
+ mockPipelineWithAttachedMR.merge_request.target_branch_path,
+ );
});
});
describe('with a detached merge request pipeline', () => {
- let pipeline;
-
beforeEach(() => {
- pipeline = JSON.parse(JSON.stringify(mockPipelineData));
- pipeline.flags.merge_request_pipeline = false;
- pipeline.flags.detached_merge_request_pipeline = true;
-
- vm = mountComponent(Component, {
- pipeline,
+ createComponent({
+ pipeline: mockPipelineDetached,
stages: [],
selectedStage: 'deploy',
});
});
it(`renders the pipeline info like "Pipeline #123 for !456 with source_branch"`, () => {
- const expected = `Pipeline #${pipeline.id} for !${pipeline.merge_request.iid} with ${pipeline.merge_request.source_branch}`;
- const actual = trimText(vm.$el.querySelector('.js-pipeline-info').innerText);
+ const expected = `Pipeline #${mockPipelineDetached.id} for !${mockPipelineDetached.merge_request.iid} with ${mockPipelineDetached.merge_request.source_branch}`;
+ const actual = trimText(findPipelineInfoText());
expect(actual).toBe(expected);
});
it(`renders the correct merge request link`, () => {
- const actual = vm.$el.querySelector('.js-mr-link').href;
-
- expect(actual).toContain(pipeline.merge_request.path);
+ expect(findMRLinkPath()).toBe(mockPipelineDetached.merge_request.path);
});
it(`renders the correct source branch link`, () => {
- const actual = vm.$el.querySelector('.js-source-branch-link').href;
-
- expect(actual).toContain(pipeline.merge_request.source_branch_path);
+ expect(findSourceBranchLinkPath()).toBe(
+ mockPipelineDetached.merge_request.source_branch_path,
+ );
});
});
});
diff --git a/spec/frontend/jobs/components/table/jobs_table_spec.js b/spec/frontend/jobs/components/table/jobs_table_spec.js
new file mode 100644
index 00000000000..db057efbfb4
--- /dev/null
+++ b/spec/frontend/jobs/components/table/jobs_table_spec.js
@@ -0,0 +1,31 @@
+import { GlTable } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import JobsTable from '~/jobs/components/table/jobs_table.vue';
+import { mockJobsInTable } from '../../mock_data';
+
+describe('Jobs Table', () => {
+ let wrapper;
+
+ const findTable = () => wrapper.findComponent(GlTable);
+
+ const createComponent = (props = {}) => {
+ wrapper = shallowMount(JobsTable, {
+ propsData: {
+ jobs: mockJobsInTable,
+ ...props,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays a table', () => {
+ expect(findTable().exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/jobs/components/table/jobs_table_tabs_spec.js b/spec/frontend/jobs/components/table/jobs_table_tabs_spec.js
new file mode 100644
index 00000000000..ac9b45be932
--- /dev/null
+++ b/spec/frontend/jobs/components/table/jobs_table_tabs_spec.js
@@ -0,0 +1,42 @@
+import { mount } from '@vue/test-utils';
+import { trimText } from 'helpers/text_helper';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import JobsTableTabs from '~/jobs/components/table/jobs_table_tabs.vue';
+
+describe('Jobs Table Tabs', () => {
+ let wrapper;
+
+ const defaultProps = {
+ jobCounts: { all: 848, pending: 0, running: 0, finished: 704 },
+ };
+
+ const findTab = (testId) => wrapper.findByTestId(testId);
+
+ const createComponent = () => {
+ wrapper = extendedWrapper(
+ mount(JobsTableTabs, {
+ provide: {
+ ...defaultProps,
+ },
+ }),
+ );
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it.each`
+ tabId | text | count
+ ${'jobs-all-tab'} | ${'All'} | ${defaultProps.jobCounts.all}
+ ${'jobs-pending-tab'} | ${'Pending'} | ${defaultProps.jobCounts.pending}
+ ${'jobs-running-tab'} | ${'Running'} | ${defaultProps.jobCounts.running}
+ ${'jobs-finished-tab'} | ${'Finished'} | ${defaultProps.jobCounts.finished}
+ `('displays the right tab text and badge count', ({ tabId, text, count }) => {
+ expect(trimText(findTab(tabId).text())).toBe(`${text} ${count}`);
+ });
+});
diff --git a/spec/frontend/jobs/mock_data.js b/spec/frontend/jobs/mock_data.js
index 3d40e94d219..1432c6d7e9b 100644
--- a/spec/frontend/jobs/mock_data.js
+++ b/spec/frontend/jobs/mock_data.js
@@ -911,6 +911,9 @@ export const stages = [
export default {
id: 4757,
+ artifact: {
+ locked: false,
+ },
name: 'test',
build_path: '/root/ci-mock/-/jobs/4757',
retry_path: '/root/ci-mock/-/jobs/4757/retry',
@@ -955,6 +958,7 @@ export default {
artifacts: [null],
runner: {
id: 1,
+ short_sha: 'ABCDEFGH',
description: 'local ci runner',
edit_path: '/root/ci-mock/runners/1/edit',
},
@@ -1189,3 +1193,214 @@ export const jobsInStage = {
path: '/gitlab-org/gitlab-shell/pipelines/27#build',
dropdown_path: '/gitlab-org/gitlab-shell/pipelines/27/stage.json?stage=build',
};
+
+export const mockPipelineWithoutMR = {
+ id: 28029444,
+ details: {
+ status: {
+ details_path: '/gitlab-org/gitlab-foss/pipelines/28029444',
+ group: 'success',
+ has_details: true,
+ icon: 'status_success',
+ label: 'passed',
+ text: 'passed',
+ tooltip: 'passed',
+ },
+ },
+ path: 'pipeline/28029444',
+ ref: {
+ name: 'test-branch',
+ },
+};
+
+export const mockPipelineWithAttachedMR = {
+ id: 28029444,
+ details: {
+ status: {
+ details_path: '/gitlab-org/gitlab-foss/pipelines/28029444',
+ group: 'success',
+ has_details: true,
+ icon: 'status_success',
+ label: 'passed',
+ text: 'passed',
+ tooltip: 'passed',
+ },
+ },
+ path: 'pipeline/28029444',
+ flags: {
+ merge_request_pipeline: true,
+ detached_merge_request_pipeline: false,
+ },
+ merge_request: {
+ iid: 1234,
+ path: '/root/detached-merge-request-pipelines/-/merge_requests/1',
+ title: 'Update README.md',
+ source_branch: 'feature-1234',
+ source_branch_path: '/root/detached-merge-request-pipelines/branches/feature-1234',
+ target_branch: 'master',
+ target_branch_path: '/root/detached-merge-request-pipelines/branches/master',
+ },
+ ref: {
+ name: 'test-branch',
+ },
+};
+
+export const mockPipelineDetached = {
+ id: 28029444,
+ details: {
+ status: {
+ details_path: '/gitlab-org/gitlab-foss/pipelines/28029444',
+ group: 'success',
+ has_details: true,
+ icon: 'status_success',
+ label: 'passed',
+ text: 'passed',
+ tooltip: 'passed',
+ },
+ },
+ path: 'pipeline/28029444',
+ flags: {
+ merge_request_pipeline: false,
+ detached_merge_request_pipeline: true,
+ },
+ merge_request: {
+ iid: 1234,
+ path: '/root/detached-merge-request-pipelines/-/merge_requests/1',
+ title: 'Update README.md',
+ source_branch: 'feature-1234',
+ source_branch_path: '/root/detached-merge-request-pipelines/branches/feature-1234',
+ target_branch: 'master',
+ target_branch_path: '/root/detached-merge-request-pipelines/branches/master',
+ },
+ ref: {
+ name: 'test-branch',
+ },
+};
+
+export const mockJobsInTable = [
+ {
+ detailedStatus: {
+ icon: 'status_manual',
+ label: 'manual play action',
+ text: 'manual',
+ tooltip: 'manual action',
+ action: {
+ buttonTitle: 'Trigger this manual action',
+ icon: 'play',
+ method: 'post',
+ path: '/root/ci-project/-/jobs/2004/play',
+ title: 'Play',
+ __typename: 'StatusAction',
+ },
+ __typename: 'DetailedStatus',
+ },
+ id: 'gid://gitlab/Ci::Build/2004',
+ refName: 'master',
+ refPath: '/root/ci-project/-/commits/master',
+ tags: [],
+ shortSha: '2d5d8323',
+ commitPath: '/root/ci-project/-/commit/2d5d83230bdea0e003d83ef4c16d2bf9a8808ebe',
+ pipeline: {
+ id: 'gid://gitlab/Ci::Pipeline/423',
+ path: '/root/ci-project/-/pipelines/423',
+ user: {
+ webPath: '/root',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ __typename: 'User',
+ },
+ __typename: 'Pipeline',
+ },
+ stage: { name: 'test', __typename: 'CiStage' },
+ name: 'test_manual_job',
+ duration: null,
+ finishedAt: null,
+ coverage: null,
+ retryable: false,
+ playable: true,
+ cancelable: false,
+ active: false,
+ __typename: 'CiJob',
+ },
+ {
+ detailedStatus: {
+ icon: 'status_skipped',
+ label: 'skipped',
+ text: 'skipped',
+ tooltip: 'skipped',
+ action: null,
+ __typename: 'DetailedStatus',
+ },
+ id: 'gid://gitlab/Ci::Build/2021',
+ refName: 'master',
+ refPath: '/root/ci-project/-/commits/master',
+ tags: [],
+ shortSha: '2d5d8323',
+ commitPath: '/root/ci-project/-/commit/2d5d83230bdea0e003d83ef4c16d2bf9a8808ebe',
+ pipeline: {
+ id: 'gid://gitlab/Ci::Pipeline/425',
+ path: '/root/ci-project/-/pipelines/425',
+ user: {
+ webPath: '/root',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ __typename: 'User',
+ },
+ __typename: 'Pipeline',
+ },
+ stage: { name: 'test', __typename: 'CiStage' },
+ name: 'coverage_job',
+ duration: null,
+ finishedAt: null,
+ coverage: null,
+ retryable: false,
+ playable: false,
+ cancelable: false,
+ active: false,
+ __typename: 'CiJob',
+ },
+ {
+ detailedStatus: {
+ icon: 'status_success',
+ label: 'passed',
+ text: 'passed',
+ tooltip: 'passed',
+ action: {
+ buttonTitle: 'Retry this job',
+ icon: 'retry',
+ method: 'post',
+ path: '/root/ci-project/-/jobs/2015/retry',
+ title: 'Retry',
+ __typename: 'StatusAction',
+ },
+ __typename: 'DetailedStatus',
+ },
+ id: 'gid://gitlab/Ci::Build/2015',
+ refName: 'master',
+ refPath: '/root/ci-project/-/commits/master',
+ tags: [],
+ shortSha: '2d5d8323',
+ commitPath: '/root/ci-project/-/commit/2d5d83230bdea0e003d83ef4c16d2bf9a8808ebe',
+ pipeline: {
+ id: 'gid://gitlab/Ci::Pipeline/424',
+ path: '/root/ci-project/-/pipelines/424',
+ user: {
+ webPath: '/root',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ __typename: 'User',
+ },
+ __typename: 'Pipeline',
+ },
+ stage: { name: 'deploy', __typename: 'CiStage' },
+ name: 'artifact_job',
+ duration: 2,
+ finishedAt: '2021-04-01T17:36:18Z',
+ coverage: null,
+ retryable: true,
+ playable: false,
+ cancelable: false,
+ active: false,
+ __typename: 'CiJob',
+ },
+];
diff --git a/spec/frontend/lib/utils/color_utils_spec.js b/spec/frontend/lib/utils/color_utils_spec.js
index 8c846abd77f..c6b88b2957c 100644
--- a/spec/frontend/lib/utils/color_utils_spec.js
+++ b/spec/frontend/lib/utils/color_utils_spec.js
@@ -1,4 +1,9 @@
-import { textColorForBackground, hexToRgb, validateHexColor } from '~/lib/utils/color_utils';
+import {
+ textColorForBackground,
+ hexToRgb,
+ validateHexColor,
+ darkModeEnabled,
+} from '~/lib/utils/color_utils';
describe('Color utils', () => {
describe('Converting hex code to rgb', () => {
@@ -47,4 +52,24 @@ describe('Color utils', () => {
expect(validateHexColor(color)).toEqual(output);
});
});
+
+ describe('darkModeEnabled', () => {
+ it.each`
+ page | bodyClass | ideTheme | expected
+ ${'ide:index'} | ${'gl-dark'} | ${'monokai-light'} | ${false}
+ ${'ide:index'} | ${'ui-light'} | ${'monokai'} | ${true}
+ ${'groups:issues:index'} | ${'ui-light'} | ${'monokai'} | ${false}
+ ${'groups:issues:index'} | ${'gl-dark'} | ${'monokai-light'} | ${true}
+ `(
+ 'is $expected on $page with $bodyClass body class and $ideTheme IDE theme',
+ async ({ page, bodyClass, ideTheme, expected }) => {
+ document.body.outerHTML = `<body class="${bodyClass}" data-page="${page}"></body>`;
+ window.gon = {
+ user_color_scheme: ideTheme,
+ };
+
+ expect(darkModeEnabled()).toBe(expected);
+ },
+ );
+ });
});
diff --git a/spec/frontend/lib/utils/common_utils_spec.js b/spec/frontend/lib/utils/common_utils_spec.js
index 18be88a0b8b..e03d1ef7295 100644
--- a/spec/frontend/lib/utils/common_utils_spec.js
+++ b/spec/frontend/lib/utils/common_utils_spec.js
@@ -987,6 +987,16 @@ describe('common_utils', () => {
});
});
+ describe('roundToNearestHalf', () => {
+  it('Rounds decimals to the nearest half', () => {
+ expect(commonUtils.roundToNearestHalf(3.141592)).toBe(3);
+ expect(commonUtils.roundToNearestHalf(3.41592)).toBe(3.5);
+ expect(commonUtils.roundToNearestHalf(1.27)).toBe(1.5);
+ expect(commonUtils.roundToNearestHalf(1.23)).toBe(1);
+ expect(commonUtils.roundToNearestHalf(1.778)).toBe(2);
+ });
+ });
+
describe('searchBy', () => {
const searchSpace = {
iid: 1,
diff --git a/spec/frontend/lib/utils/datetime_utility_spec.js b/spec/frontend/lib/utils/datetime_utility_spec.js
index 2df0cb00f9a..6180cd8e94d 100644
--- a/spec/frontend/lib/utils/datetime_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime_utility_spec.js
@@ -178,6 +178,30 @@ describe('timeIntervalInWords', () => {
});
});
+describe('humanizeTimeInterval', () => {
+ it.each`
+ intervalInSeconds | expected
+ ${0} | ${'0 seconds'}
+ ${1} | ${'1 second'}
+ ${1.48} | ${'1.5 seconds'}
+ ${2} | ${'2 seconds'}
+ ${60} | ${'1 minute'}
+ ${91} | ${'1.5 minutes'}
+ ${120} | ${'2 minutes'}
+ ${3600} | ${'1 hour'}
+ ${5401} | ${'1.5 hours'}
+ ${7200} | ${'2 hours'}
+ ${86400} | ${'1 day'}
+ ${129601} | ${'1.5 days'}
+ ${172800} | ${'2 days'}
+ `(
+ 'returns "$expected" when the time interval is $intervalInSeconds seconds',
+ ({ intervalInSeconds, expected }) => {
+ expect(datetimeUtility.humanizeTimeInterval(intervalInSeconds)).toBe(expected);
+ },
+ );
+});
+
describe('dateInWords', () => {
const date = new Date('07/01/2016');
@@ -966,62 +990,6 @@ describe('format24HourTimeStringFromInt', () => {
});
});
-describe('getOverlapDateInPeriods', () => {
- const start = new Date(2021, 0, 11);
- const end = new Date(2021, 0, 13);
-
- describe('when date periods overlap', () => {
- const givenPeriodLeft = new Date(2021, 0, 11);
- const givenPeriodRight = new Date(2021, 0, 14);
-
- it('returns an overlap object that contains the amount of days overlapping, the amount of hours overlapping, start date of overlap and end date of overlap', () => {
- expect(
- datetimeUtility.getOverlapDateInPeriods(
- { start, end },
- { start: givenPeriodLeft, end: givenPeriodRight },
- ),
- ).toEqual({
- daysOverlap: 2,
- hoursOverlap: 48,
- overlapStartDate: givenPeriodLeft.getTime(),
- overlapEndDate: end.getTime(),
- });
- });
- });
-
- describe('when date periods do not overlap', () => {
- const givenPeriodLeft = new Date(2021, 0, 9);
- const givenPeriodRight = new Date(2021, 0, 10);
-
- it('returns an overlap object that contains a 0 value for days overlapping', () => {
- expect(
- datetimeUtility.getOverlapDateInPeriods(
- { start, end },
- { start: givenPeriodLeft, end: givenPeriodRight },
- ),
- ).toEqual({ daysOverlap: 0 });
- });
- });
-
- describe('when date periods contain an invalid Date', () => {
- const startInvalid = new Date(NaN);
- const endInvalid = new Date(NaN);
- const error = __('Invalid period');
-
- it('throws an exception when the left period contains an invalid date', () => {
- expect(() =>
- datetimeUtility.getOverlapDateInPeriods({ start, end }, { start: startInvalid, end }),
- ).toThrow(error);
- });
-
- it('throws an exception when the right period contains an invalid date', () => {
- expect(() =>
- datetimeUtility.getOverlapDateInPeriods({ start, end }, { start, end: endInvalid }),
- ).toThrow(error);
- });
- });
-});
-
describe('isToday', () => {
const today = new Date();
it.each`
diff --git a/spec/frontend/lib/utils/forms_spec.js b/spec/frontend/lib/utils/forms_spec.js
index f65bd8ffe0c..123d36ac5d5 100644
--- a/spec/frontend/lib/utils/forms_spec.js
+++ b/spec/frontend/lib/utils/forms_spec.js
@@ -1,4 +1,9 @@
-import { serializeForm, serializeFormObject, isEmptyValue } from '~/lib/utils/forms';
+import {
+ serializeForm,
+ serializeFormObject,
+ isEmptyValue,
+ parseRailsFormFields,
+} from '~/lib/utils/forms';
describe('lib/utils/forms', () => {
const createDummyForm = (inputs) => {
@@ -135,4 +140,160 @@ describe('lib/utils/forms', () => {
});
});
});
+
+ describe('parseRailsFormFields', () => {
+ let mountEl;
+
+ beforeEach(() => {
+ mountEl = document.createElement('div');
+ mountEl.classList.add('js-foo-bar');
+ });
+
+ afterEach(() => {
+ mountEl = null;
+ });
+
+ it('parses fields generated by Rails and returns object with HTML attributes', () => {
+ mountEl.innerHTML = `
+ <input type="text" placeholder="Name" value="Administrator" name="user[name]" id="user_name" data-js-name="name">
+ <input type="text" placeholder="Email" value="foo@bar.com" name="user[contact_info][email]" id="user_contact_info_email" data-js-name="contactInfoEmail">
+ <input type="text" placeholder="Phone" value="(123) 456-7890" name="user[contact_info][phone]" id="user_contact_info_phone" data-js-name="contact_info_phone">
+ <input type="hidden" placeholder="Job title" value="" name="user[job_title]" id="user_job_title" data-js-name="jobTitle">
+ <textarea name="user[bio]" id="user_bio" data-js-name="bio">Foo bar</textarea>
+ <select name="user[timezone]" id="user_timezone" data-js-name="timezone">
+ <option value="utc+12">[UTC - 12] International Date Line West</option>
+ <option value="utc+11" selected>[UTC - 11] American Samoa</option>
+ </select>
+ <input type="checkbox" name="user[interests][]" id="user_interests_vue" value="Vue" checked data-js-name="interests">
+ <input type="checkbox" name="user[interests][]" id="user_interests_graphql" value="GraphQL" data-js-name="interests">
+ <input type="radio" name="user[access_level]" value="regular" id="user_access_level_regular" data-js-name="accessLevel">
+ <input type="radio" name="user[access_level]" value="admin" id="user_access_level_admin" checked data-js-name="access_level">
+ <input name="user[private_profile]" type="hidden" value="0">
+ <input type="radio" name="user[private_profile]" id="user_private_profile" value="1" checked data-js-name="privateProfile">
+ <input name="user[email_notifications]" type="hidden" value="0">
+ <input type="radio" name="user[email_notifications]" id="user_email_notifications" value="1" data-js-name="emailNotifications">
+ `;
+
+ expect(parseRailsFormFields(mountEl)).toEqual({
+ name: {
+ name: 'user[name]',
+ id: 'user_name',
+ value: 'Administrator',
+ placeholder: 'Name',
+ },
+ contactInfoEmail: {
+ name: 'user[contact_info][email]',
+ id: 'user_contact_info_email',
+ value: 'foo@bar.com',
+ placeholder: 'Email',
+ },
+ contactInfoPhone: {
+ name: 'user[contact_info][phone]',
+ id: 'user_contact_info_phone',
+ value: '(123) 456-7890',
+ placeholder: 'Phone',
+ },
+ jobTitle: {
+ name: 'user[job_title]',
+ id: 'user_job_title',
+ value: '',
+ placeholder: 'Job title',
+ },
+ bio: {
+ name: 'user[bio]',
+ id: 'user_bio',
+ value: 'Foo bar',
+ },
+ timezone: {
+ name: 'user[timezone]',
+ id: 'user_timezone',
+ value: 'utc+11',
+ },
+ interests: [
+ {
+ name: 'user[interests][]',
+ id: 'user_interests_vue',
+ value: 'Vue',
+ checked: true,
+ },
+ {
+ name: 'user[interests][]',
+ id: 'user_interests_graphql',
+ value: 'GraphQL',
+ checked: false,
+ },
+ ],
+ accessLevel: [
+ {
+ name: 'user[access_level]',
+ id: 'user_access_level_regular',
+ value: 'regular',
+ checked: false,
+ },
+ {
+ name: 'user[access_level]',
+ id: 'user_access_level_admin',
+ value: 'admin',
+ checked: true,
+ },
+ ],
+ privateProfile: [
+ {
+ name: 'user[private_profile]',
+ id: 'user_private_profile',
+ value: '1',
+ checked: true,
+ },
+ ],
+ emailNotifications: [
+ {
+ name: 'user[email_notifications]',
+ id: 'user_email_notifications',
+ value: '1',
+ checked: false,
+ },
+ ],
+ });
+ });
+
+ it('returns an empty object if there are no inputs', () => {
+ expect(parseRailsFormFields(mountEl)).toEqual({});
+ });
+
+ it('returns an empty object if inputs do not have `name` attributes', () => {
+ mountEl.innerHTML = `
+ <input type="text" placeholder="Name" value="Administrator" id="user_name">
+ <input type="text" placeholder="Email" value="foo@bar.com" id="user_contact_info_email">
+ <input type="text" placeholder="Phone" value="(123) 456-7890" id="user_contact_info_phone">
+ `;
+
+ expect(parseRailsFormFields(mountEl)).toEqual({});
+ });
+
+ it('does not include field if `data-js-name` attribute is missing', () => {
+ mountEl.innerHTML = `
+ <input type="text" placeholder="Name" value="Administrator" name="user[name]" id="user_name" data-js-name="name">
+ <input type="text" placeholder="Email" value="foo@bar.com" name="user[email]" id="email">
+ `;
+
+ expect(parseRailsFormFields(mountEl)).toEqual({
+ name: {
+ name: 'user[name]',
+ id: 'user_name',
+ value: 'Administrator',
+ placeholder: 'Name',
+ },
+ });
+ });
+
+ it('throws error if `mountEl` argument is not passed', () => {
+ expect(() => parseRailsFormFields()).toThrow(new TypeError('`mountEl` argument is required'));
+ });
+
+ it('throws error if `mountEl` argument is `null`', () => {
+ expect(() => parseRailsFormFields(null)).toThrow(
+ new TypeError('`mountEl` argument is required'),
+ );
+ });
+ });
});
diff --git a/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js
index f86237dc160..f1471f625f8 100644
--- a/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js
+++ b/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js
@@ -42,6 +42,7 @@ describe('AccessRequestActionButtons', () => {
memberId: member.id,
title: 'Deny access',
isAccessRequest: true,
+ isInvite: false,
icon: 'close',
});
});
diff --git a/spec/frontend/members/components/action_buttons/approve_access_request_button_spec.js b/spec/frontend/members/components/action_buttons/approve_access_request_button_spec.js
index f77d41a642e..936715e7723 100644
--- a/spec/frontend/members/components/action_buttons/approve_access_request_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/approve_access_request_button_spec.js
@@ -3,6 +3,7 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import ApproveAccessRequestButton from '~/members/components/action_buttons/approve_access_request_button.vue';
+import { MEMBER_TYPES } from '~/members/constants';
jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
@@ -14,9 +15,14 @@ describe('ApproveAccessRequestButton', () => {
const createStore = (state = {}) => {
return new Vuex.Store({
- state: {
- memberPath: '/groups/foo-bar/-/group_members/:id',
- ...state,
+ modules: {
+ [MEMBER_TYPES.accessRequest]: {
+ namespaced: true,
+ state: {
+ memberPath: '/groups/foo-bar/-/group_members/:id',
+ ...state,
+ },
+ },
},
});
};
@@ -25,6 +31,9 @@ describe('ApproveAccessRequestButton', () => {
wrapper = shallowMount(ApproveAccessRequestButton, {
localVue,
store: createStore(state),
+ provide: {
+ namespace: MEMBER_TYPES.accessRequest,
+ },
propsData: {
memberId: 1,
...propsData,
diff --git a/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js
index fe63f9bfaa7..e7a99a96da6 100644
--- a/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js
+++ b/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js
@@ -39,9 +39,11 @@ describe('InviteActionButtons', () => {
it('sets props correctly', () => {
expect(findRemoveMemberButton().props()).toEqual({
memberId: member.id,
+ memberType: null,
message: `Are you sure you want to revoke the invitation for ${member.invite.email} to join "${member.source.fullName}"`,
title: 'Revoke invite',
isAccessRequest: false,
+ isInvite: true,
icon: 'remove',
});
});
diff --git a/spec/frontend/members/components/action_buttons/remove_group_link_button_spec.js b/spec/frontend/members/components/action_buttons/remove_group_link_button_spec.js
index f6e342898cb..f91aef131a1 100644
--- a/spec/frontend/members/components/action_buttons/remove_group_link_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/remove_group_link_button_spec.js
@@ -3,6 +3,7 @@ import { mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import RemoveGroupLinkButton from '~/members/components/action_buttons/remove_group_link_button.vue';
+import { MEMBER_TYPES } from '~/members/constants';
import { group } from '../../mock_data';
const localVue = createLocalVue();
@@ -17,7 +18,12 @@ describe('RemoveGroupLinkButton', () => {
const createStore = () => {
return new Vuex.Store({
- actions,
+ modules: {
+ [MEMBER_TYPES.group]: {
+ namespaced: true,
+ actions,
+ },
+ },
});
};
@@ -25,6 +31,9 @@ describe('RemoveGroupLinkButton', () => {
wrapper = mount(RemoveGroupLinkButton, {
localVue,
store: createStore(),
+ provide: {
+ namespace: MEMBER_TYPES.group,
+ },
propsData: {
groupLink: group,
},
diff --git a/spec/frontend/members/components/action_buttons/remove_member_button_spec.js b/spec/frontend/members/components/action_buttons/remove_member_button_spec.js
index 437b3e705a4..4ff12f7fa97 100644
--- a/spec/frontend/members/components/action_buttons/remove_member_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/remove_member_button_spec.js
@@ -2,6 +2,7 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import RemoveMemberButton from '~/members/components/action_buttons/remove_member_button.vue';
+import { MEMBER_TYPES } from '~/members/constants';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -11,9 +12,14 @@ describe('RemoveMemberButton', () => {
const createStore = (state = {}) => {
return new Vuex.Store({
- state: {
- memberPath: '/groups/foo-bar/-/group_members/:id',
- ...state,
+ modules: {
+ [MEMBER_TYPES.user]: {
+ namespaced: true,
+ state: {
+ memberPath: '/groups/foo-bar/-/group_members/:id',
+ ...state,
+ },
+ },
},
});
};
@@ -22,11 +28,17 @@ describe('RemoveMemberButton', () => {
wrapper = shallowMount(RemoveMemberButton, {
localVue,
store: createStore(state),
+ provide: {
+ namespace: MEMBER_TYPES.user,
+ },
propsData: {
memberId: 1,
+ memberType: 'GroupMember',
message: 'Are you sure you want to remove John Smith?',
title: 'Remove member',
isAccessRequest: true,
+ isInvite: true,
+ oncallSchedules: { name: 'user', schedules: [] },
...propsData,
},
directives: {
@@ -44,8 +56,11 @@ describe('RemoveMemberButton', () => {
expect(wrapper.attributes()).toMatchObject({
'data-member-path': '/groups/foo-bar/-/group_members/1',
+ 'data-member-type': 'GroupMember',
'data-message': 'Are you sure you want to remove John Smith?',
'data-is-access-request': 'true',
+ 'data-is-invite': 'true',
+ 'data-oncall-schedules': '{"name":"user","schedules":[]}',
'aria-label': 'Remove member',
title: 'Remove member',
icon: 'remove',
diff --git a/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js b/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js
index 49b6979f954..547e067450c 100644
--- a/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/resend_invite_button_spec.js
@@ -3,6 +3,7 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import ResendInviteButton from '~/members/components/action_buttons/resend_invite_button.vue';
+import { MEMBER_TYPES } from '~/members/constants';
jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
@@ -14,9 +15,14 @@ describe('ResendInviteButton', () => {
const createStore = (state = {}) => {
return new Vuex.Store({
- state: {
- memberPath: '/groups/foo-bar/-/group_members/:id',
- ...state,
+ modules: {
+ [MEMBER_TYPES.invite]: {
+ namespaced: true,
+ state: {
+ memberPath: '/groups/foo-bar/-/group_members/:id',
+ ...state,
+ },
+ },
},
});
};
@@ -25,6 +31,9 @@ describe('ResendInviteButton', () => {
wrapper = shallowMount(ResendInviteButton, {
localVue,
store: createStore(state),
+ provide: {
+ namespace: MEMBER_TYPES.invite,
+ },
propsData: {
memberId: 1,
...propsData,
diff --git a/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
index 1d7ea5b3109..0aa3780f030 100644
--- a/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
+++ b/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
@@ -39,10 +39,16 @@ describe('UserActionButtons', () => {
it('sets props correctly', () => {
expect(findRemoveMemberButton().props()).toEqual({
memberId: member.id,
- message: `Are you sure you want to remove ${member.user.name} from "${member.source.fullName}"`,
+ memberType: 'GroupMember',
+ message: `Are you sure you want to remove ${member.user.name} from "${member.source.fullName}"?`,
title: 'Remove member',
isAccessRequest: false,
+ isInvite: false,
icon: 'remove',
+ oncallSchedules: {
+ name: member.user.name,
+ schedules: member.user.oncallSchedules,
+ },
});
});
@@ -56,7 +62,7 @@ describe('UserActionButtons', () => {
});
expect(findRemoveMemberButton().props('message')).toBe(
- `Are you sure you want to remove this orphaned member from "${orphanedMember.source.fullName}"`,
+ `Are you sure you want to remove this orphaned member from "${orphanedMember.source.fullName}"?`,
);
});
});
@@ -86,4 +92,40 @@ describe('UserActionButtons', () => {
expect(findRemoveMemberButton().exists()).toBe(false);
});
});
+
+ describe('when group member', () => {
+ beforeEach(() => {
+ createComponent({
+ member: {
+ ...member,
+ type: 'GroupMember',
+ },
+ permissions: {
+ canRemove: true,
+ },
+ });
+ });
+
+ it('sets member type correctly', () => {
+ expect(findRemoveMemberButton().props().memberType).toBe('GroupMember');
+ });
+ });
+
+ describe('when project member', () => {
+ beforeEach(() => {
+ createComponent({
+ member: {
+ ...member,
+ type: 'ProjectMember',
+ },
+ permissions: {
+ canRemove: true,
+ },
+ });
+ });
+
+ it('sets member type correctly', () => {
+ expect(findRemoveMemberButton().props().memberType).toBe('ProjectMember');
+ });
+ });
});
diff --git a/spec/frontend/members/components/app_spec.js b/spec/frontend/members/components/app_spec.js
index a1329c3ee9f..05933e36b52 100644
--- a/spec/frontend/members/components/app_spec.js
+++ b/spec/frontend/members/components/app_spec.js
@@ -5,6 +5,7 @@ import Vuex from 'vuex';
import * as commonUtils from '~/lib/utils/common_utils';
import MembersApp from '~/members/components/app.vue';
import FilterSortContainer from '~/members/components/filter_sort/filter_sort_container.vue';
+import { MEMBER_TYPES } from '~/members/constants';
import { RECEIVE_MEMBER_ROLE_ERROR, HIDE_ERROR } from '~/members/store/mutation_types';
import mutations from '~/members/store/mutations';
@@ -17,16 +18,24 @@ describe('MembersApp', () => {
const createComponent = (state = {}, options = {}) => {
store = new Vuex.Store({
- state: {
- showError: true,
- errorMessage: 'Something went wrong, please try again.',
- ...state,
+ modules: {
+ [MEMBER_TYPES.user]: {
+ namespaced: true,
+ state: {
+ showError: true,
+ errorMessage: 'Something went wrong, please try again.',
+ ...state,
+ },
+ mutations,
+ },
},
- mutations,
});
wrapper = shallowMount(MembersApp, {
localVue,
+ provide: {
+ namespace: MEMBER_TYPES.user,
+ },
store,
...options,
});
@@ -48,7 +57,9 @@ describe('MembersApp', () => {
it('renders and scrolls to error alert', async () => {
createComponent({ showError: false, errorMessage: '' });
- store.commit(RECEIVE_MEMBER_ROLE_ERROR, { error: new Error('Network Error') });
+ store.commit(`${MEMBER_TYPES.user}/${RECEIVE_MEMBER_ROLE_ERROR}`, {
+ error: new Error('Network Error'),
+ });
await nextTick();
@@ -66,7 +77,7 @@ describe('MembersApp', () => {
it('does not render and scroll to error alert', async () => {
createComponent();
- store.commit(HIDE_ERROR);
+ store.commit(`${MEMBER_TYPES.user}/${HIDE_ERROR}`);
await nextTick();
diff --git a/spec/frontend/members/components/avatars/user_avatar_spec.js b/spec/frontend/members/components/avatars/user_avatar_spec.js
index 3f4d9155c5d..5cf3a4cdc13 100644
--- a/spec/frontend/members/components/avatars/user_avatar_spec.js
+++ b/spec/frontend/members/components/avatars/user_avatar_spec.js
@@ -1,31 +1,25 @@
import { GlAvatarLink, GlBadge } from '@gitlab/ui';
import { within } from '@testing-library/dom';
import { mount, createWrapper } from '@vue/test-utils';
-import Vue from 'vue';
-import Vuex from 'vuex';
import UserAvatar from '~/members/components/avatars/user_avatar.vue';
import { member as memberMock, member2faEnabled, orphanedMember } from '../../mock_data';
-Vue.use(Vuex);
-
describe('UserAvatar', () => {
let wrapper;
const { user } = memberMock;
- const createComponent = (propsData = {}, state = {}) => {
+ const createComponent = (propsData = {}, provide = {}) => {
wrapper = mount(UserAvatar, {
propsData: {
member: memberMock,
isCurrentUser: false,
...propsData,
},
- store: new Vuex.Store({
- state: {
- canManageMembers: true,
- ...state,
- },
- }),
+ provide: {
+ canManageMembers: true,
+ ...provide,
+ },
});
};
diff --git a/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js b/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js
index 0d9f9acbbeb..16ac52737bc 100644
--- a/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js
+++ b/spec/frontend/members/components/filter_sort/filter_sort_container_spec.js
@@ -3,6 +3,7 @@ import Vuex from 'vuex';
import FilterSortContainer from '~/members/components/filter_sort/filter_sort_container.vue';
import MembersFilteredSearchBar from '~/members/components/filter_sort/members_filtered_search_bar.vue';
import SortDropdown from '~/members/components/filter_sort/sort_dropdown.vue';
+import { MEMBER_TYPES } from '~/members/constants';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -12,22 +13,30 @@ describe('FilterSortContainer', () => {
const createComponent = (state) => {
const store = new Vuex.Store({
- state: {
- filteredSearchBar: {
- show: true,
- tokens: ['two_factor'],
- searchParam: 'search',
- placeholder: 'Filter members',
- recentSearchesStorageKey: 'group_members',
+ modules: {
+ [MEMBER_TYPES.user]: {
+ namespaced: true,
+ state: {
+ filteredSearchBar: {
+ show: true,
+ tokens: ['two_factor'],
+ searchParam: 'search',
+ placeholder: 'Filter members',
+ recentSearchesStorageKey: 'group_members',
+ },
+ tableSortableFields: ['account'],
+ ...state,
+ },
},
- tableSortableFields: ['account'],
- ...state,
},
});
wrapper = shallowMount(FilterSortContainer, {
localVue,
store,
+ provide: {
+ namespace: MEMBER_TYPES.user,
+ },
});
};
diff --git a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
index 14b437a8c4e..af5434f7068 100644
--- a/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
+++ b/spec/frontend/members/components/filter_sort/members_filtered_search_bar_spec.js
@@ -2,6 +2,7 @@ import { GlFilteredSearchToken } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import MembersFilteredSearchBar from '~/members/components/filter_sort/members_filtered_search_bar.vue';
+import { MEMBER_TYPES } from '~/members/constants';
import FilteredSearchBar from '~/vue_shared/components/filtered_search_bar/filtered_search_bar_root.vue';
const localVue = createLocalVue();
@@ -10,24 +11,33 @@ localVue.use(Vuex);
describe('MembersFilteredSearchBar', () => {
let wrapper;
- const createComponent = (state) => {
+ const createComponent = ({ state = {}, provide = {} } = {}) => {
const store = new Vuex.Store({
- state: {
- sourceId: 1,
- filteredSearchBar: {
- show: true,
- tokens: ['two_factor'],
- searchParam: 'search',
- placeholder: 'Filter members',
- recentSearchesStorageKey: 'group_members',
+ modules: {
+ [MEMBER_TYPES.user]: {
+ namespaced: true,
+ state: {
+ filteredSearchBar: {
+ show: true,
+ tokens: ['two_factor'],
+ searchParam: 'search',
+ placeholder: 'Filter members',
+ recentSearchesStorageKey: 'group_members',
+ },
+ ...state,
+ },
},
- canManageMembers: true,
- ...state,
},
});
wrapper = shallowMount(MembersFilteredSearchBar, {
localVue,
+ provide: {
+ sourceId: 1,
+ canManageMembers: true,
+ namespace: MEMBER_TYPES.user,
+ ...provide,
+ },
store,
});
};
@@ -68,14 +78,18 @@ describe('MembersFilteredSearchBar', () => {
describe('when `canManageMembers` is false', () => {
it('excludes 2FA token', () => {
createComponent({
- filteredSearchBar: {
- show: true,
- tokens: ['two_factor', 'with_inherited_permissions'],
- searchParam: 'search',
- placeholder: 'Filter members',
- recentSearchesStorageKey: 'group_members',
+ state: {
+ filteredSearchBar: {
+ show: true,
+ tokens: ['two_factor', 'with_inherited_permissions'],
+ searchParam: 'search',
+ placeholder: 'Filter members',
+ recentSearchesStorageKey: 'group_members',
+ },
+ },
+ provide: {
+ canManageMembers: false,
},
- canManageMembers: false,
});
expect(findFilteredSearchBar().props('tokens')).toEqual([
diff --git a/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js b/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
index 357fad741e9..4b335755980 100644
--- a/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
+++ b/spec/frontend/members/components/filter_sort/sort_dropdown_spec.js
@@ -3,6 +3,7 @@ import { mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import * as urlUtilities from '~/lib/utils/url_utility';
import SortDropdown from '~/members/components/filter_sort/sort_dropdown.vue';
+import { MEMBER_TYPES } from '~/members/constants';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -14,22 +15,30 @@ describe('SortDropdown', () => {
const createComponent = (state) => {
const store = new Vuex.Store({
- state: {
- sourceId: 1,
- tableSortableFields: ['account', 'granted', 'expires', 'maxRole', 'lastSignIn'],
- filteredSearchBar: {
- show: true,
- tokens: ['two_factor'],
- searchParam: 'search',
- placeholder: 'Filter members',
- recentSearchesStorageKey: 'group_members',
+ modules: {
+ [MEMBER_TYPES.user]: {
+ namespaced: true,
+ state: {
+ tableSortableFields: ['account', 'granted', 'expires', 'maxRole', 'lastSignIn'],
+ filteredSearchBar: {
+ show: true,
+ tokens: ['two_factor'],
+ searchParam: 'search',
+ placeholder: 'Filter members',
+ recentSearchesStorageKey: 'group_members',
+ },
+ ...state,
+ },
},
- ...state,
},
});
wrapper = mount(SortDropdown, {
localVue,
+ provide: {
+ sourceId: 1,
+ namespace: MEMBER_TYPES.user,
+ },
store,
});
};
diff --git a/spec/frontend/members/components/modals/leave_modal_spec.js b/spec/frontend/members/components/modals/leave_modal_spec.js
index 2d52911572f..ea9eb7bf923 100644
--- a/spec/frontend/members/components/modals/leave_modal_spec.js
+++ b/spec/frontend/members/components/modals/leave_modal_spec.js
@@ -1,10 +1,12 @@
import { GlModal, GlForm } from '@gitlab/ui';
import { within } from '@testing-library/dom';
import { mount, createLocalVue, createWrapper } from '@vue/test-utils';
+import { cloneDeep } from 'lodash';
import { nextTick } from 'vue';
import Vuex from 'vuex';
import LeaveModal from '~/members/components/modals/leave_modal.vue';
-import { LEAVE_MODAL_ID } from '~/members/constants';
+import { LEAVE_MODAL_ID, MEMBER_TYPES } from '~/members/constants';
+import OncallSchedulesList from '~/vue_shared/components/oncall_schedules_list.vue';
import { member } from '../../mock_data';
jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
@@ -17,9 +19,14 @@ describe('LeaveModal', () => {
const createStore = (state = {}) => {
return new Vuex.Store({
- state: {
- memberPath: '/groups/foo-bar/-/group_members/:id',
- ...state,
+ modules: {
+ [MEMBER_TYPES.user]: {
+ namespaced: true,
+ state: {
+ memberPath: '/groups/foo-bar/-/group_members/:id',
+ ...state,
+ },
+ },
},
});
};
@@ -28,6 +35,9 @@ describe('LeaveModal', () => {
wrapper = mount(LeaveModal, {
localVue,
store: createStore(state),
+ provide: {
+ namespace: MEMBER_TYPES.user,
+ },
propsData: {
member,
...propsData,
@@ -39,9 +49,9 @@ describe('LeaveModal', () => {
});
};
- const findModal = () => wrapper.find(GlModal);
-
- const findForm = () => findModal().find(GlForm);
+ const findModal = () => wrapper.findComponent(GlModal);
+ const findForm = () => findModal().findComponent(GlForm);
+ const findOncallSchedulesList = () => findModal().findComponent(OncallSchedulesList);
const getByText = (text, options) =>
createWrapper(within(findModal().element).getByText(text, options));
@@ -79,6 +89,24 @@ describe('LeaveModal', () => {
);
});
+ describe('On-call schedules list', () => {
+ it("displays oncall schedules list when member's user is part of on-call schedules ", () => {
+ const schedulesList = findOncallSchedulesList();
+ expect(schedulesList.exists()).toBe(true);
+ expect(schedulesList.props()).toMatchObject({
+ isCurrentUser: true,
+ schedules: member.user.oncallSchedules,
+ });
+ });
+
+ it("does NOT display oncall schedules list when member's user is NOT a part of on-call schedules ", () => {
+ const memberWithoutOncallSchedules = cloneDeep(member);
+      delete memberWithoutOncallSchedules.user.oncallSchedules;
+ createComponent({ member: memberWithoutOncallSchedules });
+ expect(findOncallSchedulesList().exists()).toBe(false);
+ });
+ });
+
it('submits the form when "Leave" button is clicked', () => {
const submitSpy = jest.spyOn(findForm().element, 'submit');
diff --git a/spec/frontend/members/components/modals/remove_group_link_modal_spec.js b/spec/frontend/members/components/modals/remove_group_link_modal_spec.js
index 62df912c1a2..01279581c55 100644
--- a/spec/frontend/members/components/modals/remove_group_link_modal_spec.js
+++ b/spec/frontend/members/components/modals/remove_group_link_modal_spec.js
@@ -4,7 +4,7 @@ import { mount, createLocalVue, createWrapper } from '@vue/test-utils';
import { nextTick } from 'vue';
import Vuex from 'vuex';
import RemoveGroupLinkModal from '~/members/components/modals/remove_group_link_modal.vue';
-import { REMOVE_GROUP_LINK_MODAL_ID } from '~/members/constants';
+import { REMOVE_GROUP_LINK_MODAL_ID, MEMBER_TYPES } from '~/members/constants';
import { group } from '../../mock_data';
jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
@@ -21,13 +21,18 @@ describe('RemoveGroupLinkModal', () => {
const createStore = (state = {}) => {
return new Vuex.Store({
- state: {
- memberPath: '/groups/foo-bar/-/group_links/:id',
- groupLinkToRemove: group,
- removeGroupLinkModalVisible: true,
- ...state,
+ modules: {
+ [MEMBER_TYPES.group]: {
+ namespaced: true,
+ state: {
+ memberPath: '/groups/foo-bar/-/group_links/:id',
+ groupLinkToRemove: group,
+ removeGroupLinkModalVisible: true,
+ ...state,
+ },
+ actions,
+ },
},
- actions,
});
};
@@ -35,6 +40,9 @@ describe('RemoveGroupLinkModal', () => {
wrapper = mount(RemoveGroupLinkModal, {
localVue,
store: createStore(state),
+ provide: {
+ namespace: MEMBER_TYPES.group,
+ },
attrs: {
static: true,
},
diff --git a/spec/frontend/members/components/table/expiration_datepicker_spec.js b/spec/frontend/members/components/table/expiration_datepicker_spec.js
index d26172b4ed1..3c4a9ba37ff 100644
--- a/spec/frontend/members/components/table/expiration_datepicker_spec.js
+++ b/spec/frontend/members/components/table/expiration_datepicker_spec.js
@@ -5,6 +5,7 @@ import Vuex from 'vuex';
import { useFakeDate } from 'helpers/fake_date';
import waitForPromises from 'helpers/wait_for_promises';
import ExpirationDatepicker from '~/members/components/table/expiration_datepicker.vue';
+import { MEMBER_TYPES } from '~/members/constants';
import { member } from '../../mock_data';
const localVue = createLocalVue();
@@ -31,7 +32,11 @@ describe('ExpirationDatepicker', () => {
),
};
- return new Vuex.Store({ actions });
+ return new Vuex.Store({
+ modules: {
+ [MEMBER_TYPES.user]: { namespaced: true, actions },
+ },
+ });
};
const createComponent = (propsData = {}) => {
@@ -41,6 +46,9 @@ describe('ExpirationDatepicker', () => {
permissions: { canUpdate: true },
...propsData,
},
+ provide: {
+ namespace: MEMBER_TYPES.user,
+ },
localVue,
store: createStore(),
mocks: {
diff --git a/spec/frontend/members/components/table/members_table_cell_spec.js b/spec/frontend/members/components/table/members_table_cell_spec.js
index b7dcd2a9fae..5375ee11736 100644
--- a/spec/frontend/members/components/table/members_table_cell_spec.js
+++ b/spec/frontend/members/components/table/members_table_cell_spec.js
@@ -42,21 +42,21 @@ describe('MembersTableCell', () => {
const createStore = (state = {}) => {
return new Vuex.Store({
- state: {
- sourceId: 1,
- currentUserId: 1,
- ...state,
- },
+ state,
});
};
let wrapper;
- const createComponent = (propsData, state = {}) => {
+ const createComponent = (propsData, state) => {
wrapper = mount(MembersTableCell, {
localVue,
propsData,
store: createStore(state),
+ provide: {
+ sourceId: 1,
+ currentUserId: 1,
+ },
scopedSlots: {
default: `
<wrapped-component
diff --git a/spec/frontend/members/components/table/members_table_spec.js b/spec/frontend/members/components/table/members_table_spec.js
index cf5811e72e7..5cf1f40a8f4 100644
--- a/spec/frontend/members/components/table/members_table_spec.js
+++ b/spec/frontend/members/components/table/members_table_spec.js
@@ -14,6 +14,7 @@ import MemberAvatar from '~/members/components/table/member_avatar.vue';
import MemberSource from '~/members/components/table/member_source.vue';
import MembersTable from '~/members/components/table/members_table.vue';
import RoleDropdown from '~/members/components/table/role_dropdown.vue';
+import { MEMBER_TYPES } from '~/members/constants';
import * as initUserPopovers from '~/user_popovers';
import { member as memberMock, directMember, invite, accessRequest } from '../../mock_data';
@@ -25,24 +26,33 @@ describe('MembersTable', () => {
const createStore = (state = {}) => {
return new Vuex.Store({
- state: {
- members: [],
- tableFields: [],
- tableAttrs: {
- table: { 'data-qa-selector': 'members_list' },
- tr: { 'data-qa-selector': 'member_row' },
+ modules: {
+ [MEMBER_TYPES.user]: {
+ namespaced: true,
+ state: {
+ members: [],
+ tableFields: [],
+ tableAttrs: {
+ table: { 'data-qa-selector': 'members_list' },
+ tr: { 'data-qa-selector': 'member_row' },
+ },
+ ...state,
+ },
},
- sourceId: 1,
- currentUserId: 1,
- ...state,
},
});
};
- const createComponent = (state) => {
+ const createComponent = (state, provide = {}) => {
wrapper = mount(MembersTable, {
localVue,
store: createStore(state),
+ provide: {
+ sourceId: 1,
+ currentUserId: 1,
+ namespace: MEMBER_TYPES.user,
+ ...provide,
+ },
stubs: [
'member-avatar',
'member-source',
@@ -119,7 +129,7 @@ describe('MembersTable', () => {
describe('when user is not logged in', () => {
it('does not render the "Actions" field', () => {
- createComponent({ currentUserId: null, tableFields: ['actions'] });
+ createComponent({ tableFields: ['actions'] }, { currentUserId: null });
expect(within(wrapper.element).queryByTestId('col-actions')).toBe(null);
});
diff --git a/spec/frontend/members/components/table/role_dropdown_spec.js b/spec/frontend/members/components/table/role_dropdown_spec.js
index aa280599061..c8b6bead450 100644
--- a/spec/frontend/members/components/table/role_dropdown_spec.js
+++ b/spec/frontend/members/components/table/role_dropdown_spec.js
@@ -7,6 +7,7 @@ import Vuex from 'vuex';
import waitForPromises from 'helpers/wait_for_promises';
import { BV_DROPDOWN_SHOW } from '~/lib/utils/constants';
import RoleDropdown from '~/members/components/table/role_dropdown.vue';
+import { MEMBER_TYPES } from '~/members/constants';
import { member } from '../../mock_data';
const localVue = createLocalVue();
@@ -24,11 +25,18 @@ describe('RoleDropdown', () => {
updateMemberRole: jest.fn(() => Promise.resolve()),
};
- return new Vuex.Store({ actions });
+ return new Vuex.Store({
+ modules: {
+ [MEMBER_TYPES.user]: { namespaced: true, actions },
+ },
+ });
};
const createComponent = (propsData = {}) => {
wrapper = mount(RoleDropdown, {
+ provide: {
+ namespace: MEMBER_TYPES.user,
+ },
propsData: {
member,
permissions: {},
diff --git a/spec/frontend/members/index_spec.js b/spec/frontend/members/index_spec.js
index dd3b9ddd912..8b645d9b059 100644
--- a/spec/frontend/members/index_spec.js
+++ b/spec/frontend/members/index_spec.js
@@ -1,5 +1,6 @@
import { createWrapper } from '@vue/test-utils';
import MembersApp from '~/members/components/app.vue';
+import { MEMBER_TYPES } from '~/members/constants';
import { initMembersApp } from '~/members/index';
import { membersJsonString, members } from './mock_data';
@@ -10,6 +11,7 @@ describe('initMembersApp', () => {
const setup = () => {
vm = initMembersApp(el, {
+ namespace: MEMBER_TYPES.user,
tableFields: ['account'],
tableAttrs: { table: { 'data-qa-selector': 'members_list' } },
tableSortableFields: ['account'],
@@ -42,72 +44,49 @@ describe('initMembersApp', () => {
expect(wrapper.find(MembersApp).exists()).toBe(true);
});
- it('sets `currentUserId` in Vuex store', () => {
- setup();
-
- expect(vm.$store.state.currentUserId).toBe(123);
- });
-
- describe('when `gon.current_user_id` is not set (user is not logged in)', () => {
- it('sets `currentUserId` as `null` in Vuex store', () => {
- window.gon = {};
- setup();
-
- expect(vm.$store.state.currentUserId).toBeNull();
- });
- });
-
- it('parses and sets `data-source-id` as `sourceId` in Vuex store', () => {
- setup();
-
- expect(vm.$store.state.sourceId).toBe(234);
- });
-
- it('parses and sets `data-can-manage-members` as `canManageMembers` in Vuex store', () => {
- setup();
-
- expect(vm.$store.state.canManageMembers).toBe(true);
- });
-
it('parses and sets `members` in Vuex store', () => {
setup();
- expect(vm.$store.state.members).toEqual(members);
+ expect(vm.$store.state[MEMBER_TYPES.user].members).toEqual(members);
});
it('sets `tableFields` in Vuex store', () => {
setup();
- expect(vm.$store.state.tableFields).toEqual(['account']);
+ expect(vm.$store.state[MEMBER_TYPES.user].tableFields).toEqual(['account']);
});
it('sets `tableAttrs` in Vuex store', () => {
setup();
- expect(vm.$store.state.tableAttrs).toEqual({ table: { 'data-qa-selector': 'members_list' } });
+ expect(vm.$store.state[MEMBER_TYPES.user].tableAttrs).toEqual({
+ table: { 'data-qa-selector': 'members_list' },
+ });
});
it('sets `tableSortableFields` in Vuex store', () => {
setup();
- expect(vm.$store.state.tableSortableFields).toEqual(['account']);
+ expect(vm.$store.state[MEMBER_TYPES.user].tableSortableFields).toEqual(['account']);
});
it('sets `requestFormatter` in Vuex store', () => {
setup();
- expect(vm.$store.state.requestFormatter()).toEqual({});
+ expect(vm.$store.state[MEMBER_TYPES.user].requestFormatter()).toEqual({});
});
it('sets `filteredSearchBar` in Vuex store', () => {
setup();
- expect(vm.$store.state.filteredSearchBar).toEqual({ show: false });
+ expect(vm.$store.state[MEMBER_TYPES.user].filteredSearchBar).toEqual({ show: false });
});
it('sets `memberPath` in Vuex store', () => {
setup();
- expect(vm.$store.state.memberPath).toBe('/groups/foo-bar/-/group_members/:id');
+ expect(vm.$store.state[MEMBER_TYPES.user].memberPath).toBe(
+ '/groups/foo-bar/-/group_members/:id',
+ );
});
});
diff --git a/spec/frontend/members/mock_data.js b/spec/frontend/members/mock_data.js
index 6a73b2fcf8c..a47b7ab2118 100644
--- a/spec/frontend/members/mock_data.js
+++ b/spec/frontend/members/mock_data.js
@@ -11,6 +11,7 @@ export const member = {
fullName: 'Foo Bar',
webUrl: 'https://gitlab.com/groups/foo-bar',
},
+ type: 'GroupMember',
user: {
id: 123,
name: 'Administrator',
@@ -19,6 +20,7 @@ export const member = {
avatarUrl: 'https://www.gravatar.com/avatar/4816142ef496f956a277bedf1a40607b?s=80&d=identicon',
blocked: false,
twoFactorEnabled: false,
+ oncallSchedules: [{ name: 'schedule 1' }],
},
id: 238,
createdAt: '2020-07-17T16:22:46.923Z',
diff --git a/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js b/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
new file mode 100644
index 00000000000..eaa3b1c5d53
--- /dev/null
+++ b/spec/frontend/merge_conflicts/components/merge_conflict_resolver_app_spec.js
@@ -0,0 +1,131 @@
+import { GlSprintf } from '@gitlab/ui';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import InlineConflictLines from '~/merge_conflicts/components/inline_conflict_lines.vue';
+import ParallelConflictLines from '~/merge_conflicts/components/parallel_conflict_lines.vue';
+import component from '~/merge_conflicts/merge_conflict_resolver_app.vue';
+import { createStore } from '~/merge_conflicts/store';
+import { decorateFiles } from '~/merge_conflicts/utils';
+import { conflictsMock } from '../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Merge Conflict Resolver App', () => {
+ let wrapper;
+ let store;
+
+ const decoratedMockFiles = decorateFiles(conflictsMock.files);
+
+ const mountComponent = () => {
+ wrapper = shallowMount(component, {
+ store,
+ stubs: { GlSprintf },
+ provide() {
+ return {
+ mergeRequestPath: 'foo',
+ sourceBranchPath: 'foo',
+ resolveConflictsPath: 'bar',
+ };
+ },
+ });
+ };
+
+ beforeEach(() => {
+ store = createStore();
+ store.commit('SET_LOADING_STATE', false);
+ store.dispatch('setConflictsData', conflictsMock);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findConflictsCount = () => wrapper.find('[data-testid="conflicts-count"]');
+ const findFiles = () => wrapper.findAll('[data-testid="files"]');
+ const findFileHeader = (w = wrapper) => w.find('[data-testid="file-name"]');
+ const findFileInteractiveButton = (w = wrapper) => w.find('[data-testid="interactive-button"]');
+ const findFileInlineButton = (w = wrapper) => w.find('[data-testid="inline-button"]');
+ const findSideBySideButton = () => wrapper.find('[data-testid="side-by-side"]');
+ const findInlineConflictLines = (w = wrapper) => w.find(InlineConflictLines);
+ const findParallelConflictLines = (w = wrapper) => w.find(ParallelConflictLines);
+ const findCommitMessageTextarea = () => wrapper.find('[data-testid="commit-message"]');
+
+  it('shows the number of conflicts', () => {
+ mountComponent();
+
+ const title = findConflictsCount();
+
+ expect(title.exists()).toBe(true);
+ expect(title.text().trim()).toBe('Showing 3 conflicts between test-conflicts and master');
+ });
+
+ describe('files', () => {
+ it('shows one file area for each file', () => {
+ mountComponent();
+
+ expect(findFiles()).toHaveLength(conflictsMock.files.length);
+ });
+
+ it('has the appropriate file header', () => {
+ mountComponent();
+
+ const fileHeader = findFileHeader(findFiles().at(0));
+
+ expect(fileHeader.text()).toBe(decoratedMockFiles[0].filePath);
+ });
+
+ describe('editing', () => {
+ it('interactive mode is the default', () => {
+ mountComponent();
+
+ const interactiveButton = findFileInteractiveButton(findFiles().at(0));
+ const inlineButton = findFileInlineButton(findFiles().at(0));
+
+ expect(interactiveButton.classes('active')).toBe(true);
+ expect(inlineButton.classes('active')).toBe(false);
+ });
+
+      it('clicking inline sets inline as the default', async () => {
+ mountComponent();
+
+ const inlineButton = findFileInlineButton(findFiles().at(0));
+ expect(inlineButton.classes('active')).toBe(false);
+
+ inlineButton.trigger('click');
+ await wrapper.vm.$nextTick();
+
+ expect(inlineButton.classes('active')).toBe(true);
+ });
+
+      it('inline mode shows an inline-conflict-lines component', () => {
+ mountComponent();
+
+ const inlineConflictLinesComponent = findInlineConflictLines(findFiles().at(0));
+
+ expect(inlineConflictLinesComponent.exists()).toBe(true);
+ expect(inlineConflictLinesComponent.props('file')).toEqual(decoratedMockFiles[0]);
+ });
+
+ it('parallel mode shows a parallel-conflict-lines', async () => {
+ mountComponent();
+
+ findSideBySideButton().trigger('click');
+ await wrapper.vm.$nextTick();
+
+ const parallelConflictLinesComponent = findParallelConflictLines(findFiles().at(0));
+
+ expect(parallelConflictLinesComponent.exists()).toBe(true);
+ expect(parallelConflictLinesComponent.props('file')).toEqual(decoratedMockFiles[0]);
+ });
+ });
+ });
+
+ describe('submit form', () => {
+ it('contains a commit message textarea', () => {
+ mountComponent();
+
+ expect(findCommitMessageTextarea().exists()).toBe(true);
+ });
+ });
+});
diff --git a/spec/frontend/merge_conflicts/mock_data.js b/spec/frontend/merge_conflicts/mock_data.js
new file mode 100644
index 00000000000..8948f2a3c1e
--- /dev/null
+++ b/spec/frontend/merge_conflicts/mock_data.js
@@ -0,0 +1,340 @@
+export const conflictsMock = {
+ target_branch: 'master',
+ source_branch: 'test-conflicts',
+ commit_sha: '6dbf385a3c7bf01e09b5d2d9e5d72f8fb8c590a3',
+ commit_message:
+ "Merge branch 'master' into 'test-conflicts'\n\n# Conflicts:\n# .gitlab-ci.yml\n# README.md",
+ files: [
+ {
+ old_path: '.gitlab-ci.yml',
+ new_path: '.gitlab-ci.yml',
+ blob_icon: 'doc-text',
+ blob_path:
+ '/gitlab-org/gitlab-test/-/blob/6dbf385a3c7bf01e09b5d2d9e5d72f8fb8c590a3/.gitlab-ci.yml',
+ sections: [
+ {
+ conflict: false,
+ lines: [
+ {
+ line_code: null,
+ type: 'match',
+ old_line: null,
+ new_line: null,
+ text: '@@ -7,10 +7,11 @@ upload:',
+ meta_data: { old_pos: 7, new_pos: 7 },
+ rich_text: '@@ -7,10 +7,11 @@ upload:',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: '587d266bb27a4dc3022bbed44dfa19849df3044c_7_7',
+ type: null,
+ old_line: 7,
+ new_line: 7,
+ text: ' stage: upload',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC7" class="line" lang="yaml"\u003e \u003cspan class="na"\u003estage\u003c/span\u003e\u003cspan class="pi"\u003e:\u003c/span\u003e \u003cspan class="s"\u003eupload\u003c/span\u003e\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: '587d266bb27a4dc3022bbed44dfa19849df3044c_8_8',
+ type: null,
+ old_line: 8,
+ new_line: 8,
+ text: ' script:',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC8" class="line" lang="yaml"\u003e \u003cspan class="na"\u003escript\u003c/span\u003e\u003cspan class="pi"\u003e:\u003c/span\u003e\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: '587d266bb27a4dc3022bbed44dfa19849df3044c_9_9',
+ type: null,
+ old_line: 9,
+ new_line: 9,
+ text:
+ // eslint-disable-next-line no-template-curly-in-string
+ ' - \'curl --header "JOB-TOKEN: $CI_JOB_TOKEN" --upload-file README.md ${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/my_package/0.0.1/file.txt\'',
+ meta_data: null,
+ rich_text:
+ // eslint-disable-next-line no-template-curly-in-string
+ '\u003cspan id="LC9" class="line" lang="yaml"\u003e \u003cspan class="pi"\u003e-\u003c/span\u003e \u003cspan class="s1"\u003e\'\u003c/span\u003e\u003cspan class="s"\u003ecurl\u003c/span\u003e\u003cspan class="nv"\u003e \u003c/span\u003e\u003cspan class="s"\u003e--header\u003c/span\u003e\u003cspan class="nv"\u003e \u003c/span\u003e\u003cspan class="s"\u003e"JOB-TOKEN:\u003c/span\u003e\u003cspan class="nv"\u003e \u003c/span\u003e\u003cspan class="s"\u003e$CI_JOB_TOKEN"\u003c/span\u003e\u003cspan class="nv"\u003e \u003c/span\u003e\u003cspan class="s"\u003e--upload-file\u003c/span\u003e\u003cspan class="nv"\u003e \u003c/span\u003e\u003cspan class="s"\u003eREADME.md\u003c/span\u003e\u003cspan class="nv"\u003e \u003c/span\u003e\u003cspan class="s"\u003e${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/packages/generic/my_package/0.0.1/file.txt\'\u003c/span\u003e\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ ],
+ },
+ {
+ conflict: true,
+ lines: [
+ {
+ line_code: '587d266bb27a4dc3022bbed44dfa19849df3044c_10_10',
+ type: 'new',
+ old_line: null,
+ new_line: 10,
+ text: '# some new comments',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC10" class="line" lang="yaml"\u003e\u003cspan class="c1"\u003e# some new comments\u003c/span\u003e\u003c/span\u003e',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: '587d266bb27a4dc3022bbed44dfa19849df3044c_10_11',
+ type: 'old',
+ old_line: 10,
+ new_line: null,
+ text: '# a different comment',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC10" class="line" lang="yaml"\u003e\u003cspan class="c1"\u003e# a different comment\u003c/span\u003e\u003c/span\u003e',
+ can_receive_suggestion: false,
+ },
+ ],
+ id: '587d266bb27a4dc3022bbed44dfa19849df3044c_10_10',
+ },
+ ],
+ type: 'text',
+ content_path:
+ '/gitlab-org/gitlab-test/-/merge_requests/2/conflict_for_path?new_path=.gitlab-ci.yml\u0026old_path=.gitlab-ci.yml',
+ },
+ {
+ old_path: 'README.md',
+ new_path: 'README.md',
+ blob_icon: 'doc-text',
+ blob_path:
+ '/gitlab-org/gitlab-test/-/blob/6dbf385a3c7bf01e09b5d2d9e5d72f8fb8c590a3/README.md',
+ sections: [
+ {
+ conflict: false,
+ lines: [
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_1_1',
+ type: null,
+ old_line: 1,
+ new_line: 1,
+ text: '- 1',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC1" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 1\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_2_2',
+ type: null,
+ old_line: 2,
+ new_line: 2,
+ text: '- 2',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC2" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 2\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_3_3',
+ type: null,
+ old_line: 3,
+ new_line: 3,
+ text: '- 3',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC3" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 3\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ ],
+ },
+ {
+ conflict: true,
+ lines: [
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_4_4',
+ type: 'new',
+ old_line: null,
+ new_line: 4,
+ text: '- 4c',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC4" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 4c\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_4_5',
+ type: 'old',
+ old_line: 4,
+ new_line: null,
+ text: '- 4b',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC4" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 4b\u003c/span\u003e\n',
+ can_receive_suggestion: false,
+ },
+ ],
+ id: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_4_4',
+ },
+ {
+ conflict: false,
+ lines: [
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_5_5',
+ type: null,
+ old_line: 5,
+ new_line: 5,
+ text: '- 5',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC5" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 5\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_6_6',
+ type: null,
+ old_line: 6,
+ new_line: 6,
+ text: '- 6',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC6" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 6\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_7_7',
+ type: null,
+ old_line: 7,
+ new_line: 7,
+ text: '- 7',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC7" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 7\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ ],
+ },
+ {
+ conflict: false,
+ lines: [
+ {
+ line_code: null,
+ type: 'match',
+ old_line: null,
+ new_line: null,
+ text: '@@ -9,15 +9,15 @@',
+ meta_data: { old_pos: 9, new_pos: 9 },
+ rich_text: '@@ -9,15 +9,15 @@',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_9_9',
+ type: null,
+ old_line: 9,
+ new_line: 9,
+ text: '- 9',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC9" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 9\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_10_10',
+ type: null,
+ old_line: 10,
+ new_line: 10,
+ text: '- 10',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC10" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 10\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_11_11',
+ type: null,
+ old_line: 11,
+ new_line: 11,
+ text: '- 11',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC11" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 11\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ ],
+ },
+ {
+ conflict: true,
+ lines: [
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_12_12',
+ type: 'new',
+ old_line: null,
+ new_line: 12,
+ text: '- 12c',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC12" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 12c\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_12_13',
+ type: 'old',
+ old_line: 12,
+ new_line: null,
+ text: '- 12b',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC12" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 12b\u003c/span\u003e\n',
+ can_receive_suggestion: false,
+ },
+ ],
+ id: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_12_12',
+ },
+ {
+ conflict: false,
+ lines: [
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_13_13',
+ type: null,
+ old_line: 13,
+ new_line: 13,
+ text: '- 13',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC13" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 13\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_14_14',
+ type: null,
+ old_line: 14,
+ new_line: 14,
+ text: '- 14 ',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC14" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 14 \u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: '8ec9a00bfd09b3190ac6b22251dbb1aa95a0579d_15_15',
+ type: null,
+ old_line: 15,
+ new_line: 15,
+ text: '- 15',
+ meta_data: null,
+ rich_text:
+ '\u003cspan id="LC15" class="line" lang="markdown"\u003e\u003cspan class="p"\u003e-\u003c/span\u003e 15\u003c/span\u003e\n',
+ can_receive_suggestion: true,
+ },
+ {
+ line_code: null,
+ type: 'match',
+ old_line: null,
+ new_line: null,
+ text: '',
+ meta_data: { old_pos: 15, new_pos: 15 },
+ rich_text: '',
+ can_receive_suggestion: true,
+ },
+ ],
+ },
+ ],
+ type: 'text',
+ content_path:
+ '/gitlab-org/gitlab-test/-/merge_requests/2/conflict_for_path?new_path=README.md\u0026old_path=README.md',
+ },
+ ],
+};
diff --git a/spec/frontend/merge_conflicts/store/actions_spec.js b/spec/frontend/merge_conflicts/store/actions_spec.js
index 352f1783b87..8fa8765a9f9 100644
--- a/spec/frontend/merge_conflicts/store/actions_spec.js
+++ b/spec/frontend/merge_conflicts/store/actions_spec.js
@@ -1,5 +1,6 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
+import Cookies from 'js-cookie';
import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import testAction from 'helpers/vuex_action_helper';
import createFlash from '~/flash';
@@ -10,6 +11,7 @@ import { restoreFileLinesState, markLine, decorateFiles } from '~/merge_conflict
jest.mock('~/flash.js');
jest.mock('~/merge_conflicts/utils');
+jest.mock('js-cookie');
describe('merge conflicts actions', () => {
let mock;
@@ -80,6 +82,25 @@ describe('merge conflicts actions', () => {
});
});
+ describe('setConflictsData', () => {
+    it('commits SET_CONFLICTS_DATA with the decorated files', (done) => {
+ decorateFiles.mockReturnValue([{ bar: 'baz' }]);
+ testAction(
+ actions.setConflictsData,
+ { files, foo: 'bar' },
+ {},
+ [
+ {
+ type: types.SET_CONFLICTS_DATA,
+ payload: { foo: 'bar', files: [{ bar: 'baz' }] },
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
describe('submitResolvedConflicts', () => {
useMockLocationHelper();
const resolveConflictsPath = 'resolve/conflicts/path/mock';
@@ -120,21 +141,109 @@ describe('merge conflicts actions', () => {
});
});
- describe('setConflictsData', () => {
- it('INTERACTIVE_RESOLVE_MODE updates the correct file ', (done) => {
- decorateFiles.mockReturnValue([{ bar: 'baz' }]);
+ describe('setLoadingState', () => {
+ it('commits the right mutation', () => {
testAction(
- actions.setConflictsData,
- { files, foo: 'bar' },
+ actions.setLoadingState,
+ true,
{},
[
{
- type: types.SET_CONFLICTS_DATA,
- payload: { foo: 'bar', files: [{ bar: 'baz' }] },
+ type: types.SET_LOADING_STATE,
+ payload: true,
+ },
+ ],
+ [],
+ );
+ });
+ });
+
+ describe('setErrorState', () => {
+ it('commits the right mutation', () => {
+ testAction(
+ actions.setErrorState,
+ true,
+ {},
+ [
+ {
+ type: types.SET_ERROR_STATE,
+ payload: true,
+ },
+ ],
+ [],
+ );
+ });
+ });
+
+ describe('setFailedRequest', () => {
+ it('commits the right mutation', () => {
+ testAction(
+ actions.setFailedRequest,
+ 'errors in the request',
+ {},
+ [
+ {
+ type: types.SET_FAILED_REQUEST,
+ payload: 'errors in the request',
+ },
+ ],
+ [],
+ );
+ });
+ });
+
+ describe('setViewType', () => {
+ it('commits the right mutation', (done) => {
+ const payload = 'viewType';
+ testAction(
+ actions.setViewType,
+ payload,
+ {},
+ [
+ {
+ type: types.SET_VIEW_TYPE,
+ payload,
+ },
+ ],
+ [],
+ () => {
+ expect(Cookies.set).toHaveBeenCalledWith('diff_view', payload);
+ done();
+ },
+ );
+ });
+ });
+
+ describe('setSubmitState', () => {
+ it('commits the right mutation', () => {
+ testAction(
+ actions.setSubmitState,
+ true,
+ {},
+ [
+ {
+ type: types.SET_SUBMIT_STATE,
+ payload: true,
+ },
+ ],
+ [],
+ );
+ });
+ });
+
+ describe('updateCommitMessage', () => {
+ it('commits the right mutation', () => {
+ testAction(
+ actions.updateCommitMessage,
+ 'some message',
+ {},
+ [
+ {
+ type: types.UPDATE_CONFLICTS_DATA,
+ payload: { commitMessage: 'some message' },
},
],
[],
- done,
);
});
});
diff --git a/spec/frontend/merge_conflicts/store/getters_spec.js b/spec/frontend/merge_conflicts/store/getters_spec.js
new file mode 100644
index 00000000000..7a26a2bba6a
--- /dev/null
+++ b/spec/frontend/merge_conflicts/store/getters_spec.js
@@ -0,0 +1,187 @@
+import {
+ CONFLICT_TYPES,
+ EDIT_RESOLVE_MODE,
+ INTERACTIVE_RESOLVE_MODE,
+} from '~/merge_conflicts/constants';
+import * as getters from '~/merge_conflicts/store/getters';
+import realState from '~/merge_conflicts/store/state';
+
+describe('Merge Conflicts getters', () => {
+ let state;
+
+ beforeEach(() => {
+ state = realState();
+ });
+
+ describe('getConflictsCount', () => {
+ it('returns zero when there are no files', () => {
+ state.conflictsData.files = [];
+
+ expect(getters.getConflictsCount(state)).toBe(0);
+ });
+
+    it(`counts the number of conflicting sections in files of type ${CONFLICT_TYPES.TEXT}`, () => {
+ state.conflictsData.files = [
+ { sections: [{ conflict: true }], type: CONFLICT_TYPES.TEXT },
+ { sections: [{ conflict: true }, { conflict: true }], type: CONFLICT_TYPES.TEXT },
+ ];
+ expect(getters.getConflictsCount(state)).toBe(3);
+ });
+
+    it(`counts one per file for files not of type ${CONFLICT_TYPES.TEXT}`, () => {
+ state.conflictsData.files = [
+ { sections: [{ conflict: true }], type: '' },
+ { sections: [{ conflict: true }, { conflict: true }], type: '' },
+ ];
+ expect(getters.getConflictsCount(state)).toBe(2);
+ });
+ });
+
+ describe('getConflictsCountText', () => {
+    it('with one conflict', () => {
+ const getConflictsCount = 1;
+
+ expect(getters.getConflictsCountText(state, { getConflictsCount })).toBe('1 conflict');
+ });
+
+    it('with more than one conflict', () => {
+ const getConflictsCount = 3;
+
+ expect(getters.getConflictsCountText(state, { getConflictsCount })).toBe('3 conflicts');
+ });
+ });
+
+ describe('isReadyToCommit', () => {
+    it('returns false when isSubmitting is true', () => {
+ state.conflictsData.files = [];
+ state.isSubmitting = true;
+ state.conflictsData.commitMessage = 'foo';
+
+ expect(getters.isReadyToCommit(state)).toBe(false);
+ });
+
+    it('returns false when there is no commit message', () => {
+ state.conflictsData.files = [];
+ state.isSubmitting = false;
+ state.conflictsData.commitMessage = '';
+
+ expect(getters.isReadyToCommit(state)).toBe(false);
+ });
+
+    it('returns true when all conflicts are resolved, it is not submitting, and there is a commitMessage', () => {
+ state.conflictsData.files = [
+ {
+ resolveMode: INTERACTIVE_RESOLVE_MODE,
+ type: CONFLICT_TYPES.TEXT,
+ sections: [{ conflict: true }],
+ resolutionData: { foo: 'bar' },
+ },
+ ];
+ state.isSubmitting = false;
+ state.conflictsData.commitMessage = 'foo';
+
+ expect(getters.isReadyToCommit(state)).toBe(true);
+ });
+
+ describe('unresolved', () => {
+      it(`files with resolveMode set to ${EDIT_RESOLVE_MODE} and empty content count as unresolved`, () => {
+ state.conflictsData.files = [
+ { content: '', resolveMode: EDIT_RESOLVE_MODE },
+ { content: 'foo' },
+ ];
+ state.isSubmitting = false;
+ state.conflictsData.commitMessage = 'foo';
+
+ expect(getters.isReadyToCommit(state)).toBe(false);
+ });
+
+      it(`in files with resolveMode = ${INTERACTIVE_RESOLVE_MODE}, resolved conflicts are counted against unresolved ones`, () => {
+ state.conflictsData.files = [
+ {
+ resolveMode: INTERACTIVE_RESOLVE_MODE,
+ type: CONFLICT_TYPES.TEXT,
+ sections: [{ conflict: true }],
+ resolutionData: {},
+ },
+ ];
+ state.isSubmitting = false;
+ state.conflictsData.commitMessage = 'foo';
+
+ expect(getters.isReadyToCommit(state)).toBe(false);
+ });
+ });
+ });
+
+ describe('getCommitButtonText', () => {
+    it('when it is submitting', () => {
+ state.isSubmitting = true;
+ expect(getters.getCommitButtonText(state)).toBe('Committing...');
+ });
+
+    it('when it is not submitting', () => {
+ expect(getters.getCommitButtonText(state)).toBe('Commit to source branch');
+ });
+ });
+
+ describe('getCommitData', () => {
+ it('returns commit data', () => {
+ const baseFile = {
+ new_path: 'new_path',
+ old_path: 'new_path',
+ };
+
+ state.conflictsData.commitMessage = 'foo';
+ state.conflictsData.files = [
+ {
+ ...baseFile,
+ resolveMode: INTERACTIVE_RESOLVE_MODE,
+ type: CONFLICT_TYPES.TEXT,
+ sections: [{ conflict: true }],
+ resolutionData: { bar: 'baz' },
+ },
+ {
+ ...baseFile,
+ resolveMode: EDIT_RESOLVE_MODE,
+ type: CONFLICT_TYPES.TEXT,
+ content: 'resolve_mode_content',
+ },
+ {
+ ...baseFile,
+ type: CONFLICT_TYPES.TEXT_EDITOR,
+ content: 'text_editor_content',
+ },
+ ];
+
+ expect(getters.getCommitData(state)).toStrictEqual({
+ commit_message: 'foo',
+ files: [
+ { ...baseFile, sections: { bar: 'baz' } },
+ { ...baseFile, content: 'resolve_mode_content' },
+ { ...baseFile, content: 'text_editor_content' },
+ ],
+ });
+ });
+ });
+
+ describe('fileTextTypePresent', () => {
+ it(`returns true if there is a file with type ${CONFLICT_TYPES.TEXT}`, () => {
+ state.conflictsData.files = [{ type: CONFLICT_TYPES.TEXT }];
+
+ expect(getters.fileTextTypePresent(state)).toBe(true);
+ });
+ it(`returns false if there is no file with type ${CONFLICT_TYPES.TEXT}`, () => {
+ state.conflictsData.files = [{ type: CONFLICT_TYPES.TEXT_EDITOR }];
+
+ expect(getters.fileTextTypePresent(state)).toBe(false);
+ });
+ });
+
+ describe('getFileIndex', () => {
+    it(`returns the index of a file from its blob path`, () => {
+ const blobPath = 'blobPath/foo';
+ state.conflictsData.files = [{ foo: 'bar' }, { baz: 'foo', blobPath }];
+
+ expect(getters.getFileIndex(state)({ blobPath })).toBe(1);
+ });
+ });
+});
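A note on the rule these getConflictsCount specs encode: text files contribute one conflict per conflicting section, while any other file type contributes one conflict per file. A minimal standalone sketch of that counting rule (illustrative only, not the store's actual getter; it assumes CONFLICT_TYPES.TEXT resolves to the string 'text', as the mock data suggests):

// Sketch of the counting behaviour described by the specs above.
const TEXT = 'text'; // assumed value of CONFLICT_TYPES.TEXT

function countConflicts(files) {
  return files.reduce((total, file) => {
    if (file.type === TEXT) {
      // text files: one count per conflicting section
      return total + file.sections.filter((section) => section.conflict).length;
    }
    // any other type: one count per file
    return total + 1;
  }, 0);
}

// countConflicts([]) === 0
// countConflicts([
//   { type: 'text', sections: [{ conflict: true }] },
//   { type: 'text', sections: [{ conflict: true }, { conflict: true }] },
// ]) === 3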
diff --git a/spec/frontend/merge_conflicts/store/mutations_spec.js b/spec/frontend/merge_conflicts/store/mutations_spec.js
new file mode 100644
index 00000000000..1476f0c5369
--- /dev/null
+++ b/spec/frontend/merge_conflicts/store/mutations_spec.js
@@ -0,0 +1,99 @@
+import { VIEW_TYPES } from '~/merge_conflicts/constants';
+import * as types from '~/merge_conflicts/store/mutation_types';
+import mutations from '~/merge_conflicts/store/mutations';
+import realState from '~/merge_conflicts/store/state';
+
+describe('Mutations merge conflicts store', () => {
+ let mockState;
+
+ beforeEach(() => {
+ mockState = realState();
+ });
+
+ describe('SET_LOADING_STATE', () => {
+ it('should set loading', () => {
+ mutations[types.SET_LOADING_STATE](mockState, true);
+
+ expect(mockState.isLoading).toBe(true);
+ });
+ });
+
+ describe('SET_ERROR_STATE', () => {
+ it('should set hasError', () => {
+ mutations[types.SET_ERROR_STATE](mockState, true);
+
+ expect(mockState.hasError).toBe(true);
+ });
+ });
+
+ describe('SET_FAILED_REQUEST', () => {
+ it('should set hasError and errorMessage', () => {
+ const payload = 'message';
+ mutations[types.SET_FAILED_REQUEST](mockState, payload);
+
+ expect(mockState.hasError).toBe(true);
+ expect(mockState.conflictsData.errorMessage).toBe(payload);
+ });
+ });
+
+ describe('SET_VIEW_TYPE', () => {
+ it('should set diffView', () => {
+ mutations[types.SET_VIEW_TYPE](mockState, VIEW_TYPES.INLINE);
+
+ expect(mockState.diffView).toBe(VIEW_TYPES.INLINE);
+ });
+
+    it(`sets isParallel if the payload is ${VIEW_TYPES.PARALLEL}`, () => {
+ mutations[types.SET_VIEW_TYPE](mockState, VIEW_TYPES.PARALLEL);
+
+ expect(mockState.isParallel).toBe(true);
+ });
+ });
+
+ describe('SET_SUBMIT_STATE', () => {
+ it('should set isSubmitting', () => {
+ mutations[types.SET_SUBMIT_STATE](mockState, true);
+
+ expect(mockState.isSubmitting).toBe(true);
+ });
+ });
+
+ describe('SET_CONFLICTS_DATA', () => {
+ it('should set conflictsData', () => {
+ mutations[types.SET_CONFLICTS_DATA](mockState, {
+ files: [],
+ commit_message: 'foo',
+ source_branch: 'bar',
+ target_branch: 'baz',
+ commit_sha: '123456789',
+ });
+
+ expect(mockState.conflictsData).toStrictEqual({
+ files: [],
+ commitMessage: 'foo',
+ sourceBranch: 'bar',
+ targetBranch: 'baz',
+ shortCommitSha: '1234567',
+ });
+ });
+ });
+
+ describe('UPDATE_CONFLICTS_DATA', () => {
+ it('should update existing conflicts data', () => {
+ const payload = { foo: 'bar' };
+ mutations[types.UPDATE_CONFLICTS_DATA](mockState, payload);
+
+ expect(mockState.conflictsData).toStrictEqual(payload);
+ });
+ });
+
+ describe('UPDATE_FILE', () => {
+ it('should update a file based on its index', () => {
+ mockState.conflictsData.files = [{ foo: 'bar' }, { baz: 'bar' }];
+
+ mutations[types.UPDATE_FILE](mockState, { file: { new: 'one' }, index: 1 });
+
+ expect(mockState.conflictsData.files).toStrictEqual([{ foo: 'bar' }, { new: 'one' }]);
+ });
+ });
+});
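The SET_CONFLICTS_DATA expectation above implies a snake_case-to-camelCase mapping plus a seven-character short SHA. A minimal sketch of that mapping (illustrative only, not the store's actual mutation):

// Sketch of the payload-to-state mapping asserted in mutations_spec.js.
function mapConflictsData({ files, commit_message, source_branch, target_branch, commit_sha }) {
  return {
    files,
    commitMessage: commit_message,
    sourceBranch: source_branch,
    targetBranch: target_branch,
    shortCommitSha: commit_sha.slice(0, 7), // '123456789' -> '1234567'
  };
}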
diff --git a/spec/frontend/merge_conflicts/utils_spec.js b/spec/frontend/merge_conflicts/utils_spec.js
new file mode 100644
index 00000000000..5bf7ecf8cfe
--- /dev/null
+++ b/spec/frontend/merge_conflicts/utils_spec.js
@@ -0,0 +1,106 @@
+import * as utils from '~/merge_conflicts/utils';
+
+describe('merge conflicts utils', () => {
+ describe('getFilePath', () => {
+ it('returns new path if they are the same', () => {
+ expect(utils.getFilePath({ new_path: 'a', old_path: 'a' })).toBe('a');
+ });
+
+ it('returns concatenated paths if they are different', () => {
+ expect(utils.getFilePath({ new_path: 'b', old_path: 'a' })).toBe('a → b');
+ });
+ });
+
+ describe('checkLineLengths', () => {
+    it('adds empty lines to the left when the right side has more lines', () => {
+ const result = utils.checkLineLengths({ left: [1], right: [1, 2] });
+
+ expect(result.left).toHaveLength(result.right.length);
+ expect(result.left).toStrictEqual([1, { lineType: 'emptyLine', richText: '' }]);
+ });
+
+    it('adds empty lines to the right when the left side has more lines', () => {
+ const result = utils.checkLineLengths({ left: [1, 2], right: [1] });
+
+ expect(result.right).toHaveLength(result.left.length);
+ expect(result.right).toStrictEqual([1, { lineType: 'emptyLine', richText: '' }]);
+ });
+ });
+
+ describe('getHeadHeaderLine', () => {
+ it('decorates the id', () => {
+ expect(utils.getHeadHeaderLine(1)).toStrictEqual({
+ buttonTitle: 'Use ours',
+ id: 1,
+ isHead: true,
+ isHeader: true,
+ isSelected: false,
+ isUnselected: false,
+ richText: 'HEAD//our changes',
+ section: 'head',
+ type: 'new',
+ });
+ });
+ });
+
+ describe('decorateLineForInlineView', () => {
+ it.each`
+ type | truthyProp
+ ${'new'} | ${'isHead'}
+ ${'old'} | ${'isOrigin'}
+ ${'match'} | ${'hasMatch'}
+ `(
+ 'when the type is $type decorates the line with $truthyProp set as true',
+ ({ type, truthyProp }) => {
+ expect(utils.decorateLineForInlineView({ type, rich_text: 'rich' }, 1, true)).toStrictEqual(
+ {
+ id: 1,
+ hasConflict: true,
+ isHead: false,
+ isOrigin: false,
+ hasMatch: false,
+ richText: 'rich',
+ isSelected: false,
+ isUnselected: false,
+ [truthyProp]: true,
+ },
+ );
+ },
+ );
+ });
+
+ describe('getLineForParallelView', () => {
+ it.todo('should return a proper value');
+ });
+
+ describe('getOriginHeaderLine', () => {
+ it('decorates the id', () => {
+ expect(utils.getOriginHeaderLine(1)).toStrictEqual({
+ buttonTitle: 'Use theirs',
+ id: 1,
+ isHeader: true,
+ isOrigin: true,
+ isSelected: false,
+ isUnselected: false,
+ richText: 'origin//their changes',
+ section: 'origin',
+ type: 'old',
+ });
+ });
+ });
+ describe('setInlineLine', () => {
+ it.todo('should return a proper value');
+ });
+ describe('setParallelLine', () => {
+ it.todo('should return a proper value');
+ });
+ describe('decorateFiles', () => {
+ it.todo('should return a proper value');
+ });
+ describe('restoreFileLinesState', () => {
+ it.todo('should return a proper value');
+ });
+ describe('markLine', () => {
+ it.todo('should return a proper value');
+ });
+});
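As the getFilePath specs above show, identical old and new paths collapse to a single path, while a rename is rendered as 'old → new'. A standalone sketch of that behaviour (illustrative only; the real helper lives in ~/merge_conflicts/utils):

// Sketch of the path label rule asserted in utils_spec.js.
function getFilePath({ old_path: oldPath, new_path: newPath }) {
  return oldPath === newPath ? newPath : `${oldPath} → ${newPath}`;
}

// getFilePath({ old_path: 'a', new_path: 'a' }) === 'a'
// getFilePath({ old_path: 'a', new_path: 'b' }) === 'a → b'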
diff --git a/spec/frontend/merge_request/components/status_box_spec.js b/spec/frontend/merge_request/components/status_box_spec.js
index 9212ae19c2d..de0f3574ab2 100644
--- a/spec/frontend/merge_request/components/status_box_spec.js
+++ b/spec/frontend/merge_request/components/status_box_spec.js
@@ -27,7 +27,7 @@ const testCases = [
name: 'Closed',
state: 'closed',
class: 'status-box-mr-closed',
- icon: 'close',
+ icon: 'issue-close',
},
{
name: 'Merged',
diff --git a/spec/frontend/mini_pipeline_graph_dropdown_spec.js b/spec/frontend/mini_pipeline_graph_dropdown_spec.js
deleted file mode 100644
index ccd5a4ea142..00000000000
--- a/spec/frontend/mini_pipeline_graph_dropdown_spec.js
+++ /dev/null
@@ -1,104 +0,0 @@
-import MockAdapter from 'axios-mock-adapter';
-import $ from 'jquery';
-import waitForPromises from 'helpers/wait_for_promises';
-import axios from '~/lib/utils/axios_utils';
-import MiniPipelineGraph from '~/mini_pipeline_graph_dropdown';
-
-describe('Mini Pipeline Graph Dropdown', () => {
- beforeEach(() => {
- loadFixtures('static/mini_dropdown_graph.html');
- });
-
- describe('When is initialized', () => {
- it('should initialize without errors when no options are given', () => {
- const miniPipelineGraph = new MiniPipelineGraph();
-
- expect(miniPipelineGraph.dropdownListSelector).toEqual('.js-builds-dropdown-container');
- });
-
- it('should set the container as the given prop', () => {
- const container = '.foo';
-
- const miniPipelineGraph = new MiniPipelineGraph({ container });
-
- expect(miniPipelineGraph.container).toEqual(container);
- });
- });
-
- describe('When dropdown is clicked', () => {
- let mock;
-
- beforeEach(() => {
- mock = new MockAdapter(axios);
- });
-
- afterEach(() => {
- mock.restore();
- });
-
- it('should call getBuildsList', () => {
- const getBuildsListSpy = jest
- .spyOn(MiniPipelineGraph.prototype, 'getBuildsList')
- .mockImplementation(() => {});
-
- new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents();
-
- document.querySelector('.js-builds-dropdown-button').click();
-
- expect(getBuildsListSpy).toHaveBeenCalled();
- });
-
- it('should make a request to the endpoint provided in the html', () => {
- const ajaxSpy = jest.spyOn(axios, 'get');
-
- mock.onGet('foobar').reply(200, {
- html: '',
- });
-
- new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents();
-
- document.querySelector('.js-builds-dropdown-button').click();
-
- expect(ajaxSpy.mock.calls[0][0]).toEqual('foobar');
- });
-
- it('should not close when user uses cmd/ctrl + click', (done) => {
- mock.onGet('foobar').reply(200, {
- html: `<li>
- <a class="mini-pipeline-graph-dropdown-item" href="#">
- <span class="ci-status-icon ci-status-icon-failed"></span>
- <span>build</span>
- </a>
- <a class="ci-action-icon-wrapper js-ci-action-icon" href="#"></a>
- </li>`,
- });
- new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents();
-
- document.querySelector('.js-builds-dropdown-button').click();
-
- waitForPromises()
- .then(() => {
- document.querySelector('a.mini-pipeline-graph-dropdown-item').click();
- })
- .then(waitForPromises)
- .then(() => {
- expect($('.js-builds-dropdown-list').is(':visible')).toEqual(true);
- })
- .then(done)
- .catch(done.fail);
- });
-
- it('should close the dropdown when request returns an error', (done) => {
- mock.onGet('foobar').networkError();
-
- new MiniPipelineGraph({ container: '.js-builds-dropdown-tests' }).bindEvents();
-
- document.querySelector('.js-builds-dropdown-button').click();
-
- setImmediate(() => {
- expect($('.js-builds-dropdown-tests .dropdown').hasClass('open')).toEqual(false);
- done();
- });
- });
- });
-});
diff --git a/spec/frontend/mocks/ce/diffs/workers/tree_worker.js b/spec/frontend/mocks/ce/diffs/workers/tree_worker.js
deleted file mode 100644
index 5532a22f8e6..00000000000
--- a/spec/frontend/mocks/ce/diffs/workers/tree_worker.js
+++ /dev/null
@@ -1 +0,0 @@
-export { default } from 'helpers/web_worker_mock';
diff --git a/spec/frontend/mocks/ce/ide/lib/diff/diff_worker.js b/spec/frontend/mocks/ce/ide/lib/diff/diff_worker.js
deleted file mode 100644
index 5532a22f8e6..00000000000
--- a/spec/frontend/mocks/ce/ide/lib/diff/diff_worker.js
+++ /dev/null
@@ -1 +0,0 @@
-export { default } from 'helpers/web_worker_mock';
diff --git a/spec/frontend/mr_notes/stores/actions_spec.js b/spec/frontend/mr_notes/stores/actions_spec.js
new file mode 100644
index 00000000000..c6578453d85
--- /dev/null
+++ b/spec/frontend/mr_notes/stores/actions_spec.js
@@ -0,0 +1,92 @@
+import MockAdapter from 'axios-mock-adapter';
+
+import testAction from 'helpers/vuex_action_helper';
+import axios from '~/lib/utils/axios_utils';
+
+import { setEndpoints, setMrMetadata, fetchMrMetadata } from '~/mr_notes/stores/actions';
+import mutationTypes from '~/mr_notes/stores/mutation_types';
+
+describe('MR Notes Mutator Actions', () => {
+ describe('setEndpoints', () => {
+ it('should trigger the SET_ENDPOINTS state mutation', (done) => {
+ const endpoints = { endpointA: 'a' };
+
+ testAction(
+ setEndpoints,
+ endpoints,
+ {},
+ [
+ {
+ type: mutationTypes.SET_ENDPOINTS,
+ payload: endpoints,
+ },
+ ],
+ [],
+ done,
+ );
+ });
+ });
+
+ describe('setMrMetadata', () => {
+ it('should trigger the SET_MR_METADATA state mutation', async () => {
+ const mrMetadata = { propA: 'a', propB: 'b' };
+
+ await testAction(
+ setMrMetadata,
+ mrMetadata,
+ {},
+ [
+ {
+ type: mutationTypes.SET_MR_METADATA,
+ payload: mrMetadata,
+ },
+ ],
+ [],
+ );
+ });
+ });
+
+ describe('fetchMrMetadata', () => {
+ const mrMetadata = { meta: true, data: 'foo' };
+ const state = {
+ endpoints: {
+ metadata: 'metadata',
+ },
+ };
+ let mock;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+
+ mock.onGet(state.endpoints.metadata).reply(200, mrMetadata);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ });
+
+ it('should fetch the data from the API', async () => {
+ await fetchMrMetadata({ state, dispatch: () => {} });
+
+ await axios.waitForAll();
+
+ expect(mock.history.get).toHaveLength(1);
+ expect(mock.history.get[0].url).toBe(state.endpoints.metadata);
+ });
+
+ it('should set the fetched data into state', () => {
+ return testAction(
+ fetchMrMetadata,
+ {},
+ state,
+ [],
+ [
+ {
+ type: 'setMrMetadata',
+ payload: mrMetadata,
+ },
+ ],
+ );
+ });
+ });
+});
diff --git a/spec/frontend/mr_notes/stores/mutations_spec.js b/spec/frontend/mr_notes/stores/mutations_spec.js
new file mode 100644
index 00000000000..35b8a2e4be2
--- /dev/null
+++ b/spec/frontend/mr_notes/stores/mutations_spec.js
@@ -0,0 +1,27 @@
+import mutationTypes from '~/mr_notes/stores/mutation_types';
+import mutations from '~/mr_notes/stores/mutations';
+
+describe('MR Notes Mutations', () => {
+ describe(mutationTypes.SET_ENDPOINTS, () => {
+ it('should set the endpoints value', () => {
+ const state = {};
+ const endpoints = { endpointA: 'A', endpointB: 'B' };
+
+ mutations[mutationTypes.SET_ENDPOINTS](state, endpoints);
+
+ expect(state.endpoints).toEqual(endpoints);
+ });
+ });
+
+ describe(mutationTypes.SET_MR_METADATA, () => {
+    it('stores the provided MR metadata in the state', () => {
+ const state = {};
+ const metadata = { propA: 'A', propB: 'B' };
+
+ mutations[mutationTypes.SET_MR_METADATA](state, metadata);
+
+ expect(state.mrMetadata.propA).toBe('A');
+ expect(state.mrMetadata.propB).toBe('B');
+ });
+ });
+});
diff --git a/spec/frontend/notebook/cells/markdown_spec.js b/spec/frontend/notebook/cells/markdown_spec.js
index 4d6addaf47c..219d74595bd 100644
--- a/spec/frontend/notebook/cells/markdown_spec.js
+++ b/spec/frontend/notebook/cells/markdown_spec.js
@@ -39,16 +39,15 @@ describe('Markdown component', () => {
expect(vm.$el.querySelector('.markdown h1')).not.toBeNull();
});
- it('sanitizes output', () => {
+ it('sanitizes output', async () => {
Object.assign(cell, {
source: [
'[XSS](data:text/html;base64,PHNjcmlwdD5hbGVydChkb2N1bWVudC5kb21haW4pPC9zY3JpcHQ+Cg==)\n',
],
});
- return vm.$nextTick().then(() => {
- expect(vm.$el.querySelector('a').getAttribute('href')).toBeNull();
- });
+ await vm.$nextTick();
+ expect(vm.$el.querySelector('a').getAttribute('href')).toBeNull();
});
describe('katex', () => {
@@ -56,43 +55,40 @@ describe('Markdown component', () => {
json = getJSONFixture('blob/notebook/math.json');
});
- it('renders multi-line katex', () => {
+ it('renders multi-line katex', async () => {
vm = new Component({
propsData: {
cell: json.cells[0],
},
}).$mount();
- return vm.$nextTick().then(() => {
- expect(vm.$el.querySelector('.katex')).not.toBeNull();
- });
+ await vm.$nextTick();
+ expect(vm.$el.querySelector('.katex')).not.toBeNull();
});
- it('renders inline katex', () => {
+ it('renders inline katex', async () => {
vm = new Component({
propsData: {
cell: json.cells[1],
},
}).$mount();
- return vm.$nextTick().then(() => {
- expect(vm.$el.querySelector('p:first-child .katex')).not.toBeNull();
- });
+ await vm.$nextTick();
+ expect(vm.$el.querySelector('p:first-child .katex')).not.toBeNull();
});
- it('renders multiple inline katex', () => {
+ it('renders multiple inline katex', async () => {
vm = new Component({
propsData: {
cell: json.cells[1],
},
}).$mount();
- return vm.$nextTick().then(() => {
- expect(vm.$el.querySelectorAll('p:nth-child(2) .katex').length).toBe(4);
- });
+ await vm.$nextTick();
+ expect(vm.$el.querySelectorAll('p:nth-child(2) .katex')).toHaveLength(4);
});
- it('output cell in case of katex error', () => {
+    it('outputs the cell in case of a katex error', async () => {
vm = new Component({
propsData: {
cell: {
@@ -103,14 +99,13 @@ describe('Markdown component', () => {
},
}).$mount();
- return vm.$nextTick().then(() => {
- // expect one paragraph with no katex formula in it
- expect(vm.$el.querySelectorAll('p').length).toBe(1);
- expect(vm.$el.querySelectorAll('p .katex').length).toBe(0);
- });
+ await vm.$nextTick();
+ // expect one paragraph with no katex formula in it
+ expect(vm.$el.querySelectorAll('p')).toHaveLength(1);
+ expect(vm.$el.querySelectorAll('p .katex')).toHaveLength(0);
});
- it('output cell and render remaining formula in case of katex error', () => {
+    it('outputs the cell and renders the remaining formula in case of a katex error', async () => {
vm = new Component({
propsData: {
cell: {
@@ -121,14 +116,13 @@ describe('Markdown component', () => {
},
}).$mount();
- return vm.$nextTick().then(() => {
- // expect one paragraph with no katex formula in it
- expect(vm.$el.querySelectorAll('p').length).toBe(1);
- expect(vm.$el.querySelectorAll('p .katex').length).toBe(1);
- });
+ await vm.$nextTick();
+ // expect one paragraph with no katex formula in it
+ expect(vm.$el.querySelectorAll('p')).toHaveLength(1);
+ expect(vm.$el.querySelectorAll('p .katex')).toHaveLength(1);
});
- it('renders math formula in list object', () => {
+    it('renders math formula in a list object', async () => {
vm = new Component({
propsData: {
cell: {
@@ -139,14 +133,13 @@ describe('Markdown component', () => {
},
}).$mount();
- return vm.$nextTick().then(() => {
- // expect one list with a katex formula in it
- expect(vm.$el.querySelectorAll('li').length).toBe(1);
- expect(vm.$el.querySelectorAll('li .katex').length).toBe(2);
- });
+ await vm.$nextTick();
+ // expect one list with a katex formula in it
+ expect(vm.$el.querySelectorAll('li')).toHaveLength(1);
+ expect(vm.$el.querySelectorAll('li .katex')).toHaveLength(2);
});
- it("renders math formula with tick ' in it", () => {
+ it("renders math formula with tick ' in it", async () => {
vm = new Component({
propsData: {
cell: {
@@ -157,11 +150,44 @@ describe('Markdown component', () => {
},
}).$mount();
- return vm.$nextTick().then(() => {
- // expect one list with a katex formula in it
- expect(vm.$el.querySelectorAll('li').length).toBe(1);
- expect(vm.$el.querySelectorAll('li .katex').length).toBe(2);
- });
+ await vm.$nextTick();
+ // expect one list with a katex formula in it
+ expect(vm.$el.querySelectorAll('li')).toHaveLength(1);
+ expect(vm.$el.querySelectorAll('li .katex')).toHaveLength(2);
+ });
+
+ it('renders math formula with less-than-operator < in it', async () => {
+ vm = new Component({
+ propsData: {
+ cell: {
+ cell_type: 'markdown',
+ metadata: {},
+ source: ['- list with inline $a=2$ inline formula $a + b < c$\n', '\n'],
+ },
+ },
+ }).$mount();
+
+ await vm.$nextTick();
+ // expect one list with a katex formula in it
+ expect(vm.$el.querySelectorAll('li')).toHaveLength(1);
+ expect(vm.$el.querySelectorAll('li .katex')).toHaveLength(2);
+ });
+
+ it('renders math formula with greater-than-operator > in it', async () => {
+ vm = new Component({
+ propsData: {
+ cell: {
+ cell_type: 'markdown',
+ metadata: {},
+ source: ['- list with inline $a=2$ inline formula $a + b > c$\n', '\n'],
+ },
+ },
+ }).$mount();
+
+ await vm.$nextTick();
+ // expect one list with a katex formula in it
+ expect(vm.$el.querySelectorAll('li')).toHaveLength(1);
+ expect(vm.$el.querySelectorAll('li .katex')).toHaveLength(2);
});
});
});
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index bab90723578..b717bab7c3f 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -1,10 +1,11 @@
-import { GlDropdown, GlAlert } from '@gitlab/ui';
+import { GlAlert } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import Autosize from 'autosize';
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import batchComments from '~/batch_comments/stores/modules/batch_comments';
import { refreshUserMergeRequestCounts } from '~/commons/nav/user_merge_requests';
import { deprecatedCreateFlash as flash } from '~/flash';
import axios from '~/lib/utils/axios_utils';
@@ -29,8 +30,10 @@ describe('issue_comment_form component', () => {
const findCloseReopenButton = () => wrapper.findByTestId('close-reopen-button');
const findTextArea = () => wrapper.findByTestId('comment-field');
+ const findAddToReviewButton = () => wrapper.findByTestId('add-to-review-button');
+ const findAddCommentNowButton = () => wrapper.findByTestId('add-comment-now-button');
const findConfidentialNoteCheckbox = () => wrapper.findByTestId('confidential-note-checkbox');
- const findCommentGlDropdown = () => wrapper.find(GlDropdown);
+ const findCommentGlDropdown = () => wrapper.findByTestId('comment-button');
const findCommentButton = () => findCommentGlDropdown().find('button');
const findErrorAlerts = () => wrapper.findAllComponents(GlAlert).wrappers;
@@ -582,4 +585,64 @@ describe('issue_comment_form component', () => {
expect(findTextArea().exists()).toBe(false);
});
});
+
+ describe('with batchComments in store', () => {
+ beforeEach(() => {
+ store.registerModule('batchComments', batchComments());
+ });
+
+ describe('add to review and comment now buttons', () => {
+ it('when no drafts exist, should not render', () => {
+ mountComponent();
+
+ expect(findCommentGlDropdown().exists()).toBe(true);
+ expect(findAddToReviewButton().exists()).toBe(false);
+ expect(findAddCommentNowButton().exists()).toBe(false);
+ });
+
+ describe('when drafts exist', () => {
+ beforeEach(() => {
+ store.state.batchComments.drafts = [{ note: 'A' }];
+ });
+
+ it('should render', () => {
+ mountComponent();
+
+ expect(findCommentGlDropdown().exists()).toBe(false);
+ expect(findAddToReviewButton().exists()).toBe(true);
+ expect(findAddCommentNowButton().exists()).toBe(true);
+ });
+
+        it('clicking `add to review` should call the draft endpoint and set `isDraft` to true', () => {
+ mountComponent({ mountFunction: mount, initialData: { note: 'a draft note' } });
+
+ jest.spyOn(store, 'dispatch').mockResolvedValue();
+ findAddToReviewButton().trigger('click');
+
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'saveNote',
+ expect.objectContaining({
+ endpoint: notesDataMock.draftsPath,
+ isDraft: true,
+ }),
+ );
+ });
+
+        it('clicking `add comment now` should call the note endpoint and set `isDraft` to false', () => {
+ mountComponent({ mountFunction: mount, initialData: { note: 'a comment' } });
+
+ jest.spyOn(store, 'dispatch').mockResolvedValue();
+ findAddCommentNowButton().trigger('click');
+
+ expect(store.dispatch).toHaveBeenCalledWith(
+ 'saveNote',
+ expect.objectContaining({
+ endpoint: noteableDataMock.create_note_path,
+ isDraft: false,
+ }),
+ );
+ });
+ });
+ });
+ });
});
diff --git a/spec/frontend/notes/components/discussion_navigator_spec.js b/spec/frontend/notes/components/discussion_navigator_spec.js
index 4d55eee2ffa..e430e18b76a 100644
--- a/spec/frontend/notes/components/discussion_navigator_spec.js
+++ b/spec/frontend/notes/components/discussion_navigator_spec.js
@@ -2,6 +2,11 @@
import 'mousetrap';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vue from 'vue';
+import {
+ keysFor,
+ MR_NEXT_UNRESOLVED_DISCUSSION,
+ MR_PREVIOUS_UNRESOLVED_DISCUSSION,
+} from '~/behaviors/shortcuts/keybindings';
import DiscussionNavigator from '~/notes/components/discussion_navigator.vue';
import eventHub from '~/notes/event_hub';
@@ -60,13 +65,13 @@ describe('notes/components/discussion_navigator', () => {
});
it('calls jumpToNextDiscussion when pressing `n`', () => {
- Mousetrap.trigger('n');
+ Mousetrap.trigger(keysFor(MR_NEXT_UNRESOLVED_DISCUSSION));
expect(jumpToNextDiscussion).toHaveBeenCalled();
});
it('calls jumpToPreviousDiscussion when pressing `p`', () => {
- Mousetrap.trigger('p');
+ Mousetrap.trigger(keysFor(MR_PREVIOUS_UNRESOLVED_DISCUSSION));
expect(jumpToPreviousDiscussion).toHaveBeenCalled();
});
@@ -87,8 +92,8 @@ describe('notes/components/discussion_navigator', () => {
});
it('unbinds keys', () => {
- expect(Mousetrap.unbind).toHaveBeenCalledWith('n');
- expect(Mousetrap.unbind).toHaveBeenCalledWith('p');
+ expect(Mousetrap.unbind).toHaveBeenCalledWith(keysFor(MR_NEXT_UNRESOLVED_DISCUSSION));
+ expect(Mousetrap.unbind).toHaveBeenCalledWith(keysFor(MR_PREVIOUS_UNRESOLVED_DISCUSSION));
});
it('unbinds event hub listeners', () => {
diff --git a/spec/frontend/notes/components/note_actions_spec.js b/spec/frontend/notes/components/note_actions_spec.js
index cc41088e21e..ecce854b00a 100644
--- a/spec/frontend/notes/components/note_actions_spec.js
+++ b/spec/frontend/notes/components/note_actions_spec.js
@@ -151,6 +151,22 @@ describe('noteActions', () => {
const assignUserButton = wrapper.find('[data-testid="assign-user"]');
expect(assignUserButton.exists()).toBe(false);
});
+
+ it('should render the correct (unescaped) name in the Resolved By tooltip', () => {
+ const complexUnescapedName = 'This is a Ǝ\'𝞓\'E "cat"?';
+ wrapper = mountNoteActions({
+ ...props,
+ canResolve: true,
+ isResolving: false,
+ isResolved: true,
+ resolvedBy: {
+ name: complexUnescapedName,
+ },
+ });
+
+ const { resolveButton } = wrapper.vm.$refs;
+ expect(resolveButton.$el.getAttribute('title')).toBe(`Resolved by ${complexUnescapedName}`);
+ });
});
});
diff --git a/spec/frontend/notes/components/note_body_spec.js b/spec/frontend/notes/components/note_body_spec.js
index 4922de987fa..40251244423 100644
--- a/spec/frontend/notes/components/note_body_spec.js
+++ b/spec/frontend/notes/components/note_body_spec.js
@@ -81,14 +81,21 @@ describe('issue_note_body component', () => {
state: {
defaultSuggestionCommitMessage:
'%{branch_name}%{project_path}%{project_name}%{username}%{user_full_name}%{file_paths}%{suggestions_count}%{files_count}',
- branchName: 'branch',
- projectPath: '/path',
- projectName: 'name',
- username: 'user',
- userFullName: 'user userton',
},
getters: { suggestionCommitMessage },
},
+ page: {
+ namespaced: true,
+ state: {
+ mrMetadata: {
+ branch_name: 'branch',
+ project_path: '/path',
+ project_name: 'name',
+ username: 'user',
+ user_full_name: 'user userton',
+ },
+ },
+ },
},
});
diff --git a/spec/frontend/notes/components/noteable_discussion_spec.js b/spec/frontend/notes/components/noteable_discussion_spec.js
index dd65351ef88..735bc2b70dd 100644
--- a/spec/frontend/notes/components/noteable_discussion_spec.js
+++ b/spec/frontend/notes/components/noteable_discussion_spec.js
@@ -124,14 +124,7 @@ describe('noteable_discussion component', () => {
...getJSONFixture(discussionWithTwoUnresolvedNotes)[0],
expanded: true,
};
- discussion.notes = discussion.notes.map((note) => ({
- ...note,
- resolved: false,
- current_user: {
- ...note.current_user,
- can_resolve: true,
- },
- }));
+ discussion.resolved = false;
wrapper.setProps({ discussion });
diff --git a/spec/frontend/notes/components/noteable_note_spec.js b/spec/frontend/notes/components/noteable_note_spec.js
index 112983f3ac2..7444c441e06 100644
--- a/spec/frontend/notes/components/noteable_note_spec.js
+++ b/spec/frontend/notes/components/noteable_note_spec.js
@@ -1,32 +1,65 @@
-import { mount, createLocalVue } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import { escape } from 'lodash';
+import Vue from 'vue';
+import Vuex from 'vuex';
+
import waitForPromises from 'helpers/wait_for_promises';
+
+import DiffsModule from '~/diffs/store/modules';
+
import NoteActions from '~/notes/components/note_actions.vue';
import NoteBody from '~/notes/components/note_body.vue';
import NoteHeader from '~/notes/components/note_header.vue';
import issueNote from '~/notes/components/noteable_note.vue';
-import createStore from '~/notes/stores';
+import NotesModule from '~/notes/stores/modules';
+
import UserAvatarLink from '~/vue_shared/components/user_avatar/user_avatar_link.vue';
+
import { noteableDataMock, notesDataMock, note } from '../mock_data';
+Vue.use(Vuex);
+
+const singleLineNotePosition = {
+ line_range: {
+ start: {
+ line_code: 'abc_1_1',
+ type: null,
+ old_line: '1',
+ new_line: '1',
+ },
+ end: {
+ line_code: 'abc_1_1',
+ type: null,
+ old_line: '1',
+ new_line: '1',
+ },
+ },
+};
+
describe('issue_note', () => {
let store;
let wrapper;
const findMultilineComment = () => wrapper.find('[data-testid="multiline-comment"]');
- const createWrapper = (props = {}) => {
- store = createStore();
+ const createWrapper = (props = {}, storeUpdater = (s) => s) => {
+ store = new Vuex.Store(
+ storeUpdater({
+ modules: {
+ notes: NotesModule(),
+ diffs: DiffsModule(),
+ },
+ }),
+ );
+
store.dispatch('setNoteableData', noteableDataMock);
store.dispatch('setNotesData', notesDataMock);
- const localVue = createLocalVue();
- wrapper = mount(localVue.extend(issueNote), {
+ wrapper = mount(issueNote, {
store,
propsData: {
note,
...props,
},
- localVue,
stubs: [
'note-header',
'user-avatar-link',
@@ -216,9 +249,13 @@ describe('issue_note', () => {
const noteBodyComponent = wrapper.findComponent(NoteBody);
store.hotUpdate({
- actions: {
- updateNote() {},
- setSelectedCommentPositionHover() {},
+ modules: {
+ notes: {
+ actions: {
+ updateNote() {},
+ setSelectedCommentPositionHover() {},
+ },
+ },
},
});
@@ -238,8 +275,12 @@ describe('issue_note', () => {
it('restores content of updated note', async () => {
const updatedText = 'updated note text';
store.hotUpdate({
- actions: {
- updateNote() {},
+ modules: {
+ notes: {
+ actions: {
+ updateNote() {},
+ },
+ },
},
});
const noteBody = wrapper.findComponent(NoteBody);
@@ -267,9 +308,13 @@ describe('issue_note', () => {
const updateActions = () => {
store.hotUpdate({
- actions: {
- updateNote,
- setSelectedCommentPositionHover() {},
+ modules: {
+ notes: {
+ actions: {
+ updateNote,
+ setSelectedCommentPositionHover() {},
+ },
+ },
},
});
};
@@ -299,4 +344,62 @@ describe('issue_note', () => {
expect(updateNote.mock.calls[0][1].note.note.position).toBe(expectation);
});
});
+
+ describe('diffFile', () => {
+ it.each`
+ scenario | files | noteDef
+ ${'the note has no position'} | ${undefined} | ${note}
+ ${'the Diffs store has no data'} | ${[]} | ${{ ...note, position: singleLineNotePosition }}
+ `(
+ 'returns `null` when $scenario and no diff file is provided as a prop',
+ ({ noteDef, diffs }) => {
+ const storeUpdater = (rawStore) => {
+ const updatedStore = { ...rawStore };
+
+ if (diffs) {
+ updatedStore.modules.diffs.state.diffFiles = diffs;
+ }
+
+ return updatedStore;
+ };
+
+ createWrapper({ note: noteDef, discussionFile: null }, storeUpdater);
+
+ expect(wrapper.vm.diffFile).toBe(null);
+ },
+ );
+
+ it("returns the correct diff file from the Diffs store if it's available", () => {
+ createWrapper(
+ {
+ note: { ...note, position: singleLineNotePosition },
+ },
+ (rawStore) => {
+ const updatedStore = { ...rawStore };
+ updatedStore.modules.diffs.state.diffFiles = [
+ { file_hash: 'abc', testId: 'diffFileTest' },
+ ];
+ return updatedStore;
+ },
+ );
+
+ expect(wrapper.vm.diffFile.testId).toBe('diffFileTest');
+ });
+
+ it('returns the provided diff file if the more robust getters fail', () => {
+ createWrapper(
+ {
+ note: { ...note, position: singleLineNotePosition },
+ discussionFile: { testId: 'diffFileTest' },
+ },
+ (rawStore) => {
+ const updatedStore = { ...rawStore };
+ updatedStore.modules.diffs.state.diffFiles = [];
+ return updatedStore;
+ },
+ );
+
+ expect(wrapper.vm.diffFile.testId).toBe('diffFileTest');
+ });
+ });
});
diff --git a/spec/frontend/notes/components/notes_app_spec.js b/spec/frontend/notes/components/notes_app_spec.js
index 163501d5ce8..241a89b2218 100644
--- a/spec/frontend/notes/components/notes_app_spec.js
+++ b/spec/frontend/notes/components/notes_app_spec.js
@@ -3,6 +3,8 @@ import AxiosMockAdapter from 'axios-mock-adapter';
import $ from 'jquery';
import Vue from 'vue';
import { setTestTimeout } from 'helpers/timeout';
+import DraftNote from '~/batch_comments/components/draft_note.vue';
+import batchComments from '~/batch_comments/stores/modules/batch_comments';
import axios from '~/lib/utils/axios_utils';
import * as urlUtility from '~/lib/utils/url_utility';
import CommentForm from '~/notes/components/comment_form.vue';
@@ -400,4 +402,32 @@ describe('note_app', () => {
expect(getComponentOrder()).toStrictEqual([TYPE_NOTES_LIST, TYPE_COMMENT_FORM]);
});
});
+
+ describe('when multiple draft types are present', () => {
+ beforeEach(() => {
+ store = createStore();
+ store.registerModule('batchComments', batchComments());
+ store.state.batchComments.drafts = [
+ mockData.draftDiffDiscussion,
+ mockData.draftReply,
+ ...mockData.draftComments,
+ ];
+ store.state.isLoading = false;
+ wrapper = shallowMount(NotesApp, {
+ propsData,
+ store,
+ stubs: {
+ OrderedLayout,
+ },
+ });
+ });
+
+ it('correctly finds only draft comments', () => {
+ const drafts = wrapper.findAll(DraftNote).wrappers;
+
+ expect(drafts.map((x) => x.props('draft'))).toEqual(
+ mockData.draftComments.map(({ note }) => expect.objectContaining({ note })),
+ );
+ });
+ });
});
diff --git a/spec/frontend/notes/mock_data.js b/spec/frontend/notes/mock_data.js
index 638a4edecd6..a4aeeda48d8 100644
--- a/spec/frontend/notes/mock_data.js
+++ b/spec/frontend/notes/mock_data.js
@@ -6,6 +6,7 @@ export const notesDataMock = {
markdownDocsPath: '/help/user/markdown',
newSessionPath: '/users/sign_in?redirect_to_referer=yes',
notesPath: '/gitlab-org/gitlab-foss/noteable/issue/98/notes',
+ draftsPath: '/flightjs/flight/-/merge_requests/4/drafts',
quickActionsDocsPath: '/help/user/project/quick_actions',
registerPath: '/users/sign_up?redirect_to_referer=yes',
prerenderedNotesCount: 1,
@@ -1270,3 +1271,18 @@ export const batchSuggestionsInfoMock = [
discussionId: 'c003',
},
];
+
+export const draftComments = [
+ { id: 7, note: 'test draft note', isDraft: true },
+ { id: 9, note: 'draft note 2', isDraft: true },
+];
+
+export const draftReply = { id: 8, note: 'draft reply', discussion_id: 1, isDraft: true };
+
+export const draftDiffDiscussion = {
+ id: 6,
+ note: 'draft diff discussion',
+ line_code: 1,
+ file_path: 'lib/foo.rb',
+ isDraft: true,
+};
diff --git a/spec/frontend/notes/stores/getters_spec.js b/spec/frontend/notes/stores/getters_spec.js
index 4d2f86a1ecf..3adb5da020e 100644
--- a/spec/frontend/notes/stores/getters_spec.js
+++ b/spec/frontend/notes/stores/getters_spec.js
@@ -1,4 +1,4 @@
-import { DESC } from '~/notes/constants';
+import { DESC, ASC } from '~/notes/constants';
import * as getters from '~/notes/stores/getters';
import {
notesDataMock,
@@ -12,6 +12,9 @@ import {
discussion3,
resolvedDiscussion1,
unresolvableDiscussion,
+ draftComments,
+ draftReply,
+ draftDiffDiscussion,
} from '../mock_data';
const discussionWithTwoUnresolvedNotes = 'merge_requests/resolved_diff_discussion.json';
@@ -23,6 +26,8 @@ const createDiscussionNeighborParams = (discussionId, diffOrder, step) => ({
step,
});
+const asDraftDiscussion = (x) => ({ ...x, individual_note: true });
+
describe('Getters Notes Store', () => {
let state;
@@ -61,20 +66,58 @@ describe('Getters Notes Store', () => {
});
describe('discussions', () => {
- it('should return all discussions in the store', () => {
- expect(getters.discussions(state)).toEqual([individualNote]);
- });
+ let batchComments = null;
+
+ const getDiscussions = () => getters.discussions(state, {}, { batchComments });
+
+ describe('without batchComments module', () => {
+ it('should return all discussions in the store', () => {
+ expect(getDiscussions()).toEqual([individualNote]);
+ });
+
+ it('should transform discussion to individual notes in timeline view', () => {
+ state.discussions = [discussionMock];
+ state.isTimelineEnabled = true;
- it('should transform discussion to individual notes in timeline view', () => {
- state.discussions = [discussionMock];
- state.isTimelineEnabled = true;
+ const discussions = getDiscussions();
+
+ expect(discussions.length).toEqual(discussionMock.notes.length);
+ discussions.forEach((discussion) => {
+ expect(discussion.individual_note).toBe(true);
+ expect(discussion.id).toBe(discussion.notes[0].id);
+ expect(discussion.created_at).toBe(discussion.notes[0].created_at);
+ });
+ });
+ });
- expect(getters.discussions(state).length).toEqual(discussionMock.notes.length);
- getters.discussions(state).forEach((discussion) => {
- expect(discussion.individual_note).toBe(true);
- expect(discussion.id).toBe(discussion.notes[0].id);
- expect(discussion.created_at).toBe(discussion.notes[0].created_at);
+ describe('with batchComments', () => {
+ beforeEach(() => {
+ batchComments = { drafts: [...draftComments, draftReply, draftDiffDiscussion] };
});
+
+ it.each`
+ discussionSortOrder | expectation
+ ${ASC} | ${[individualNote, ...draftComments.map(asDraftDiscussion)]}
+ ${DESC} | ${[...draftComments.reverse().map(asDraftDiscussion), individualNote]}
+ `(
+ 'only appends draft comments (discussionSortOrder=$discussionSortOrder)',
+ ({ discussionSortOrder, expectation }) => {
+ state.discussionSortOrder = discussionSortOrder;
+
+ expect(getDiscussions()).toEqual(expectation);
+ },
+ );
+ });
+ });
+
+ describe('hasDrafts', () => {
+ it.each`
+ rootGetters | expected
+ ${{}} | ${false}
+ ${{ 'batchComments/hasDrafts': true }} | ${true}
+ ${{ 'batchComments/hasDrafts': false }} | ${false}
+ `('with rootGetters=$rootGetters, returns $expected', ({ rootGetters, expected }) => {
+ expect(getters.hasDrafts({}, {}, {}, rootGetters)).toBe(expected);
});
});
@@ -103,7 +146,7 @@ describe('Getters Notes Store', () => {
};
it('should return a single system note when a description was updated multiple times', () => {
- expect(getters.discussions(stateCollapsedNotes).length).toEqual(1);
+ expect(getters.discussions(stateCollapsedNotes, {}, {}).length).toEqual(1);
});
});
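
The `hasDrafts` table above exercises a getter that only consults the namespaced `batchComments/hasDrafts` root getter. A sketch of a getter with that shape, consistent with the three rows (missing, true, false); the shipped getter may differ in detail:

// Sketch consistent with the it.each table above; not necessarily the shipped code.
export const hasDrafts = (state, getters, rootState, rootGetters) =>
  Boolean(rootGetters['batchComments/hasDrafts']);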
diff --git a/spec/frontend/packages/details/store/getters_spec.js b/spec/frontend/packages/details/store/getters_spec.js
index f12b75d3b70..005adece56e 100644
--- a/spec/frontend/packages/details/store/getters_spec.js
+++ b/spec/frontend/packages/details/store/getters_spec.js
@@ -27,6 +27,7 @@ import {
mockPipelineInfo,
mavenPackage as packageWithoutBuildInfo,
pypiPackage,
+ rubygemsPackage,
} from '../../mock_data';
import {
generateMavenCommand,
@@ -104,6 +105,7 @@ describe('Getters PackageDetails Store', () => {
${npmPackage} | ${'npm'}
${nugetPackage} | ${'NuGet'}
${pypiPackage} | ${'PyPI'}
+ ${rubygemsPackage} | ${'RubyGems'}
`(`package type`, ({ packageEntity, expectedResult }) => {
beforeEach(() => setupState({ packageEntity }));
diff --git a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
index 3f17731584c..07aba62fef6 100644
--- a/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
+++ b/spec/frontend/packages/list/components/__snapshots__/packages_list_app_spec.js.snap
@@ -2,11 +2,11 @@
exports[`packages_list_app renders 1`] = `
<div>
- <package-title-stub
- packagehelpurl="foo"
+ <div
+ help-url="foo"
/>
- <package-search-stub />
+ <div />
<div>
<section
@@ -52,7 +52,9 @@ exports[`packages_list_app renders 1`] = `
with GitLab.
</p>
- <div>
+ <div
+ class="gl-display-flex gl-flex-wrap gl-justify-content-center"
+ >
<!---->
<!---->
diff --git a/spec/frontend/packages/list/components/packages_list_app_spec.js b/spec/frontend/packages/list/components/packages_list_app_spec.js
index 6862d23c4ff..4de2dd0789e 100644
--- a/spec/frontend/packages/list/components/packages_list_app_spec.js
+++ b/spec/frontend/packages/list/components/packages_list_app_spec.js
@@ -3,10 +3,11 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import createFlash from '~/flash';
import * as commonUtils from '~/lib/utils/common_utils';
-import PackageSearch from '~/packages/list/components/package_search.vue';
import PackageListApp from '~/packages/list/components/packages_list_app.vue';
import { DELETE_PACKAGE_SUCCESS_MESSAGE } from '~/packages/list/constants';
import { SHOW_DELETE_SUCCESS_ALERT } from '~/packages/shared/constants';
+import { FILTERED_SEARCH_TERM } from '~/packages_and_registries/shared/constants';
+import * as packageUtils from '~/packages_and_registries/shared/utils';
jest.mock('~/lib/utils/common_utils');
jest.mock('~/flash');
@@ -24,10 +25,19 @@ describe('packages_list_app', () => {
};
const GlLoadingIcon = { name: 'gl-loading-icon', template: '<div>loading</div>' };
+ // we need to manually stub dynamically imported components because shallowMount is not able to stub them automatically. See: https://github.com/vuejs/vue-test-utils/issues/1279

+ const PackageSearch = { name: 'PackageSearch', template: '<div></div>' };
+ const PackageTitle = { name: 'PackageTitle', template: '<div></div>' };
+ const InfrastructureTitle = { name: 'InfrastructureTitle', template: '<div></div>' };
+ const InfrastructureSearch = { name: 'InfrastructureSearch', template: '<div></div>' };
+
const emptyListHelpUrl = 'helpUrl';
const findEmptyState = () => wrapper.find(GlEmptyState);
const findListComponent = () => wrapper.find(PackageList);
const findPackageSearch = () => wrapper.find(PackageSearch);
+ const findPackageTitle = () => wrapper.find(PackageTitle);
+ const findInfrastructureTitle = () => wrapper.find(InfrastructureTitle);
+ const findInfrastructureSearch = () => wrapper.find(InfrastructureSearch);
const createStore = (filter = []) => {
store = new Vuex.Store({
@@ -45,7 +55,7 @@ describe('packages_list_app', () => {
store.dispatch = jest.fn();
};
- const mountComponent = () => {
+ const mountComponent = (provide) => {
wrapper = shallowMount(PackageListApp, {
localVue,
store,
@@ -55,12 +65,18 @@ describe('packages_list_app', () => {
PackageList,
GlSprintf,
GlLink,
+ PackageSearch,
+ PackageTitle,
+ InfrastructureTitle,
+ InfrastructureSearch,
},
+ provide,
});
};
beforeEach(() => {
createStore();
+ jest.spyOn(packageUtils, 'getQueryParams').mockReturnValue({});
});
afterEach(() => {
@@ -72,25 +88,6 @@ describe('packages_list_app', () => {
expect(wrapper.element).toMatchSnapshot();
});
- describe('empty state', () => {
- it('generate the correct empty list link', () => {
- mountComponent();
-
- const link = findListComponent().find(GlLink);
-
- expect(link.attributes('href')).toBe(emptyListHelpUrl);
- expect(link.text()).toBe('publish and share your packages');
- });
-
- it('includes the right content on the default tab', () => {
- mountComponent();
-
- const heading = findEmptyState().find('h1');
-
- expect(heading.text()).toBe('There are no packages yet');
- });
- });
-
it('call requestPackagesList on page:changed', () => {
mountComponent();
store.dispatch.mockClear();
@@ -108,10 +105,75 @@ describe('packages_list_app', () => {
expect(store.dispatch).toHaveBeenCalledWith('requestDeletePackage', 'foo');
});
- it('does not call requestPackagesList two times on render', () => {
+ it('calls requestPackagesList only once on render', () => {
mountComponent();
- expect(store.dispatch).toHaveBeenCalledTimes(1);
+ expect(store.dispatch).toHaveBeenCalledTimes(3);
+ expect(store.dispatch).toHaveBeenNthCalledWith(1, 'setSorting', expect.any(Object));
+ expect(store.dispatch).toHaveBeenNthCalledWith(2, 'setFilter', expect.any(Array));
+ expect(store.dispatch).toHaveBeenNthCalledWith(3, 'requestPackagesList');
+ });
+
+ describe('url query string handling', () => {
+ const defaultQueryParamsMock = {
+ search: [1, 2],
+ type: 'npm',
+ sort: 'asc',
+ orderBy: 'created',
+ };
+
+ it('calls setSorting with the query string based sorting', () => {
+ jest.spyOn(packageUtils, 'getQueryParams').mockReturnValue(defaultQueryParamsMock);
+
+ mountComponent();
+
+ expect(store.dispatch).toHaveBeenNthCalledWith(1, 'setSorting', {
+ orderBy: defaultQueryParamsMock.orderBy,
+ sort: defaultQueryParamsMock.sort,
+ });
+ });
+
+ it('calls setFilter with the query string based filters', () => {
+ jest.spyOn(packageUtils, 'getQueryParams').mockReturnValue(defaultQueryParamsMock);
+
+ mountComponent();
+
+ expect(store.dispatch).toHaveBeenNthCalledWith(2, 'setFilter', [
+ { type: 'type', value: { data: defaultQueryParamsMock.type } },
+ { type: FILTERED_SEARCH_TERM, value: { data: defaultQueryParamsMock.search[0] } },
+ { type: FILTERED_SEARCH_TERM, value: { data: defaultQueryParamsMock.search[1] } },
+ ]);
+ });
+
+ it('calls setSorting and setFilters with the results of extractFilterAndSorting', () => {
+ jest
+ .spyOn(packageUtils, 'extractFilterAndSorting')
+ .mockReturnValue({ filters: ['foo'], sorting: { sort: 'desc' } });
+
+ mountComponent();
+
+ expect(store.dispatch).toHaveBeenNthCalledWith(1, 'setSorting', { sort: 'desc' });
+ expect(store.dispatch).toHaveBeenNthCalledWith(2, 'setFilter', ['foo']);
+ });
+ });
+
+ describe('empty state', () => {
+ it('generates the correct empty list link', () => {
+ mountComponent();
+
+ const link = findListComponent().find(GlLink);
+
+ expect(link.attributes('href')).toBe(emptyListHelpUrl);
+ expect(link.text()).toBe('publish and share your packages');
+ });
+
+ it('includes the right content on the default tab', () => {
+ mountComponent();
+
+ const heading = findEmptyState().find('h1');
+
+ expect(heading.text()).toBe('There are no packages yet');
+ });
});
describe('filter without results', () => {
@@ -145,6 +207,31 @@ describe('packages_list_app', () => {
});
});
+ describe('Infrastructure config', () => {
+ it('defaults to package registry components', () => {
+ mountComponent();
+
+ expect(findPackageSearch().exists()).toBe(true);
+ expect(findPackageTitle().exists()).toBe(true);
+
+ expect(findInfrastructureTitle().exists()).toBe(false);
+ expect(findInfrastructureSearch().exists()).toBe(false);
+ });
+
+ it('mounts different components based on the provided values', () => {
+ mountComponent({
+ titleComponent: 'InfrastructureTitle',
+ searchComponent: 'InfrastructureSearch',
+ });
+
+ expect(findPackageSearch().exists()).toBe(false);
+ expect(findPackageTitle().exists()).toBe(false);
+
+ expect(findInfrastructureTitle().exists()).toBe(true);
+ expect(findInfrastructureSearch().exists()).toBe(true);
+ });
+ });
+
describe('delete alert handling', () => {
const { location } = window.location;
const search = `?${SHOW_DELETE_SUCCESS_ALERT}=true`;
diff --git a/spec/frontend/packages/list/components/packages_search_spec.js b/spec/frontend/packages/list/components/packages_search_spec.js
index 9b62dde8d2b..30fad74b493 100644
--- a/spec/frontend/packages/list/components/packages_search_spec.js
+++ b/spec/frontend/packages/list/components/packages_search_spec.js
@@ -2,8 +2,9 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import component from '~/packages/list/components/package_search.vue';
import PackageTypeToken from '~/packages/list/components/tokens/package_type_token.vue';
-import getTableHeaders from '~/packages/list/utils';
+import { sortableFields } from '~/packages/list/utils';
import RegistrySearch from '~/vue_shared/components/registry/registry_search.vue';
+import UrlSync from '~/vue_shared/components/url_sync.vue';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -12,7 +13,8 @@ describe('Package Search', () => {
let wrapper;
let store;
- const findRegistrySearch = () => wrapper.find(RegistrySearch);
+ const findRegistrySearch = () => wrapper.findComponent(RegistrySearch);
+ const findUrlSync = () => wrapper.findComponent(UrlSync);
const createStore = (isGroupPage) => {
const state = {
@@ -37,6 +39,9 @@ describe('Package Search', () => {
wrapper = shallowMount(component, {
localVue,
store,
+ stubs: {
+ UrlSync,
+ },
});
};
@@ -55,7 +60,7 @@ describe('Package Search', () => {
tokens: expect.arrayContaining([
expect.objectContaining({ token: PackageTypeToken, type: 'type', icon: 'package' }),
]),
- sortableFields: getTableHeaders(),
+ sortableFields: sortableFields(),
});
});
@@ -72,7 +77,7 @@ describe('Package Search', () => {
tokens: expect.arrayContaining([
expect.objectContaining({ token: PackageTypeToken, type: 'type', icon: 'package' }),
]),
- sortableFields: getTableHeaders(isGroupPage),
+ sortableFields: sortableFields(isGroupPage),
});
});
@@ -104,4 +109,20 @@ describe('Package Search', () => {
expect(wrapper.emitted('update')).toEqual([[]]);
});
+
+ it('has a UrlSync component', () => {
+ mountComponent();
+
+ expect(findUrlSync().exists()).toBe(true);
+ });
+
+ it('on query:changed calls updateQuery from UrlSync', () => {
+ jest.spyOn(UrlSync.methods, 'updateQuery').mockImplementation(() => {});
+
+ mountComponent();
+
+ findRegistrySearch().vm.$emit('query:changed');
+
+ expect(UrlSync.methods.updateQuery).toHaveBeenCalled();
+ });
});
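
The two new cases above use a pattern that recurs in these specs: stub `UrlSync` explicitly and spy on its `updateQuery` method on the component definition before mounting, so the stubbed instance picks up the mock. A small helper capturing that pattern, assuming Jest plus Vue Test Utils 1 and the `~` webpack alias used throughout these specs; the helper name is an assumption:

import { shallowMount } from '@vue/test-utils';
import UrlSync from '~/vue_shared/components/url_sync.vue';

// Spy before mounting so every stubbed UrlSync rendered by `component`
// uses the mocked updateQuery.
const mountWithUrlSyncSpy = (component, options = {}) => {
  jest.spyOn(UrlSync.methods, 'updateQuery').mockImplementation(() => {});
  return shallowMount(component, { stubs: { UrlSync }, ...options });
};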
diff --git a/spec/frontend/packages/list/components/packages_title_spec.js b/spec/frontend/packages/list/components/packages_title_spec.js
index 3716e8daa7c..a17f72e3133 100644
--- a/spec/frontend/packages/list/components/packages_title_spec.js
+++ b/spec/frontend/packages/list/components/packages_title_spec.js
@@ -11,7 +11,7 @@ describe('PackageTitle', () => {
const findTitleArea = () => wrapper.find(TitleArea);
const findMetadataItem = () => wrapper.find(MetadataItem);
- const mountComponent = (propsData = { packageHelpUrl: 'foo' }) => {
+ const mountComponent = (propsData = { helpUrl: 'foo' }) => {
wrapper = shallowMount(PackageTitle, {
store,
propsData,
@@ -44,15 +44,15 @@ describe('PackageTitle', () => {
});
describe.each`
- packagesCount | exist | text
- ${null} | ${false} | ${''}
- ${undefined} | ${false} | ${''}
- ${0} | ${true} | ${'0 Packages'}
- ${1} | ${true} | ${'1 Package'}
- ${2} | ${true} | ${'2 Packages'}
- `('when packagesCount is $packagesCount metadata item', ({ packagesCount, exist, text }) => {
+ count | exist | text
+ ${null} | ${false} | ${''}
+ ${undefined} | ${false} | ${''}
+ ${0} | ${true} | ${'0 Packages'}
+ ${1} | ${true} | ${'1 Package'}
+ ${2} | ${true} | ${'2 Packages'}
+ `('when count is $count metadata item', ({ count, exist, text }) => {
beforeEach(() => {
- mountComponent({ packagesCount, packageHelpUrl: 'foo' });
+ mountComponent({ count, helpUrl: 'foo' });
});
it(`is ${exist} that it exists`, () => {
diff --git a/spec/frontend/packages/list/utils_spec.js b/spec/frontend/packages/list/utils_spec.js
index 5bcc3784752..4e4f7b8a723 100644
--- a/spec/frontend/packages/list/utils_spec.js
+++ b/spec/frontend/packages/list/utils_spec.js
@@ -1,6 +1,15 @@
-import { getNewPaginationPage } from '~/packages/list/utils';
+import { SORT_FIELDS } from '~/packages/list/constants';
+import { getNewPaginationPage, sortableFields } from '~/packages/list/utils';
describe('Packages list utils', () => {
+ describe('sortableFields', () => {
+ it('returns the correct list when is a project page', () => {
+ expect(sortableFields()).toEqual(SORT_FIELDS.filter((f) => f.orderBy !== 'project_path'));
+ });
+ it('returns the full list on the group page', () => {
+ expect(sortableFields(true)).toEqual(SORT_FIELDS);
+ });
+ });
describe('packageTypeDisplay', () => {
it('returns the current page when total items exceed pagination', () => {
expect(getNewPaginationPage(2, 20, 21)).toBe(2);
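
The two `sortableFields` cases above pin down one behaviour: on project pages the `project_path` column is dropped, on group pages the full `SORT_FIELDS` list is returned. A hedged sketch of an implementation matching those expectations; the real `~/packages/list/utils` may be written differently:

import { SORT_FIELDS } from '~/packages/list/constants';

// Group pages keep every field; project pages have no project column to sort by.
export const sortableFields = (isGroupPage = false) =>
  SORT_FIELDS.filter((field) => isGroupPage || field.orderBy !== 'project_path');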
diff --git a/spec/frontend/packages/mock_data.js b/spec/frontend/packages/mock_data.js
index fbc167729d9..06009daba54 100644
--- a/spec/frontend/packages/mock_data.js
+++ b/spec/frontend/packages/mock_data.js
@@ -134,6 +134,23 @@ export const nugetPackage = {
},
};
+export const rubygemsPackage = {
+ created_at: '2015-12-10',
+ id: 4,
+ name: 'RubyGem1',
+ package_files: [],
+ package_type: 'rubygems',
+ project_id: 1,
+ tags: [],
+ updated_at: '2015-12-10',
+ version: '1.0.0',
+ rubygems_metadatum: {
+ author: 'Fake Name',
+ summary: 'My gem',
+ email: 'tanuki@fake.com',
+ },
+};
+
export const pypiPackage = {
created_at: '2015-12-10',
id: 5,
diff --git a/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap b/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
index 77095f7c611..03b98478f3e 100644
--- a/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
+++ b/spec/frontend/packages/shared/components/__snapshots__/package_list_row_spec.js.snap
@@ -51,20 +51,7 @@ exports[`packages_list_row renders 1`] = `
<!---->
- <div
- class="d-flex align-items-center"
- data-testid="package-type"
- >
- <gl-icon-stub
- class="gl-ml-3 gl-mr-2"
- name="package"
- size="16"
- />
-
- <span>
- Maven
- </span>
- </div>
+ <div />
<package-path-stub
path="foo/bar/baz"
diff --git a/spec/frontend/packages/shared/components/package_icon_and_name_spec.js b/spec/frontend/packages/shared/components/package_icon_and_name_spec.js
new file mode 100644
index 00000000000..c96a570a29c
--- /dev/null
+++ b/spec/frontend/packages/shared/components/package_icon_and_name_spec.js
@@ -0,0 +1,32 @@
+import { GlIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import PackageIconAndName from '~/packages/shared/components/package_icon_and_name.vue';
+
+describe('PackageIconAndName', () => {
+ let wrapper;
+
+ const findIcon = () => wrapper.find(GlIcon);
+
+ const mountComponent = () => {
+ wrapper = shallowMount(PackageIconAndName, {
+ slots: {
+ default: 'test',
+ },
+ });
+ };
+
+ it('has an icon', () => {
+ mountComponent();
+
+ const icon = findIcon();
+
+ expect(icon.exists()).toBe(true);
+ expect(icon.props('name')).toBe('package');
+ });
+
+ it('renders the slot content', () => {
+ mountComponent();
+
+ expect(wrapper.text()).toBe('test');
+ });
+});
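
The new spec above pins down a small contract: a `package` icon plus whatever the caller puts in the default slot. One hedged sketch of a component that would satisfy it, mirroring the markup removed from the package_list_row snapshot above; the shipped package_icon_and_name.vue may differ:

import { GlIcon } from '@gitlab/ui';

// Illustrative only; uses an inline template for brevity.
export default {
  name: 'PackageIconAndName',
  components: { GlIcon },
  template: `
    <div class="gl-display-flex gl-align-items-center">
      <gl-icon name="package" class="gl-ml-3 gl-mr-2" />
      <span><slot></slot></span>
    </div>
  `,
};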
diff --git a/spec/frontend/packages/shared/components/package_list_row_spec.js b/spec/frontend/packages/shared/components/package_list_row_spec.js
index 1c0ef7e3539..fd54cd0f25d 100644
--- a/spec/frontend/packages/shared/components/package_list_row_spec.js
+++ b/spec/frontend/packages/shared/components/package_list_row_spec.js
@@ -1,7 +1,9 @@
import { shallowMount } from '@vue/test-utils';
+
import PackagesListRow from '~/packages/shared/components/package_list_row.vue';
import PackagePath from '~/packages/shared/components/package_path.vue';
import PackageTags from '~/packages/shared/components/package_tags.vue';
+
import ListItem from '~/vue_shared/components/registry/list_item.vue';
import { packageList } from '../../mock_data';
@@ -11,20 +13,30 @@ describe('packages_list_row', () => {
const [packageWithoutTags, packageWithTags] = packageList;
+ const InfrastructureIconAndName = { name: 'InfrastructureIconAndName', template: '<div></div>' };
+ const PackageIconAndName = { name: 'PackageIconAndName', template: '<div></div>' };
+
const findPackageTags = () => wrapper.find(PackageTags);
const findPackagePath = () => wrapper.find(PackagePath);
const findDeleteButton = () => wrapper.find('[data-testid="action-delete"]');
- const findPackageType = () => wrapper.find('[data-testid="package-type"]');
+ const findPackageIconAndName = () => wrapper.find(PackageIconAndName);
+ const findInfrastructureIconAndName = () => wrapper.find(InfrastructureIconAndName);
const mountComponent = ({
isGroup = false,
packageEntity = packageWithoutTags,
showPackageType = true,
disableDelete = false,
+ provide,
} = {}) => {
wrapper = shallowMount(PackagesListRow, {
store,
- stubs: { ListItem },
+ provide,
+ stubs: {
+ ListItem,
+ InfrastructureIconAndName,
+ PackageIconAndName,
+ },
propsData: {
packageLink: 'foo',
packageEntity,
@@ -72,13 +84,13 @@ describe('packages_list_row', () => {
it('shows the type when set', () => {
mountComponent();
- expect(findPackageType().exists()).toBe(true);
+ expect(findPackageIconAndName().exists()).toBe(true);
});
it('does not show the type when not set', () => {
mountComponent({ showPackageType: false });
- expect(findPackageType().exists()).toBe(false);
+ expect(findPackageIconAndName().exists()).toBe(false);
});
});
@@ -113,4 +125,25 @@ describe('packages_list_row', () => {
expect(wrapper.emitted('packageToDelete')[0]).toEqual([packageWithoutTags]);
});
});
+
+ describe('Infrastructure config', () => {
+ it('defaults to package registry components', () => {
+ mountComponent();
+
+ expect(findPackageIconAndName().exists()).toBe(true);
+ expect(findInfrastructureIconAndName().exists()).toBe(false);
+ });
+
+ it('mounts a different component based on the provided values', () => {
+ mountComponent({
+ provide: {
+ iconComponent: 'InfrastructureIconAndName',
+ },
+ });
+
+ expect(findPackageIconAndName().exists()).toBe(false);
+
+ expect(findInfrastructureIconAndName().exists()).toBe(true);
+ });
+ });
});
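
The 'Infrastructure config' block above swaps the icon component purely through `provide`, the same mechanism packages_list_app uses for its title and search components. A hedged sketch of how a row component can resolve an injected component name with a package-registry default; `iconComponent` comes from the spec, everything else here is an assumption:

// Sketch of provide/inject-based component selection, illustrative only.
export default {
  inject: {
    iconComponent: { default: 'PackageIconAndName' },
  },
  // The template would then render the icon with
  // <component :is="iconComponent">{{ packageTypeDisplay }}</component>,
  // so providing 'InfrastructureIconAndName' swaps it without new props.
};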
diff --git a/spec/frontend/packages/shared/utils_spec.js b/spec/frontend/packages/shared/utils_spec.js
index 4a95def1bef..463e4a4febb 100644
--- a/spec/frontend/packages/shared/utils_spec.js
+++ b/spec/frontend/packages/shared/utils_spec.js
@@ -38,6 +38,7 @@ describe('Packages shared utils', () => {
${'npm'} | ${'npm'}
${'nuget'} | ${'NuGet'}
${'pypi'} | ${'PyPI'}
+ ${'rubygems'} | ${'RubyGems'}
${'composer'} | ${'Composer'}
${'foo'} | ${null}
`(`package type`, ({ packageType, expectedResult }) => {
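
The expanded table above (together with the details-store table earlier in this section) fixes the human-readable name for each package type, including the new `rubygems` → `RubyGems` entry. A sketch of the lookup those rows imply; the constant name and the real helper in `~/packages/shared/utils` are assumptions here:

// Hypothetical lookup shaped by the rows above; unknown types map to null.
const PACKAGE_TYPE_DISPLAY = {
  maven: 'Maven',
  npm: 'npm',
  nuget: 'NuGet',
  pypi: 'PyPI',
  rubygems: 'RubyGems',
  composer: 'Composer',
};

const getPackageTypeLabel = (packageType) => PACKAGE_TYPE_DISPLAY[packageType] || null;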
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_icon_and_name_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_icon_and_name_spec.js
new file mode 100644
index 00000000000..ef26c729691
--- /dev/null
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_icon_and_name_spec.js
@@ -0,0 +1,28 @@
+import { GlIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import InfrastructureIconAndName from '~/packages_and_registries/infrastructure_registry/components/infrastructure_icon_and_name.vue';
+
+describe('InfrastructureIconAndName', () => {
+ let wrapper;
+
+ const findIcon = () => wrapper.find(GlIcon);
+
+ const mountComponent = () => {
+ wrapper = shallowMount(InfrastructureIconAndName, {});
+ };
+
+ it('has an icon', () => {
+ mountComponent();
+
+ const icon = findIcon();
+
+ expect(icon.exists()).toBe(true);
+ expect(icon.props('name')).toBe('infrastructure-registry');
+ });
+
+ it('has the type fixed to Terraform', () => {
+ mountComponent();
+
+ expect(wrapper.text()).toBe('Terraform');
+ });
+});
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_search_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_search_spec.js
new file mode 100644
index 00000000000..119b678cc37
--- /dev/null
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_search_spec.js
@@ -0,0 +1,135 @@
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import Vuex from 'vuex';
+import component from '~/packages_and_registries/infrastructure_registry/components/infrastructure_search.vue';
+import RegistrySearch from '~/vue_shared/components/registry/registry_search.vue';
+import UrlSync from '~/vue_shared/components/url_sync.vue';
+
+const localVue = createLocalVue();
+localVue.use(Vuex);
+
+describe('Infrastructure Search', () => {
+ let wrapper;
+ let store;
+
+ const sortableFields = () => [
+ { orderBy: 'name', label: 'Name' },
+ { orderBy: 'version', label: 'Version' },
+ { orderBy: 'created_at', label: 'Published' },
+ ];
+
+ const groupSortableFields = () => [
+ { orderBy: 'name', label: 'Name' },
+ { orderBy: 'project_path', label: 'Project' },
+ { orderBy: 'version', label: 'Version' },
+ { orderBy: 'created_at', label: 'Published' },
+ ];
+
+ const findRegistrySearch = () => wrapper.findComponent(RegistrySearch);
+ const findUrlSync = () => wrapper.findComponent(UrlSync);
+
+ const createStore = (isGroupPage) => {
+ const state = {
+ config: {
+ isGroupPage,
+ },
+ sorting: {
+ orderBy: 'version',
+ sort: 'desc',
+ },
+ filter: [],
+ };
+ store = new Vuex.Store({
+ state,
+ });
+ store.dispatch = jest.fn();
+ };
+
+ const mountComponent = (isGroupPage = false) => {
+ createStore(isGroupPage);
+
+ wrapper = shallowMount(component, {
+ localVue,
+ store,
+ stubs: {
+ UrlSync,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it('has a registry search component', () => {
+ mountComponent();
+
+ expect(findRegistrySearch().exists()).toBe(true);
+ expect(findRegistrySearch().props()).toMatchObject({
+ filter: store.state.filter,
+ sorting: store.state.sorting,
+ tokens: [],
+ sortableFields: sortableFields(),
+ });
+ });
+
+ it.each`
+ isGroupPage | page | fields
+ ${false} | ${'project'} | ${sortableFields()}
+ ${true} | ${'group'} | ${groupSortableFields()}
+ `('in a $page page binds the right props', ({ isGroupPage, fields }) => {
+ mountComponent(isGroupPage);
+
+ expect(findRegistrySearch().props()).toMatchObject({
+ filter: store.state.filter,
+ sorting: store.state.sorting,
+ tokens: [],
+ sortableFields: fields,
+ });
+ });
+
+ it('on sorting:changed emits update event and calls vuex setSorting', () => {
+ const payload = { sort: 'foo' };
+
+ mountComponent();
+
+ findRegistrySearch().vm.$emit('sorting:changed', payload);
+
+ expect(store.dispatch).toHaveBeenCalledWith('setSorting', payload);
+ expect(wrapper.emitted('update')).toEqual([[]]);
+ });
+
+ it('on filter:changed calls vuex setFilter', () => {
+ const payload = ['foo'];
+
+ mountComponent();
+
+ findRegistrySearch().vm.$emit('filter:changed', payload);
+
+ expect(store.dispatch).toHaveBeenCalledWith('setFilter', payload);
+ });
+
+ it('on filter:submit emits update event', () => {
+ mountComponent();
+
+ findRegistrySearch().vm.$emit('filter:submit');
+
+ expect(wrapper.emitted('update')).toEqual([[]]);
+ });
+
+ it('has a UrlSync component', () => {
+ mountComponent();
+
+ expect(findUrlSync().exists()).toBe(true);
+ });
+
+ it('on query:changed calls updateQuery from UrlSync', () => {
+ jest.spyOn(UrlSync.methods, 'updateQuery').mockImplementation(() => {});
+
+ mountComponent();
+
+ findRegistrySearch().vm.$emit('query:changed');
+
+ expect(UrlSync.methods.updateQuery).toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_title_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_title_spec.js
new file mode 100644
index 00000000000..db6e175b054
--- /dev/null
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/infrastructure_title_spec.js
@@ -0,0 +1,75 @@
+import { shallowMount } from '@vue/test-utils';
+import component from '~/packages_and_registries/infrastructure_registry/components/infrastructure_title.vue';
+import MetadataItem from '~/vue_shared/components/registry/metadata_item.vue';
+import TitleArea from '~/vue_shared/components/registry/title_area.vue';
+
+describe('Infrastructure Title', () => {
+ let wrapper;
+ let store;
+
+ const findTitleArea = () => wrapper.find(TitleArea);
+ const findMetadataItem = () => wrapper.find(MetadataItem);
+
+ const mountComponent = (propsData = { helpUrl: 'foo' }) => {
+ wrapper = shallowMount(component, {
+ store,
+ propsData,
+ stubs: {
+ TitleArea,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('title area', () => {
+ it('exists', () => {
+ mountComponent();
+
+ expect(findTitleArea().exists()).toBe(true);
+ });
+
+ it('has the correct props', () => {
+ mountComponent();
+
+ expect(findTitleArea().props()).toMatchObject({
+ title: 'Infrastructure Registry',
+ infoMessages: [
+ {
+ text: 'Publish and share your modules. %{docLinkStart}More information%{docLinkEnd}',
+ link: 'foo',
+ },
+ ],
+ });
+ });
+ });
+
+ describe.each`
+ count | exist | text
+ ${null} | ${false} | ${''}
+ ${undefined} | ${false} | ${''}
+ ${0} | ${true} | ${'0 Modules'}
+ ${1} | ${true} | ${'1 Module'}
+ ${2} | ${true} | ${'2 Modules'}
+ `('when count is $count metadata item', ({ count, exist, text }) => {
+ beforeEach(() => {
+ mountComponent({ count, helpUrl: 'foo' });
+ });
+
+ it(`is ${exist} that it exists`, () => {
+ expect(findMetadataItem().exists()).toBe(exist);
+ });
+
+ if (exist) {
+ it('has the correct props', () => {
+ expect(findMetadataItem().props()).toMatchObject({
+ icon: 'infrastructure-registry',
+ text,
+ });
+ });
+ }
+ });
+});
diff --git a/spec/frontend/packages_and_registries/settings/group/components/maven_settings_spec.js b/spec/frontend/packages_and_registries/settings/group/components/maven_settings_spec.js
index 2433c50ff24..859d3587223 100644
--- a/spec/frontend/packages_and_registries/settings/group/components/maven_settings_spec.js
+++ b/spec/frontend/packages_and_registries/settings/group/components/maven_settings_spec.js
@@ -59,7 +59,10 @@ describe('Maven Settings', () => {
mountComponent();
expect(findToggle().exists()).toBe(true);
- expect(findToggle().props('value')).toBe(defaultProps.mavenDuplicatesAllowed);
+ expect(findToggle().props()).toMatchObject({
+ label: component.i18n.MAVEN_TOGGLE_LABEL,
+ value: defaultProps.mavenDuplicatesAllowed,
+ });
});
it('toggle emits an update event', () => {
diff --git a/spec/frontend/packages_and_registries/shared/utils_spec.js b/spec/frontend/packages_and_registries/shared/utils_spec.js
new file mode 100644
index 00000000000..bbc8791ca21
--- /dev/null
+++ b/spec/frontend/packages_and_registries/shared/utils_spec.js
@@ -0,0 +1,59 @@
+import { FILTERED_SEARCH_TERM } from '~/packages_and_registries/shared/constants';
+import {
+ getQueryParams,
+ keyValueToFilterToken,
+ searchArrayToFilterTokens,
+ extractFilterAndSorting,
+} from '~/packages_and_registries/shared/utils';
+
+describe('Packages And Registries shared utils', () => {
+ describe('getQueryParams', () => {
+ it('returns an object from a query string, with arrays', () => {
+ const queryString = 'foo=bar&baz[]=1&baz[]=2';
+
+ expect(getQueryParams(queryString)).toStrictEqual({ foo: 'bar', baz: ['1', '2'] });
+ });
+ });
+
+ describe('keyValueToFilterToken', () => {
+ it('returns an object in the correct form', () => {
+ const type = 'myType';
+ const data = 1;
+
+ expect(keyValueToFilterToken(type, data)).toStrictEqual({ type, value: { data } });
+ });
+ });
+
+ describe('searchArrayToFilterTokens', () => {
+ it('returns an array of objects in the correct form', () => {
+ const search = ['one', 'two'];
+
+ expect(searchArrayToFilterTokens(search)).toStrictEqual([
+ { type: FILTERED_SEARCH_TERM, value: { data: 'one' } },
+ { type: FILTERED_SEARCH_TERM, value: { data: 'two' } },
+ ]);
+ });
+ });
+ describe('extractFilterAndSorting', () => {
+ it.each`
+ search | type | sort | orderBy | result
+ ${['one']} | ${'myType'} | ${'asc'} | ${'foo'} | ${{ sorting: { sort: 'asc', orderBy: 'foo' }, filters: [{ type: 'type', value: { data: 'myType' } }, { type: FILTERED_SEARCH_TERM, value: { data: 'one' } }] }}
+ ${['one']} | ${null} | ${'asc'} | ${'foo'} | ${{ sorting: { sort: 'asc', orderBy: 'foo' }, filters: [{ type: FILTERED_SEARCH_TERM, value: { data: 'one' } }] }}
+ ${[]} | ${null} | ${'asc'} | ${'foo'} | ${{ sorting: { sort: 'asc', orderBy: 'foo' }, filters: [] }}
+ ${null} | ${null} | ${'asc'} | ${'foo'} | ${{ sorting: { sort: 'asc', orderBy: 'foo' }, filters: [] }}
+ ${null} | ${null} | ${null} | ${'foo'} | ${{ sorting: { orderBy: 'foo' }, filters: [] }}
+ ${null} | ${null} | ${null} | ${null} | ${{ sorting: {}, filters: [] }}
+ `(
+ 'returns sorting and filters objects in the correct form',
+ ({ search, type, sort, orderBy, result }) => {
+ const queryObject = {
+ search,
+ type,
+ sort,
+ orderBy,
+ };
+ expect(extractFilterAndSorting(queryObject)).toStrictEqual(result);
+ },
+ );
+ });
+});
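
The `extractFilterAndSorting` table above fully determines the output shape for every combination of missing and present query fields. A sketch consistent with those rows, reusing the two token helpers tested earlier in the same file; not necessarily the shipped implementation:

import {
  keyValueToFilterToken,
  searchArrayToFilterTokens,
} from '~/packages_and_registries/shared/utils';

// Sketch only: builds filters from `type` and `search`, and copies whichever
// of `orderBy`/`sort` are present into `sorting`.
export const extractFilterAndSortingSketch = ({ search, type, sort, orderBy } = {}) => {
  const filters = [];
  if (type) filters.push(keyValueToFilterToken('type', type));
  if (search) filters.push(...searchArrayToFilterTokens(search));

  const sorting = {};
  if (orderBy) sorting.orderBy = orderBy;
  if (sort) sorting.sort = sort;

  return { sorting, filters };
};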
diff --git a/spec/frontend/pager_spec.js b/spec/frontend/pager_spec.js
index ad4222e7cb2..95679a51c6d 100644
--- a/spec/frontend/pager_spec.js
+++ b/spec/frontend/pager_spec.js
@@ -32,38 +32,12 @@ describe('pager', () => {
window.history.replaceState({}, null, originalHref);
});
- it('should use data-href attribute from list element', () => {
- const href = `${TEST_HOST}/some_list.json`;
- setFixtures(`<div class="content_list" data-href="${href}"></div>`);
- Pager.init();
-
- expect(Pager.url).toBe(href);
- });
-
- it('should use current url if data-href attribute not provided', () => {
- const href = `${TEST_HOST}/some_list`;
- removeParams.mockReturnValue(href);
- Pager.init();
-
- expect(Pager.url).toBe(href);
- });
-
it('should get initial offset from query parameter', () => {
window.history.replaceState({}, null, '?offset=100');
Pager.init();
expect(Pager.offset).toBe(100);
});
-
- it('keeps extra query parameters from url', () => {
- window.history.replaceState({}, null, '?filter=test&offset=100');
- const href = `${TEST_HOST}/some_list?filter=test`;
- removeParams.mockReturnValue(href);
- Pager.init();
-
- expect(removeParams).toHaveBeenCalledWith(['limit', 'offset']);
- expect(Pager.url).toEqual(href);
- });
});
describe('getOld', () => {
@@ -164,5 +138,50 @@ describe('pager', () => {
done();
});
});
+
+ describe('has data-href attribute from list element', () => {
+ const href = `${TEST_HOST}/some_list.json`;
+
+ beforeEach(() => {
+ setFixtures(`<div class="content_list" data-href="${href}"></div>`);
+ });
+
+ it('should use data-href attribute', () => {
+ Pager.getOld();
+
+ expect(axios.get).toHaveBeenCalledWith(href, expect.any(Object));
+ });
+
+ it('should not use current url', () => {
+ Pager.getOld();
+
+ expect(removeParams).not.toHaveBeenCalled();
+ expect(removeParams).not.toHaveBeenCalledWith(href);
+ });
+ });
+
+ describe('no data-href attribute provided from list element', () => {
+ beforeEach(() => {
+ setFixtures(`<div class="content_list"></div>`);
+ });
+
+ it('should use current url', () => {
+ const href = `${TEST_HOST}/some_list`;
+ removeParams.mockReturnValue(href);
+ Pager.getOld();
+
+ expect(axios.get).toHaveBeenCalledWith(href, expect.any(Object));
+ });
+
+ it('keeps extra query parameters from url', () => {
+ window.history.replaceState({}, null, '?filter=test&offset=100');
+ const href = `${TEST_HOST}/some_list?filter=test`;
+ removeParams.mockReturnValue(href);
+ Pager.getOld();
+
+ expect(removeParams).toHaveBeenCalledWith(['limit', 'offset']);
+ expect(axios.get).toHaveBeenCalledWith(href, expect.any(Object));
+ });
+ });
});
});
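
The reorganised pager specs above now exercise the URL choice through `Pager.getOld()` rather than `Pager.init()`: prefer the list element's `data-href`, otherwise fall back to the current URL with the pagination params stripped. A sketch of that rule only, assuming `removeParams` comes from GitLab's URL utilities as the spec's mock suggests; the helper name is hypothetical:

import { removeParams } from '~/lib/utils/url_utility';

// Hypothetical helper capturing the branch the two describe blocks above cover.
const resolvePagerUrl = (listEl) =>
  listEl.dataset.href || removeParams(['limit', 'offset']);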
diff --git a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap b/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
index ddeaa2a79db..9f02e5b9432 100644
--- a/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
+++ b/spec/frontend/pages/admin/users/components/__snapshots__/delete_user_modal_spec.js.snap
@@ -50,11 +50,11 @@ exports[`User Operation confirmation modal renders modal with form included 1`]
<gl-button-stub
buttontextclasses=""
- category="primary"
+ category="secondary"
disabled="true"
icon=""
size="medium"
- variant="warning"
+ variant="danger"
>
secondaryAction
diff --git a/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js b/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
index c7293b00adf..318b6d16008 100644
--- a/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
+++ b/spec/frontend/pages/admin/users/components/delete_user_modal_spec.js
@@ -11,15 +11,15 @@ describe('User Operation confirmation modal', () => {
let wrapper;
let formSubmitSpy;
- const findButton = (variant) =>
+ const findButton = (variant, category) =>
wrapper
.findAll(GlButton)
- .filter((w) => w.attributes('variant') === variant)
+ .filter((w) => w.attributes('variant') === variant && w.attributes('category') === category)
.at(0);
const findForm = () => wrapper.find('form');
const findUsernameInput = () => wrapper.find(GlFormInput);
- const findPrimaryButton = () => findButton('danger');
- const findSecondaryButton = () => findButton('warning');
+ const findPrimaryButton = () => findButton('danger', 'primary');
+ const findSecondaryButton = () => findButton('danger', 'secondary');
const findAuthenticityToken = () => new FormData(findForm().element).get('authenticity_token');
const getUsername = () => findUsernameInput().attributes('value');
const getMethodParam = () => new FormData(findForm().element).get('_method');
diff --git a/spec/frontend/pages/admin/users/new/index_spec.js b/spec/frontend/pages/admin/users/new/index_spec.js
deleted file mode 100644
index ec9fe487030..00000000000
--- a/spec/frontend/pages/admin/users/new/index_spec.js
+++ /dev/null
@@ -1,41 +0,0 @@
-import $ from 'jquery';
-import UserInternalRegexHandler from '~/pages/admin/users/new/index';
-
-describe('UserInternalRegexHandler', () => {
- const FIXTURE = 'admin/users/new_with_internal_user_regex.html';
- let $userExternal;
- let $userEmail;
- let $warningMessage;
-
- beforeEach(() => {
- loadFixtures(FIXTURE);
- // eslint-disable-next-line no-new
- new UserInternalRegexHandler();
- $userExternal = $('#user_external');
- $userEmail = $('#user_email');
- $warningMessage = $('#warning_external_automatically_set');
- if (!$userExternal.prop('checked')) $userExternal.prop('checked', 'checked');
- });
-
- describe('Behaviour of userExternal checkbox when', () => {
- it('matches email as internal', (done) => {
- expect($warningMessage.hasClass('hidden')).toBeTruthy();
-
- $userEmail.val('test@').trigger('input');
-
- expect($userExternal.prop('checked')).toBeFalsy();
- expect($warningMessage.hasClass('hidden')).toBeFalsy();
- done();
- });
-
- it('matches email as external', (done) => {
- expect($warningMessage.hasClass('hidden')).toBeTruthy();
-
- $userEmail.val('test.ext@').trigger('input');
-
- expect($userExternal.prop('checked')).toBeTruthy();
- expect($warningMessage.hasClass('hidden')).toBeTruthy();
- done();
- });
- });
-});
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
index 694a0c2b9c1..2992c7f0624 100644
--- a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
@@ -1,7 +1,8 @@
-import { GlForm, GlFormInputGroup } from '@gitlab/ui';
+import { GlForm, GlFormInputGroup, GlFormInput } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import axios from 'axios';
import AxiosMockAdapter from 'axios-mock-adapter';
+import { kebabCase } from 'lodash';
import createFlash from '~/flash';
import httpStatus from '~/lib/utils/http_status';
import * as urlUtility from '~/lib/utils/url_utility';
@@ -59,6 +60,7 @@ describe('ForkForm component', () => {
},
stubs: {
GlFormInputGroup,
+ GlFormInput,
},
});
};
@@ -204,6 +206,37 @@ describe('ForkForm component', () => {
});
});
+ describe('project slug', () => {
+ const projectPath = 'some other project slug';
+
+ beforeEach(() => {
+ mockGetRequest();
+ createComponent({
+ projectPath,
+ });
+ });
+
+ it('initially loads slug without kebab-case transformation', () => {
+ expect(findForkSlugInput().attributes('value')).toBe(projectPath);
+ });
+
+ it('changes to kebab case when project name changes', async () => {
+ const newInput = `${projectPath}1`;
+ findForkNameInput().vm.$emit('input', newInput);
+ await wrapper.vm.$nextTick();
+
+ expect(findForkSlugInput().attributes('value')).toBe(kebabCase(newInput));
+ });
+
+ it('does not change to kebab case when project slug is changed manually', async () => {
+ const newInput = `${projectPath}1`;
+ findForkSlugInput().vm.$emit('input', newInput);
+ await wrapper.vm.$nextTick();
+
+ expect(findForkSlugInput().attributes('value')).toBe(newInput);
+ });
+ });
+
describe('visibility level', () => {
it.each`
project | namespace | privateIsDisabled | internalIsDisabled | publicIsDisabled
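
The new 'project slug' block above describes a one-way derivation: editing the name re-derives the slug with lodash `kebabCase`, while editing the slug directly leaves it alone. A minimal sketch of data and handlers with that behaviour, assuming an options-API component; the real fork_form.vue wiring may differ:

import { kebabCase } from 'lodash';

// Illustrative only: deriving the slug inside the name handler (rather than
// watching the slug itself) is what keeps a manually edited slug untouched.
export default {
  data() {
    return { fork: { name: '', slug: '' } };
  },
  methods: {
    onNameInput(name) {
      this.fork.name = name;
      this.fork.slug = kebabCase(name);
    },
    onSlugInput(slug) {
      this.fork.slug = slug;
    },
  },
};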
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_a_spec.js.snap b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_a_spec.js.snap
index 1c1327e7a4e..8b54a06ac7c 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_a_spec.js.snap
+++ b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_a_spec.js.snap
@@ -1,70 +1,322 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
-exports[`Learn GitLab Design A should render the loading state 1`] = `
-<ul>
- <li>
- <span>
- Create or import a repository
- </span>
- </li>
- <li>
- <span>
- Invite your colleagues
- </span>
- </li>
- <li>
- <span>
- <gl-link-stub
- href="http://example.com/"
+exports[`Learn GitLab Design A renders correctly 1`] = `
+<div>
+ <div
+ class="row"
+ >
+ <div
+ class="gl-mb-7 gl-ml-5"
+ >
+ <h1
+ class="gl-font-size-h1"
>
- Set up CI/CD
- </gl-link-stub>
- </span>
- </li>
- <li>
- <span>
- <gl-link-stub
- href="http://example.com/"
+ Learn GitLab
+ </h1>
+
+ <p
+ class="gl-text-gray-700 gl-mb-0"
>
- Start a free Ultimate trial
- </gl-link-stub>
- </span>
- </li>
- <li>
- <span>
- <gl-link-stub
- href="http://example.com/"
+ Ready to get started with GitLab? Follow these steps to set up your workspace, plan and commit changes, and deploy your project.
+ </p>
+ </div>
+ </div>
+
+ <div
+ class="gl-mb-3"
+ >
+ <p
+ class="gl-text-gray-500 gl-mb-2"
+ data-testid="completion-percentage"
+ >
+ 22% completed
+ </p>
+
+ <div
+ class="progress"
+ max="9"
+ value="2"
+ >
+ <div
+ aria-valuemax="9"
+ aria-valuemin="0"
+ aria-valuenow="2"
+ class="progress-bar"
+ role="progressbar"
+ style="width: 22.22222222222222%;"
>
- Add code owners
- </gl-link-stub>
- </span>
- </li>
- <li>
- <span>
- <gl-link-stub
- href="http://example.com/"
+ <!---->
+ </div>
+ </div>
+ </div>
+
+ <div
+ class="row row-cols-1 row-cols-md-3 gl-mt-5"
+ >
+ <div
+ class="col gl-mb-6"
+ >
+ <div
+ class="gl-card gl-pt-0 learn-gitlab-section-card"
>
- Add merge request approval
- </gl-link-stub>
- </span>
- </li>
- <li>
- <span>
- <gl-link-stub
- href="http://example.com/"
+ <!---->
+
+ <div
+ class="gl-card-body"
+ >
+ <div
+ class="learn-gitlab-section-card-header"
+ >
+ <img
+ src="/assets/learn_gitlab/section_workspace.svg"
+ />
+
+ <h2
+ class="gl-font-lg gl-mb-3"
+ >
+ Set up your workspace
+ </h2>
+
+ <p
+ class="gl-text-gray-700 gl-mb-6"
+ >
+ Complete these tasks first so you can enjoy GitLab's features to their fullest:
+ </p>
+ </div>
+
+ <div
+ class="gl-mb-4"
+ >
+ <span
+ class="gl-text-green-500"
+ >
+ <svg
+ aria-hidden="true"
+ class="gl-icon s16"
+ data-testid="completed-icon"
+ >
+ <use
+ href="#check-circle-filled"
+ />
+ </svg>
+
+ Invite your colleagues
+
+ </span>
+
+ <!---->
+ </div>
+ <div
+ class="gl-mb-4"
+ >
+ <span
+ class="gl-text-green-500"
+ >
+ <svg
+ aria-hidden="true"
+ class="gl-icon s16"
+ data-testid="completed-icon"
+ >
+ <use
+ href="#check-circle-filled"
+ />
+ </svg>
+
+ Create or import a repository
+
+ </span>
+
+ <!---->
+ </div>
+ <div
+ class="gl-mb-4"
+ >
+ <span>
+ <a
+ class="gl-link"
+ href="http://example.com/"
+ >
+ Set up CI/CD
+ </a>
+ </span>
+
+ <!---->
+ </div>
+ <div
+ class="gl-mb-4"
+ >
+ <span>
+ <a
+ class="gl-link"
+ href="http://example.com/"
+ >
+ Start a free Ultimate trial
+ </a>
+ </span>
+
+ <!---->
+ </div>
+ <div
+ class="gl-mb-4"
+ >
+ <span>
+ <a
+ class="gl-link"
+ href="http://example.com/"
+ >
+ Add code owners
+ </a>
+ </span>
+
+ <span
+ class="gl-font-style-italic gl-text-gray-500"
+ data-testid="trial-only"
+ >
+
+ - Trial only
+
+ </span>
+ </div>
+ <div
+ class="gl-mb-4"
+ >
+ <span>
+ <a
+ class="gl-link"
+ href="http://example.com/"
+ >
+ Add merge request approval
+ </a>
+ </span>
+
+ <span
+ class="gl-font-style-italic gl-text-gray-500"
+ data-testid="trial-only"
+ >
+
+ - Trial only
+
+ </span>
+ </div>
+ </div>
+
+ <!---->
+ </div>
+ </div>
+ <div
+ class="col gl-mb-6"
+ >
+ <div
+ class="gl-card gl-pt-0 learn-gitlab-section-card"
>
- Submit a merge request
- </gl-link-stub>
- </span>
- </li>
- <li>
- <span>
- <gl-link-stub
- href="http://example.com/"
+ <!---->
+
+ <div
+ class="gl-card-body"
+ >
+ <div
+ class="learn-gitlab-section-card-header"
+ >
+ <img
+ src="/assets/learn_gitlab/section_plan.svg"
+ />
+
+ <h2
+ class="gl-font-lg gl-mb-3"
+ >
+ Plan and execute
+ </h2>
+
+ <p
+ class="gl-text-gray-700 gl-mb-6"
+ >
+ Create a workflow for your new workspace, and learn how GitLab features work together:
+ </p>
+ </div>
+
+ <div
+ class="gl-mb-4"
+ >
+ <span>
+ <a
+ class="gl-link"
+ href="http://example.com/"
+ >
+ Create an issue
+ </a>
+ </span>
+
+ <!---->
+ </div>
+ <div
+ class="gl-mb-4"
+ >
+ <span>
+ <a
+ class="gl-link"
+ href="http://example.com/"
+ >
+ Submit a merge request
+ </a>
+ </span>
+
+ <!---->
+ </div>
+ </div>
+
+ <!---->
+ </div>
+ </div>
+ <div
+ class="col gl-mb-6"
+ >
+ <div
+ class="gl-card gl-pt-0 learn-gitlab-section-card"
>
- Run a security scan
- </gl-link-stub>
- </span>
- </li>
-</ul>
+ <!---->
+
+ <div
+ class="gl-card-body"
+ >
+ <div
+ class="learn-gitlab-section-card-header"
+ >
+ <img
+ src="/assets/learn_gitlab/section_deploy.svg"
+ />
+
+ <h2
+ class="gl-font-lg gl-mb-3"
+ >
+ Deploy
+ </h2>
+
+ <p
+ class="gl-text-gray-700 gl-mb-6"
+ >
+ Use your new GitLab workflow to deploy your application, monitor its health, and keep it secure:
+ </p>
+ </div>
+
+ <div
+ class="gl-mb-4"
+ >
+ <span>
+ <a
+ class="gl-link"
+ href="http://example.com/"
+ >
+ Run a Security scan using CI/CD
+ </a>
+ </span>
+
+ <!---->
+ </div>
+ </div>
+
+ <!---->
+ </div>
+ </div>
+ </div>
+</div>
`;
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_b_spec.js.snap b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_b_spec.js.snap
index dd899b93302..07c7f2df09e 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_b_spec.js.snap
+++ b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_b_spec.js.snap
@@ -29,21 +29,21 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
class="gl-text-gray-500 gl-mb-2"
data-testid="completion-percentage"
>
- 25% completed
+ 22% completed
</p>
<div
class="progress"
- max="8"
+ max="9"
value="2"
>
<div
- aria-valuemax="8"
+ aria-valuemax="9"
aria-valuemin="0"
aria-valuenow="2"
class="progress-bar"
role="progressbar"
- style="width: 25%;"
+ style="width: 22.22222222222222%;"
>
<!---->
</div>
@@ -94,6 +94,7 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
class="gl-text-center gl-display-flex gl-justify-content-center gl-align-items-center gl-flex-direction-column learn-gitlab-info-card-content"
>
<img
+ alt="Invite your colleagues"
src="http://example.com/images/illustration.svg"
/>
@@ -151,6 +152,7 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
class="gl-text-center gl-display-flex gl-justify-content-center gl-align-items-center gl-flex-direction-column learn-gitlab-info-card-content"
>
<img
+ alt="Create or import a repository"
src="http://example.com/images/illustration.svg"
/>
@@ -200,6 +202,7 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
class="gl-text-center gl-display-flex gl-justify-content-center gl-align-items-center gl-flex-direction-column learn-gitlab-info-card-content"
>
<img
+ alt="Set-up CI/CD"
src="http://example.com/images/illustration.svg"
/>
@@ -249,6 +252,7 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
class="gl-text-center gl-display-flex gl-justify-content-center gl-align-items-center gl-flex-direction-column learn-gitlab-info-card-content"
>
<img
+ alt="Try GitLab Ultimate for free"
src="http://example.com/images/illustration.svg"
/>
@@ -303,6 +307,7 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
class="gl-text-center gl-display-flex gl-justify-content-center gl-align-items-center gl-flex-direction-column learn-gitlab-info-card-content"
>
<img
+ alt="Add code owners"
src="http://example.com/images/illustration.svg"
/>
@@ -357,6 +362,7 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
class="gl-text-center gl-display-flex gl-justify-content-center gl-align-items-center gl-flex-direction-column learn-gitlab-info-card-content"
>
<img
+ alt="Enable require merge approvals"
src="http://example.com/images/illustration.svg"
/>
@@ -422,6 +428,57 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
class="gl-text-center gl-display-flex gl-justify-content-center gl-align-items-center gl-flex-direction-column learn-gitlab-info-card-content"
>
<img
+ alt="Create an issue"
+ src="http://example.com/images/illustration.svg"
+ />
+
+ <h6>
+ Create an issue
+ </h6>
+
+ <p
+ class="gl-font-sm gl-text-gray-700"
+ >
+ Create/import issues (tickets) to collaborate on ideas and plan work.
+ </p>
+
+ <a
+ class="gl-link"
+ href="http://example.com/"
+ rel="noopener noreferrer"
+ target="_blank"
+ >
+ Create an issue
+ </a>
+ </div>
+ </div>
+
+ <!---->
+ </div>
+ </div>
+
+ <div
+ class="col gl-mb-6"
+ >
+ <div
+ class="gl-card gl-pt-0"
+ >
+ <!---->
+
+ <div
+ class="gl-card-body"
+ >
+ <div
+ class="gl-text-right gl-h-5"
+ >
+ <!---->
+ </div>
+
+ <div
+ class="gl-text-center gl-display-flex gl-justify-content-center gl-align-items-center gl-flex-direction-column learn-gitlab-info-card-content"
+ >
+ <img
+ alt="Submit a merge request (MR)"
src="http://example.com/images/illustration.svg"
/>
@@ -487,11 +544,12 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
class="gl-text-center gl-display-flex gl-justify-content-center gl-align-items-center gl-flex-direction-column learn-gitlab-info-card-content"
>
<img
+ alt="Run a Security scan using CI/CD"
src="http://example.com/images/illustration.svg"
/>
<h6>
- Run a security scan
+ Run a Security scan using CI/CD
</h6>
<p
@@ -506,7 +564,7 @@ exports[`Learn GitLab Design B renders correctly 1`] = `
rel="noopener noreferrer"
target="_blank"
>
- Run a Security scan
+ Run a Security scan using CI/CD
</a>
</div>
</div>
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_section_card_spec.js.snap b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_section_card_spec.js.snap
new file mode 100644
index 00000000000..ad8db0822cc
--- /dev/null
+++ b/spec/frontend/pages/projects/learn_gitlab/components/__snapshots__/learn_gitlab_section_card_spec.js.snap
@@ -0,0 +1,67 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Learn GitLab Section Card renders correctly 1`] = `
+<gl-card-stub
+ bodyclass=""
+ class="gl-pt-0 learn-gitlab-section-card"
+ footerclass=""
+ headerclass=""
+>
+ <div
+ class="learn-gitlab-section-card-header"
+ >
+ <img
+ src="/assets/learn_gitlab/section_workspace.svg"
+ />
+
+ <h2
+ class="gl-font-lg gl-mb-3"
+ >
+ Set up your workspace
+ </h2>
+
+ <p
+ class="gl-text-gray-700 gl-mb-6"
+ >
+ Complete these tasks first so you can enjoy GitLab's features to their fullest:
+ </p>
+ </div>
+
+ <learn-gitlab-section-link-stub
+ action="userAdded"
+ value="[object Object]"
+ />
+ <learn-gitlab-section-link-stub
+ action="issueCreated"
+ value="[object Object]"
+ />
+ <learn-gitlab-section-link-stub
+ action="gitWrite"
+ value="[object Object]"
+ />
+ <learn-gitlab-section-link-stub
+ action="mergeRequestCreated"
+ value="[object Object]"
+ />
+ <learn-gitlab-section-link-stub
+ action="securityScanEnabled"
+ value="[object Object]"
+ />
+ <learn-gitlab-section-link-stub
+ action="pipelineCreated"
+ value="[object Object]"
+ />
+ <learn-gitlab-section-link-stub
+ action="trialStarted"
+ value="[object Object]"
+ />
+ <learn-gitlab-section-link-stub
+ action="codeOwnersEnabled"
+ value="[object Object]"
+ />
+ <learn-gitlab-section-link-stub
+ action="requiredMrApprovalsEnabled"
+ value="[object Object]"
+ />
+</gl-card-stub>
+`;
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_a_spec.js b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_a_spec.js
index 2154358de51..64ace341038 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_a_spec.js
+++ b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_a_spec.js
@@ -1,22 +1,38 @@
-import { shallowMount } from '@vue/test-utils';
+import { GlProgressBar } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
import LearnGitlabA from '~/pages/projects/learn_gitlab/components/learn_gitlab_a.vue';
import { testActions } from './mock_data';
describe('Learn GitLab Design A', () => {
let wrapper;
+ const createWrapper = () => {
+ wrapper = mount(LearnGitlabA, { propsData: { actions: testActions } });
+ };
+
+ beforeEach(() => {
+ createWrapper();
+ });
+
afterEach(() => {
wrapper.destroy();
wrapper = null;
});
- const createWrapper = () => {
- wrapper = shallowMount(LearnGitlabA, { propsData: { actions: testActions } });
- };
+ it('renders correctly', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
- it('should render the loading state', () => {
- createWrapper();
+ it('renders the progress percentage', () => {
+ const text = wrapper.find('[data-testid="completion-percentage"]').text();
- expect(wrapper.element).toMatchSnapshot();
+ expect(text).toBe('22% completed');
+ });
+
+ it('renders the progress bar with correct values', () => {
+ const progressBar = wrapper.findComponent(GlProgressBar);
+
+ expect(progressBar.attributes('value')).toBe('2');
+ expect(progressBar.attributes('max')).toBe('9');
});
});
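
Both learn_gitlab snapshots and the updated assertions above settle on '22% completed' with a progress bar of 2 out of 9, i.e. a plain completed-over-total ratio across the nine onboarding actions. The arithmetic, for reference:

// 2 completed actions out of 9 -> Math.round((2 / 9) * 100) === 22
const completionPercentage = (actions) => {
  const values = Object.values(actions);
  const completed = values.filter((action) => action.completed).length;
  return Math.round((completed / values.length) * 100);
};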
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_b_spec.js b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_b_spec.js
index fbb989fae32..207944bfa1f 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_b_spec.js
+++ b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_b_spec.js
@@ -26,13 +26,13 @@ describe('Learn GitLab Design B', () => {
it('renders the progress percentage', () => {
const text = wrapper.find('[data-testid="completion-percentage"]').text();
- expect(text).toEqual('25% completed');
+ expect(text).toBe('22% completed');
});
it('renders the progress bar with correct values', () => {
- const progressBar = wrapper.find(GlProgressBar);
+ const progressBar = wrapper.findComponent(GlProgressBar);
expect(progressBar.attributes('value')).toBe('2');
- expect(progressBar.attributes('max')).toBe('8');
+ expect(progressBar.attributes('max')).toBe('9');
});
});
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_card_spec.js b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_card_spec.js
new file mode 100644
index 00000000000..de6aca08235
--- /dev/null
+++ b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_card_spec.js
@@ -0,0 +1,26 @@
+import { shallowMount } from '@vue/test-utils';
+import LearnGitlabSectionCard from '~/pages/projects/learn_gitlab/components/learn_gitlab_section_card.vue';
+import { testActions } from './mock_data';
+
+const defaultSection = 'workspace';
+
+describe('Learn GitLab Section Card', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const createWrapper = () => {
+ wrapper = shallowMount(LearnGitlabSectionCard, {
+ propsData: { section: defaultSection, actions: testActions },
+ });
+ };
+
+ it('renders correctly', () => {
+ createWrapper({ completed: false });
+
+ expect(wrapper.element).toMatchSnapshot();
+ });
+});
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js
new file mode 100644
index 00000000000..882d233a239
--- /dev/null
+++ b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js
@@ -0,0 +1,49 @@
+import { shallowMount } from '@vue/test-utils';
+import LearnGitlabSectionLink from '~/pages/projects/learn_gitlab/components/learn_gitlab_section_link.vue';
+
+const defaultAction = 'gitWrite';
+const defaultProps = {
+ title: 'Create Repository',
+ description: 'Some description',
+ url: 'https://example.com',
+ completed: false,
+};
+
+describe('Learn GitLab Section Link', () => {
+ let wrapper;
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ const createWrapper = (action = defaultAction, props = {}) => {
+ wrapper = shallowMount(LearnGitlabSectionLink, {
+ propsData: { action, value: { ...defaultProps, ...props } },
+ });
+ };
+
+ it('renders no icon when not completed', () => {
+ createWrapper(undefined, { completed: false });
+
+ expect(wrapper.find('[data-testid="completed-icon"]').exists()).toBe(false);
+ });
+
+ it('renders the completion icon when completed', () => {
+ createWrapper(undefined, { completed: true });
+
+ expect(wrapper.find('[data-testid="completed-icon"]').exists()).toBe(true);
+ });
+
+ it('does not render the trial-only badge when it is not required', () => {
+ createWrapper();
+
+ expect(wrapper.find('[data-testid="trial-only"]').exists()).toBe(false);
+ });
+
+ it('renders trial only when trial is required', () => {
+ createWrapper('codeOwnersEnabled');
+
+ expect(wrapper.find('[data-testid="trial-only"]').exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js b/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js
index caac667e2b1..d6ee2b00c8e 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js
+++ b/spec/frontend/pages/projects/learn_gitlab/components/mock_data.js
@@ -39,4 +39,9 @@ export const testActions = {
completed: false,
svg: 'http://example.com/images/illustration.svg',
},
+ issueCreated: {
+ url: 'http://example.com/',
+ completed: false,
+ svg: 'http://example.com/images/illustration.svg',
+ },
};
diff --git a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
index bee628c3a56..878721666ff 100644
--- a/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
+++ b/spec/frontend/pages/projects/shared/permissions/components/settings_panel_spec.js
@@ -46,6 +46,7 @@ const defaultProps = {
pagesHelpPath: '/help/user/project/pages/introduction#gitlab-pages-access-control',
packagesAvailable: false,
packagesHelpPath: '/help/user/packages/index',
+ requestCveAvailable: true,
};
describe('Settings Panel', () => {
@@ -76,6 +77,7 @@ describe('Settings Panel', () => {
const findRepositoryFeatureSetting = () =>
findRepositoryFeatureProjectRow().find(projectFeatureSetting);
const findProjectVisibilitySettings = () => wrapper.find({ ref: 'project-visibility-settings' });
+ const findIssuesSettingsRow = () => wrapper.find({ ref: 'issues-settings' });
const findAnalyticsRow = () => wrapper.find({ ref: 'analytics-settings' });
const findProjectVisibilityLevelInput = () => wrapper.find('[name="project[visibility_level]"]');
const findRequestAccessEnabledInput = () =>
@@ -174,6 +176,16 @@ describe('Settings Panel', () => {
});
});
+ describe('Issues settings', () => {
+ it('has label for CVE request toggle', () => {
+ wrapper = mountComponent();
+
+ expect(findIssuesSettingsRow().findComponent(GlToggle).props('label')).toBe(
+ settingsPanel.i18n.cve_request_toggle_label,
+ );
+ });
+ });
+
describe('Repository', () => {
it('should set the repository help text when the visibility level is set to private', () => {
wrapper = mountComponent({ currentSettings: { visibilityLevel: visibilityOptions.PRIVATE } });
@@ -228,7 +240,7 @@ describe('Settings Panel', () => {
});
});
- describe('Pipelines', () => {
+ describe('CI/CD', () => {
it('should enable the builds access level input when the repository is enabled', () => {
wrapper = mountComponent({
currentSettings: { repositoryAccessLevel: featureAccessLevel.EVERYONE },
@@ -304,6 +316,17 @@ describe('Settings Panel', () => {
expect(findContainerRegistryEnabledInput().props('disabled')).toBe(true);
});
+
+ it('has label for the toggle', () => {
+ wrapper = mountComponent({
+ currentSettings: { visibilityLevel: visibilityOptions.PUBLIC },
+ registryAvailable: true,
+ });
+
+ expect(findContainerRegistrySettings().findComponent(GlToggle).props('label')).toBe(
+ settingsPanel.i18n.containerRegistryLabel,
+ );
+ });
});
describe('Git Large File Storage', () => {
@@ -342,6 +365,15 @@ describe('Settings Panel', () => {
expect(findLFSFeatureToggle().props('disabled')).toBe(true);
});
+ it('has label for toggle', () => {
+ wrapper = mountComponent({
+ currentSettings: { repositoryAccessLevel: featureAccessLevel.EVERYONE },
+ lfsAvailable: true,
+ });
+
+ expect(findLFSFeatureToggle().props('label')).toBe(settingsPanel.i18n.lfsLabel);
+ });
+
it('should not change lfsEnabled when disabling the repository', async () => {
// mount over shallowMount, because we are aiming to test rendered state of toggle
wrapper = mountComponent({ currentSettings: { lfsEnabled: true } }, mount);
@@ -432,6 +464,17 @@ describe('Settings Panel', () => {
expect(findPackagesEnabledInput().props('disabled')).toBe(true);
});
+
+ it('has label for toggle', () => {
+ wrapper = mountComponent({
+ currentSettings: { repositoryAccessLevel: featureAccessLevel.EVERYONE },
+ packagesAvailable: true,
+ });
+
+ expect(findPackagesEnabledInput().findComponent(GlToggle).props('label')).toBe(
+ settingsPanel.i18n.packagesLabel,
+ );
+ });
});
describe('Pages', () => {
diff --git a/spec/frontend/pages/shared/wikis/wiki_alert_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_alert_spec.js
index 6a18473b1a7..6a18473b1a7 100644
--- a/spec/frontend/pages/shared/wikis/wiki_alert_spec.js
+++ b/spec/frontend/pages/shared/wikis/components/wiki_alert_spec.js
diff --git a/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
new file mode 100644
index 00000000000..8ab0b87d2ee
--- /dev/null
+++ b/spec/frontend/pages/shared/wikis/components/wiki_form_spec.js
@@ -0,0 +1,222 @@
+import { mount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import WikiForm from '~/pages/shared/wikis/components/wiki_form.vue';
+
+describe('WikiForm', () => {
+ let wrapper;
+
+ const findForm = () => wrapper.find('form');
+ const findTitle = () => wrapper.find('#wiki_title');
+ const findFormat = () => wrapper.find('#wiki_format');
+ const findContent = () => wrapper.find('#wiki_content');
+ const findMessage = () => wrapper.find('#wiki_message');
+ const findSubmitButton = () => wrapper.findByTestId('wiki-submit-button');
+ const findCancelButton = () => wrapper.findByTestId('wiki-cancel-button');
+ const findTitleHelpLink = () => wrapper.findByTestId('wiki-title-help-link');
+ const findMarkdownHelpLink = () => wrapper.findByTestId('wiki-markdown-help-link');
+
+ const pageInfoNew = {
+ persisted: false,
+ uploadsPath: '/project/path/-/wikis/attachments',
+ wikiPath: '/project/path/-/wikis',
+ helpPath: '/help/user/project/wiki/index',
+ markdownHelpPath: '/help/user/markdown',
+ markdownPreviewPath: '/project/path/-/wikis/.md/preview-markdown',
+ createPath: '/project/path/-/wikis/new',
+ };
+
+ const pageInfoPersisted = {
+ ...pageInfoNew,
+ persisted: true,
+
+ title: 'My page',
+ content: 'My page content',
+ format: 'markdown',
+ path: '/project/path/-/wikis/home',
+ };
+
+ function createWrapper(persisted = false, pageInfo = {}) {
+ wrapper = extendedWrapper(
+ mount(
+ WikiForm,
+ {
+ provide: {
+ formatOptions: {
+ Markdown: 'markdown',
+ RDoc: 'rdoc',
+ AsciiDoc: 'asciidoc',
+ Org: 'org',
+ },
+ pageInfo: {
+ ...(persisted ? pageInfoPersisted : pageInfoNew),
+ ...pageInfo,
+ },
+ },
+ },
+ { attachToDocument: true },
+ ),
+ );
+
+ jest.spyOn(wrapper.vm, 'onBeforeUnload');
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ it.each`
+ title | persisted | message
+ ${'my page'} | ${false} | ${'Create my page'}
+ ${'my-page'} | ${false} | ${'Create my page'}
+ ${'somedir/my-page'} | ${false} | ${'Create somedir/my page'}
+ ${'my-page'} | ${true} | ${'Update my page'}
+ `(
+ 'updates the commit message to $message when title is $title and persisted=$persisted',
+ async ({ title, message, persisted }) => {
+ createWrapper(persisted);
+
+ findTitle().setValue(title);
+
+ await wrapper.vm.$nextTick();
+
+ expect(findMessage().element.value).toBe(message);
+ },
+ );
+
+ it('sets the commit message to "Update My page" on first load when the page is persisted', async () => {
+ createWrapper(true);
+
+ await wrapper.vm.$nextTick();
+
+ expect(findMessage().element.value).toBe('Update My page');
+ });
+
+ it.each`
+ value | text
+ ${'markdown'} | ${'[Link Title](page-slug)'}
+ ${'rdoc'} | ${'{Link title}[link:page-slug]'}
+ ${'asciidoc'} | ${'link:page-slug[Link title]'}
+ ${'org'} | ${'[[page-slug]]'}
+ `('updates the link help message when format=$value is selected', async ({ value, text }) => {
+ createWrapper();
+
+ findFormat().find(`option[value=${value}]`).setSelected();
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.text()).toContain(text);
+ });
+
+ it('starts with no unload warning', async () => {
+ createWrapper();
+
+ await wrapper.vm.$nextTick();
+
+ window.dispatchEvent(new Event('beforeunload'));
+
+ expect(wrapper.vm.onBeforeUnload).not.toHaveBeenCalled();
+ });
+
+ it.each`
+ persisted | titleHelpText | titleHelpLink
+ ${true} | ${'You can move this page by adding the path to the beginning of the title.'} | ${'/help/user/project/wiki/index#move-a-wiki-page'}
+ ${false} | ${'You can specify the full path for the new file. We will automatically create any missing directories.'} | ${'/help/user/project/wiki/index#create-a-new-wiki-page'}
+ `(
'shows the appropriate title help text and help link when persisted=$persisted',
+ async ({ persisted, titleHelpLink, titleHelpText }) => {
+ createWrapper(persisted);
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.text()).toContain(titleHelpText);
+ expect(findTitleHelpLink().attributes().href).toEqual(titleHelpLink);
+ },
+ );
+
+ it('shows correct link for wiki specific markdown docs', async () => {
+ createWrapper();
+
+ await wrapper.vm.$nextTick();
+
+ expect(findMarkdownHelpLink().attributes().href).toEqual(
+ '/help/user/markdown#wiki-specific-markdown',
+ );
+ });
+
+ describe('when wiki content is updated', () => {
+ beforeEach(() => {
+ createWrapper();
+
+ const input = findContent();
+ input.setValue('Lorem ipsum dolor sit!');
+ input.element.dispatchEvent(new Event('input'));
+
+ return wrapper.vm.$nextTick();
+ });
+
+ it('sets before unload warning', () => {
+ window.dispatchEvent(new Event('beforeunload'));
+
+ expect(wrapper.vm.onBeforeUnload).toHaveBeenCalled();
+ });
+
+ it('when form submitted, unsets before unload warning', async () => {
+ findForm().element.dispatchEvent(new Event('submit'));
+
+ await wrapper.vm.$nextTick();
+
+ window.dispatchEvent(new Event('beforeunload'));
+
+ expect(wrapper.vm.onBeforeUnload).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('submit button state', () => {
+ it.each`
+ title | content | buttonState | disabledAttr
+ ${'something'} | ${'something'} | ${'enabled'} | ${undefined}
+ ${''} | ${'something'} | ${'disabled'} | ${'disabled'}
+ ${'something'} | ${''} | ${'disabled'} | ${'disabled'}
+ ${''} | ${''} | ${'disabled'} | ${'disabled'}
+ ${' '} | ${' '} | ${'disabled'} | ${'disabled'}
+ `(
+ "when title='$title', content='$content', then the button is $buttonState'",
+ async ({ title, content, disabledAttr }) => {
+ createWrapper();
+
+ findTitle().setValue(title);
+ findContent().setValue(content);
+
+ await wrapper.vm.$nextTick();
+
+ expect(findSubmitButton().attributes().disabled).toBe(disabledAttr);
+ },
+ );
+
+ it.each`
+ persisted | buttonLabel
+ ${true} | ${'Save changes'}
+ ${false} | ${'Create page'}
+ `('when persisted=$persisted, label is set to $buttonLabel', ({ persisted, buttonLabel }) => {
+ createWrapper(persisted);
+
+ expect(findSubmitButton().text()).toBe(buttonLabel);
+ });
+ });
+
+ describe('cancel button state', () => {
+ it.each`
+ persisted | redirectLink
+ ${false} | ${'/project/path/-/wikis'}
+ ${true} | ${'/project/path/-/wikis/home'}
+ `(
'when persisted=$persisted, redirects the user to the appropriate path',
+ ({ persisted, redirectLink }) => {
+ createWrapper(persisted);
+
+ expect(findCancelButton().attributes().href).toEqual(redirectLink);
+ },
+ );
+ });
+});
diff --git a/spec/frontend/performance_bar/components/detailed_metric_spec.js b/spec/frontend/performance_bar/components/detailed_metric_spec.js
index 6ddd047d549..c35bd772c86 100644
--- a/spec/frontend/performance_bar/components/detailed_metric_spec.js
+++ b/spec/frontend/performance_bar/components/detailed_metric_spec.js
@@ -1,24 +1,40 @@
import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import { trimText } from 'helpers/text_helper';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import DetailedMetric from '~/performance_bar/components/detailed_metric.vue';
import RequestWarning from '~/performance_bar/components/request_warning.vue';
+import { sortOrders } from '~/performance_bar/constants';
describe('detailedMetric', () => {
let wrapper;
- const createComponent = (props) => {
- wrapper = shallowMount(DetailedMetric, {
- propsData: {
- ...props,
- },
- });
+ const defaultProps = {
+ currentRequest: {},
+ metric: 'gitaly',
+ header: 'Gitaly calls',
+ keys: ['feature', 'request'],
+ };
+
+ const createComponent = (props = {}) => {
+ wrapper = extendedWrapper(
+ shallowMount(DetailedMetric, {
+ propsData: { ...defaultProps, ...props },
+ }),
+ );
};
const findAllTraceBlocks = () => wrapper.findAll('pre');
const findTraceBlockAtIndex = (index) => findAllTraceBlocks().at(index);
- const findExpandBacktraceBtns = () => wrapper.findAll('[data-testid="backtrace-expand-btn"]');
+ const findExpandBacktraceBtns = () => wrapper.findAllByTestId('backtrace-expand-btn');
const findExpandedBacktraceBtnAtIndex = (index) => findExpandBacktraceBtns().at(index);
+ const findDetailsLabel = () => wrapper.findByTestId('performance-bar-details-label');
+ const findSortOrderSwitcher = () => wrapper.findByTestId('performance-bar-sort-order');
+ const findEmptyDetailNotice = () => wrapper.findByTestId('performance-bar-empty-detail-notice');
+ const findAllDetailDurations = () =>
+ wrapper.findAllByTestId('performance-item-duration').wrappers.map((w) => w.text());
+ const findAllSummaryItems = () =>
+ wrapper.findAllByTestId('performance-bar-summary-item').wrappers.map((w) => w.text());
afterEach(() => {
wrapper.destroy();
@@ -26,13 +42,7 @@ describe('detailedMetric', () => {
describe('when the current request has no details', () => {
beforeEach(() => {
- createComponent({
- currentRequest: {},
- metric: 'gitaly',
- header: 'Gitaly calls',
- details: 'details',
- keys: ['feature', 'request'],
- });
+ createComponent();
});
it('does not render the element', () => {
@@ -42,36 +52,171 @@ describe('detailedMetric', () => {
describe('when the current request has details', () => {
const requestDetails = [
- { duration: '100', feature: 'find_commit', request: 'abcdef', backtrace: ['hello', 'world'] },
{
- duration: '23',
+ duration: 23,
feature: 'rebase_in_progress',
request: '',
backtrace: ['other', 'example'],
},
+ { duration: 100, feature: 'find_commit', request: 'abcdef', backtrace: ['hello', 'world'] },
];
- describe('with a default metric name', () => {
+ describe('with an empty detail', () => {
+ beforeEach(() => {
+ createComponent({
+ currentRequest: {
+ details: {
+ gitaly: {
+ duration: '0ms',
+ calls: 0,
+ details: [],
+ warnings: [],
+ },
+ },
+ },
+ });
+ });
+
+ it('displays an empty title', () => {
+ expect(findDetailsLabel().text()).toBe('0');
+ });
+
+ it('displays an empty modal', () => {
+ expect(findEmptyDetailNotice().text()).toContain('No gitaly calls for this request');
+ });
+
+ it('does not display sort by switcher', () => {
+ expect(findSortOrderSwitcher().exists()).toBe(false);
+ });
+ });
+
+ describe('when the details have a summary field', () => {
beforeEach(() => {
createComponent({
currentRequest: {
details: {
gitaly: {
duration: '123ms',
- calls: '456',
+ calls: 456,
+ details: requestDetails,
+ warnings: ['gitaly calls: 456 over 30'],
+ summary: {
+ 'In controllers': 100,
+ 'In middlewares': 20,
+ },
+ },
+ },
+ },
+ });
+ });
+
+ it('displays a summary section', () => {
+ expect(findAllSummaryItems()).toEqual([
+ 'Total 456',
+ 'Total duration 123ms',
+ 'In controllers 100',
+ 'In middlewares 20',
+ ]);
+ });
+ });
+
+ describe('when the details have summaryOptions option', () => {
+ const gitalyDetails = {
+ duration: '123ms',
+ calls: 456,
+ details: requestDetails,
+ warnings: ['gitaly calls: 456 over 30'],
+ };
+
+ describe('when the details have summaryOptions > hideTotal option', () => {
+ beforeEach(() => {
+ createComponent({
+ currentRequest: {
+ details: {
+ gitaly: { ...gitalyDetails, summaryOptions: { hideTotal: true } },
+ },
+ },
+ });
+ });
+
+ it('displays a summary section', () => {
+ expect(findAllSummaryItems()).toEqual(['Total duration 123ms']);
+ });
+ });
+
+ describe('when the details have summaryOptions > hideDuration option', () => {
+ beforeEach(() => {
+ createComponent({
+ currentRequest: {
+ details: {
+ gitaly: { ...gitalyDetails, summaryOptions: { hideDuration: true } },
+ },
+ },
+ });
+ });
+
+ it('displays a summary section', () => {
+ expect(findAllSummaryItems()).toEqual(['Total 456']);
+ });
+ });
+
+ describe('when the details have both summary and summaryOptions field', () => {
+ beforeEach(() => {
+ createComponent({
+ currentRequest: {
+ details: {
+ gitaly: {
+ ...gitalyDetails,
+ summary: {
+ 'In controllers': 100,
+ 'In middlewares': 20,
+ },
+ summaryOptions: {
+ hideDuration: true,
+ hideTotal: true,
+ },
+ },
+ },
+ },
+ });
+ });
+
+ it('displays a summary section', () => {
+ expect(findAllSummaryItems()).toEqual(['In controllers 100', 'In middlewares 20']);
+ });
+ });
+ });
+
+ describe("when the details don't have a start field", () => {
+ beforeEach(() => {
+ createComponent({
+ currentRequest: {
+ details: {
+ gitaly: {
+ duration: '123ms',
+ calls: 456,
details: requestDetails,
warnings: ['gitaly calls: 456 over 30'],
},
},
},
- metric: 'gitaly',
- header: 'Gitaly calls',
- keys: ['feature', 'request'],
});
});
- it('displays details', () => {
- expect(wrapper.text().replace(/\s+/g, ' ')).toContain('123ms / 456');
+ it('displays details header', () => {
+ expect(findDetailsLabel().text()).toBe('123ms / 456');
+ });
+
+ it('displays a basic summary section', () => {
+ expect(findAllSummaryItems()).toEqual(['Total 456', 'Total duration 123ms']);
+ });
+
+ it('sorts the details by descending duration order', () => {
+ expect(findAllDetailDurations()).toEqual(['100ms', '23ms']);
+ });
+
+ it('does not display sort by switcher', () => {
+ expect(findSortOrderSwitcher().exists()).toBe(false);
});
it('adds a modal with a table of the details', () => {
@@ -119,17 +264,75 @@ describe('detailedMetric', () => {
findExpandedBacktraceBtnAtIndex(0).vm.$emit('click');
await nextTick();
expect(findAllTraceBlocks()).toHaveLength(1);
- expect(findTraceBlockAtIndex(0).text()).toContain(requestDetails[0].backtrace[0]);
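+ // details render sorted by descending duration, so requestDetails[1] (the 100ms call) appears first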
+ expect(findTraceBlockAtIndex(0).text()).toContain(requestDetails[1].backtrace[0]);
secondExpandButton.vm.$emit('click');
await nextTick();
expect(findAllTraceBlocks()).toHaveLength(2);
- expect(findTraceBlockAtIndex(1).text()).toContain(requestDetails[1].backtrace[0]);
+ expect(findTraceBlockAtIndex(1).text()).toContain(requestDetails[0].backtrace[0]);
secondExpandButton.vm.$emit('click');
await nextTick();
expect(findAllTraceBlocks()).toHaveLength(1);
- expect(findTraceBlockAtIndex(0).text()).toContain(requestDetails[0].backtrace[0]);
+ expect(findTraceBlockAtIndex(0).text()).toContain(requestDetails[1].backtrace[0]);
+ });
+ });
+
+ describe('when the details have a start field', () => {
+ const requestDetailsWithStart = [
+ {
+ start: '2021-03-18 11:41:49.846356 +0700',
+ duration: 23,
+ feature: 'rebase_in_progress',
+ request: '',
+ },
+ {
+ start: '2021-03-18 11:42:11.645711 +0700',
+ duration: 75,
+ feature: 'find_commit',
+ request: 'abcdef',
+ },
+ {
+ start: '2021-03-18 11:42:10.645711 +0700',
+ duration: 100,
+ feature: 'find_commit',
+ request: 'abcdef',
+ },
+ ];
+
+ beforeEach(() => {
+ createComponent({
+ currentRequest: {
+ details: {
+ gitaly: {
+ duration: '123ms',
+ calls: 456,
+ details: requestDetailsWithStart,
+ warnings: ['gitaly calls: 456 over 30'],
+ },
+ },
+ },
+ metric: 'gitaly',
+ header: 'Gitaly calls',
+ keys: ['feature', 'request'],
+ });
+ });
+
+ it('sorts the details by descending duration order', () => {
+ expect(findAllDetailDurations()).toEqual(['100ms', '75ms', '23ms']);
+ });
+
+ it('displays sort by switcher', () => {
+ expect(findSortOrderSwitcher().exists()).toBe(true);
+ });
+
+ it('allows switching sorting orders', async () => {
+ findSortOrderSwitcher().vm.$emit('input', sortOrders.CHRONOLOGICAL);
+ await nextTick();
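+ // chronological order sorts by start timestamp rather than by duration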
+ expect(findAllDetailDurations()).toEqual(['23ms', '100ms', '75ms']);
+ findSortOrderSwitcher().vm.$emit('input', sortOrders.DURATION);
+ await nextTick();
+ expect(findAllDetailDurations()).toEqual(['100ms', '75ms', '23ms']);
});
});
@@ -145,10 +348,7 @@ describe('detailedMetric', () => {
},
},
},
- metric: 'gitaly',
title: 'custom',
- header: 'Gitaly calls',
- keys: ['feature', 'request'],
});
});
@@ -156,31 +356,39 @@ describe('detailedMetric', () => {
expect(wrapper.text()).toContain('custom');
});
});
- });
- describe('when the details has no duration', () => {
- beforeEach(() => {
- createComponent({
- currentRequest: {
- details: {
- bullet: {
- calls: '456',
- details: [{ notification: 'notification', backtrace: 'backtrace' }],
+ describe('when the details has no duration', () => {
+ beforeEach(() => {
+ createComponent({
+ currentRequest: {
+ details: {
+ bullet: {
+ calls: '456',
+ details: [{ notification: 'notification', backtrace: 'backtrace' }],
+ },
},
},
- },
- metric: 'bullet',
- header: 'Bullet notifications',
- keys: ['notification'],
+ metric: 'bullet',
+ header: 'Bullet notifications',
+ keys: ['notification'],
+ });
});
- });
- it('renders only the number of calls', async () => {
- expect(trimText(wrapper.text())).toEqual('456 notification bullet');
+ it('displays calls in the label', () => {
+ expect(findDetailsLabel().text()).toBe('456');
+ });
+
+ it('displays a basic summary section', () => {
+ expect(findAllSummaryItems()).toEqual(['Total 456']);
+ });
+
+ it('renders only the number of calls', async () => {
+ expect(trimText(wrapper.text())).toContain('notification bullet');
- findExpandedBacktraceBtnAtIndex(0).vm.$emit('click');
- await nextTick();
- expect(trimText(wrapper.text())).toEqual('456 notification backtrace bullet');
+ findExpandedBacktraceBtnAtIndex(0).vm.$emit('click');
+ await nextTick();
+ expect(trimText(wrapper.text())).toContain('notification backtrace bullet');
+ });
});
});
});
diff --git a/spec/frontend/performance_bar/stores/performance_bar_store_spec.js b/spec/frontend/performance_bar/stores/performance_bar_store_spec.js
index 94dc1237cb0..b7324ba2f6e 100644
--- a/spec/frontend/performance_bar/stores/performance_bar_store_spec.js
+++ b/spec/frontend/performance_bar/stores/performance_bar_store_spec.js
@@ -59,4 +59,44 @@ describe('PerformanceBarStore', () => {
expect(store.findRequest('id').details.test.calls).toEqual(123);
});
});
+
+ describe('canTrackRequest', () => {
+ let store;
+
+ beforeEach(() => {
+ store = new PerformanceBarStore();
+ });
+
+ it('limits to 10 requests for GraphQL', () => {
+ expect(store.canTrackRequest('https://gitlab.com/api/graphql')).toBe(true);
+
+ store.addRequest('0', 'https://gitlab.com/api/graphql');
+ store.addRequest('1', 'https://gitlab.com/api/graphql');
+ store.addRequest('2', 'https://gitlab.com/api/graphql');
+ store.addRequest('3', 'https://gitlab.com/api/graphql');
+ store.addRequest('4', 'https://gitlab.com/api/graphql');
+ store.addRequest('5', 'https://gitlab.com/api/graphql');
+ store.addRequest('6', 'https://gitlab.com/api/graphql');
+ store.addRequest('7', 'https://gitlab.com/api/graphql');
+ store.addRequest('8', 'https://gitlab.com/api/graphql');
+
+ expect(store.canTrackRequest('https://gitlab.com/api/graphql')).toBe(true);
+
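+ // the tenth GraphQL request reaches the tracking limit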
+ store.addRequest('9', 'https://gitlab.com/api/graphql');
+
+ expect(store.canTrackRequest('https://gitlab.com/api/graphql')).toBe(false);
+ });
+
+ it('limits to 2 requests for all other URLs', () => {
+ expect(store.canTrackRequest('https://gitlab.com/api/v4/users/1')).toBe(true);
+
+ store.addRequest('a', 'https://gitlab.com/api/v4/users/1');
+
+ expect(store.canTrackRequest('https://gitlab.com/api/v4/users/1')).toBe(true);
+
+ store.addRequest('b', 'https://gitlab.com/api/v4/users/1');
+
+ expect(store.canTrackRequest('https://gitlab.com/api/v4/users/1')).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/pipeline_editor/components/code_snippet_alert/code_snippet_alert_spec.js b/spec/frontend/pipeline_editor/components/code_snippet_alert/code_snippet_alert_spec.js
new file mode 100644
index 00000000000..d03f12bc249
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/code_snippet_alert/code_snippet_alert_spec.js
@@ -0,0 +1,61 @@
+import { within } from '@testing-library/dom';
+import { mount } from '@vue/test-utils';
+import { merge } from 'lodash';
+import { TEST_HOST } from 'helpers/test_constants';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import CodeSnippetAlert from '~/pipeline_editor/components/code_snippet_alert/code_snippet_alert.vue';
+import { CODE_SNIPPET_SOURCE_API_FUZZING } from '~/pipeline_editor/components/code_snippet_alert/constants';
+
+const apiFuzzingConfigurationPath = '/namespace/project/-/security/configuration/api_fuzzing';
+
+describe('EE - CodeSnippetAlert', () => {
+ let wrapper;
+
+ const createWrapper = (options) => {
+ wrapper = extendedWrapper(
+ mount(
+ CodeSnippetAlert,
+ merge(
+ {
+ provide: {
+ configurationPaths: {
+ [CODE_SNIPPET_SOURCE_API_FUZZING]: apiFuzzingConfigurationPath,
+ },
+ },
+ propsData: {
+ source: CODE_SNIPPET_SOURCE_API_FUZZING,
+ },
+ },
+ options,
+ ),
+ ),
+ );
+ };
+
+ const withinComponent = () => within(wrapper.element);
+ const findDocsLink = () => withinComponent().getByRole('link', { name: /read documentation/i });
+ const findConfigurationLink = () =>
+ withinComponent().getByRole('link', { name: /Go back to configuration/i });
+
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it("provides a link to the feature's documentation", () => {
+ const docsLink = findDocsLink();
+
+ expect(docsLink).not.toBe(null);
+ expect(docsLink.href).toBe(`${TEST_HOST}/help/user/application_security/api_fuzzing/index`);
+ });
+
+ it("provides a link to the feature's configuration form", () => {
+ const configurationLink = findConfigurationLink();
+
+ expect(configurationLink).not.toBe(null);
+ expect(configurationLink.href).toBe(TEST_HOST + apiFuzzingConfigurationPath);
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/editor/ci_config_merged_preview_spec.js b/spec/frontend/pipeline_editor/components/editor/ci_config_merged_preview_spec.js
index 866069f337b..fb191fccb0d 100644
--- a/spec/frontend/pipeline_editor/components/editor/ci_config_merged_preview_spec.js
+++ b/spec/frontend/pipeline_editor/components/editor/ci_config_merged_preview_spec.js
@@ -1,10 +1,8 @@
-import { GlAlert, GlIcon } from '@gitlab/ui';
+import { GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { EDITOR_READY_EVENT } from '~/editor/constants';
import CiConfigMergedPreview from '~/pipeline_editor/components/editor/ci_config_merged_preview.vue';
-import { CI_CONFIG_STATUS_INVALID } from '~/pipeline_editor/constants';
-import { INVALID_CI_CONFIG } from '~/pipelines/constants';
import { mockLintResponse, mockCiConfigPath } from '../../mock_data';
describe('Text editor component', () => {
@@ -33,28 +31,11 @@ describe('Text editor component', () => {
});
};
- const findAlert = () => wrapper.findComponent(GlAlert);
const findIcon = () => wrapper.findComponent(GlIcon);
const findEditor = () => wrapper.findComponent(MockEditorLite);
afterEach(() => {
wrapper.destroy();
- wrapper = null;
- });
-
- describe('when status is invalid', () => {
- beforeEach(() => {
- createComponent({ props: { ciConfigData: { status: CI_CONFIG_STATUS_INVALID } } });
- });
-
- it('show an error message', () => {
- expect(findAlert().exists()).toBe(true);
- expect(findAlert().text()).toBe(wrapper.vm.$options.errorTexts[INVALID_CI_CONFIG]);
- });
-
- it('hides the editor', () => {
- expect(findEditor().exists()).toBe(false);
- });
});
describe('when status is valid', () => {
diff --git a/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js b/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js
new file mode 100644
index 00000000000..fa937100982
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/file-nav/branch_switcher_spec.js
@@ -0,0 +1,123 @@
+import { GlDropdown, GlDropdownItem, GlIcon } from '@gitlab/ui';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import BranchSwitcher from '~/pipeline_editor/components/file_nav/branch_switcher.vue';
+import { DEFAULT_FAILURE } from '~/pipeline_editor/constants';
+import { mockDefaultBranch, mockProjectBranches, mockProjectFullPath } from '../../mock_data';
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+describe('Pipeline editor branch switcher', () => {
+ let wrapper;
+ let mockApollo;
+ let mockAvailableBranchQuery;
+
+ const createComponentWithApollo = () => {
+ const resolvers = {
+ Query: {
+ project: mockAvailableBranchQuery,
+ },
+ };
+
+ mockApollo = createMockApollo([], resolvers);
+ wrapper = shallowMount(BranchSwitcher, {
+ localVue,
+ apolloProvider: mockApollo,
+ provide: {
+ projectFullPath: mockProjectFullPath,
+ },
+ data() {
+ return {
+ currentBranch: mockDefaultBranch,
+ };
+ },
+ });
+ };
+
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdownItems = () => wrapper.findAll(GlDropdownItem);
+
+ beforeEach(() => {
+ mockAvailableBranchQuery = jest.fn();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('while querying', () => {
+ beforeEach(() => {
+ createComponentWithApollo();
+ });
+
+ it('does not render dropdown', () => {
+ expect(findDropdown().exists()).toBe(false);
+ });
+ });
+
+ describe('after querying', () => {
+ beforeEach(async () => {
+ mockAvailableBranchQuery.mockResolvedValue(mockProjectBranches);
+ createComponentWithApollo();
+ await waitForPromises();
+ });
+
+ it('query is called with correct variables', async () => {
+ expect(mockAvailableBranchQuery).toHaveBeenCalledTimes(1);
+ expect(mockAvailableBranchQuery).toHaveBeenCalledWith(
+ expect.anything(),
+ {
+ fullPath: mockProjectFullPath,
+ },
+ expect.anything(),
+ expect.anything(),
+ );
+ });
+
+ it('renders list of branches', () => {
+ expect(findDropdown().exists()).toBe(true);
+ expect(findDropdownItems()).toHaveLength(mockProjectBranches.repository.branches.length);
+ });
+
+ it('renders current branch at the top of the list with a check mark', () => {
+ const firstDropdownItem = findDropdownItems().at(0);
+ const icon = firstDropdownItem.findComponent(GlIcon);
+
+ expect(firstDropdownItem.text()).toBe(mockDefaultBranch);
+ expect(icon.exists()).toBe(true);
+ expect(icon.props('name')).toBe('check');
+ });
+
+ it('does not render check mark for other branches', () => {
+ const secondDropdownItem = findDropdownItems().at(1);
+ const icon = secondDropdownItem.findComponent(GlIcon);
+
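+ // the check icon is rendered for non-current branches but hidden via CSS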
+ expect(icon.classes()).toContain('gl-visibility-hidden');
+ });
+ });
+
+ describe('on fetch error', () => {
+ beforeEach(async () => {
+ mockAvailableBranchQuery.mockResolvedValue(new Error());
+ createComponentWithApollo();
+ await waitForPromises();
+ });
+
+ it('does not render dropdown', () => {
+ expect(findDropdown().exists()).toBe(false);
+ });
+
+ it('shows an error message', () => {
+ expect(wrapper.emitted('showError')).toBeDefined();
+ expect(wrapper.emitted('showError')[0]).toEqual([
+ {
+ reasons: [wrapper.vm.$options.i18n.fetchError],
+ type: DEFAULT_FAILURE,
+ },
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/file-nav/pipeline_editor_file_nav_spec.js b/spec/frontend/pipeline_editor/components/file-nav/pipeline_editor_file_nav_spec.js
new file mode 100644
index 00000000000..94a0a7d14ee
--- /dev/null
+++ b/spec/frontend/pipeline_editor/components/file-nav/pipeline_editor_file_nav_spec.js
@@ -0,0 +1,49 @@
+import { shallowMount } from '@vue/test-utils';
+import BranchSwitcher from '~/pipeline_editor/components/file_nav/branch_switcher.vue';
+import PipelineEditorFileNav from '~/pipeline_editor/components/file_nav/pipeline_editor_file_nav.vue';
+
+describe('Pipeline editor file nav', () => {
+ let wrapper;
+ const mockProvide = {
+ glFeatures: {
+ pipelineEditorBranchSwitcher: true,
+ },
+ };
+
+ const createComponent = ({ provide = {} } = {}) => {
+ wrapper = shallowMount(PipelineEditorFileNav, {
+ provide: {
+ ...mockProvide,
+ ...provide,
+ },
+ });
+ };
+
+ const findBranchSwitcher = () => wrapper.findComponent(BranchSwitcher);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('template', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders the branch switcher', () => {
+ expect(findBranchSwitcher().exists()).toBe(true);
+ });
+ });
+
+ describe('with branch switcher feature flag OFF', () => {
+ it('does not render the branch switcher', () => {
+ createComponent({
+ provide: {
+ glFeatures: { pipelineEditorBranchSwitcher: false },
+ },
+ });
+
+ expect(findBranchSwitcher().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js b/spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js
index ef8ca574e59..27652bb268b 100644
--- a/spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js
+++ b/spec/frontend/pipeline_editor/components/header/pipeline_editor_header_spec.js
@@ -13,7 +13,7 @@ describe('Pipeline editor header', () => {
},
};
- const createComponent = ({ provide = {} } = {}) => {
+ const createComponent = ({ provide = {}, props = {} } = {}) => {
wrapper = shallowMount(PipelineEditorHeader, {
provide: {
...mockProvide,
@@ -23,6 +23,8 @@ describe('Pipeline editor header', () => {
ciConfigData: mockLintResponse,
ciFileContent: mockCiYml,
isCiConfigDataLoading: false,
+ isNewCiConfigFile: false,
+ ...props,
},
});
};
@@ -36,15 +38,21 @@ describe('Pipeline editor header', () => {
});
describe('template', () => {
- beforeEach(() => {
- createComponent();
+ it('hides the pipeline status for new projects without a CI file', () => {
+ createComponent({ props: { isNewCiConfigFile: true } });
+
+ expect(findPipelineStatus().exists()).toBe(false);
});
- it('renders the pipeline status', () => {
+ it('renders the pipeline status when CI file exists', () => {
+ createComponent({ props: { isNewCiConfigFile: false } });
+
expect(findPipelineStatus().exists()).toBe(true);
});
it('renders the validation segment', () => {
+ createComponent();
+
expect(findValidationSegment().exists()).toBe(true);
});
});
diff --git a/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js b/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js
index de6e112866b..b6d49d0d0f8 100644
--- a/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js
+++ b/spec/frontend/pipeline_editor/components/header/pipeline_status_spec.js
@@ -4,6 +4,7 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import PipelineStatus, { i18n } from '~/pipeline_editor/components/header/pipeline_status.vue';
+import getPipelineQuery from '~/pipeline_editor/graphql/queries/client/pipeline.graphql';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import { mockCommitSha, mockProjectPipeline, mockProjectFullPath } from '../../mock_data';
@@ -19,32 +20,9 @@ describe('Pipeline Status', () => {
let mockApollo;
let mockPipelineQuery;
- const createComponent = ({ hasPipeline = true, isQueryLoading = false }) => {
- const pipeline = hasPipeline
- ? { loading: isQueryLoading, ...mockProjectPipeline.pipeline }
- : { loading: isQueryLoading };
-
- wrapper = shallowMount(PipelineStatus, {
- provide: mockProvide,
- stubs: { GlLink, GlSprintf },
- data: () => (hasPipeline ? { pipeline } : {}),
- mocks: {
- $apollo: {
- queries: {
- pipeline,
- },
- },
- },
- });
- };
-
const createComponentWithApollo = () => {
- const resolvers = {
- Query: {
- project: mockPipelineQuery,
- },
- };
- mockApollo = createMockApollo([], resolvers);
+ const handlers = [[getPipelineQuery, mockPipelineQuery]];
+ mockApollo = createMockApollo(handlers);
wrapper = shallowMount(PipelineStatus, {
localVue,
@@ -78,16 +56,17 @@ describe('Pipeline Status', () => {
wrapper = null;
});
- describe('while querying', () => {
- it('renders loading icon', () => {
- createComponent({ isQueryLoading: true, hasPipeline: false });
+ describe('loading icon', () => {
+ it('renders while query is being fetched', () => {
+ createComponentWithApollo();
expect(findLoadingIcon().exists()).toBe(true);
expect(findPipelineLoadingMsg().text()).toBe(i18n.fetchLoading);
});
- it('does not render loading icon if pipeline data is already set', () => {
- createComponent({ isQueryLoading: true });
+ it('does not render if query is no longer loading', async () => {
+ createComponentWithApollo();
+ await waitForPromises();
expect(findLoadingIcon().exists()).toBe(false);
});
@@ -96,7 +75,9 @@ describe('Pipeline Status', () => {
describe('when querying data', () => {
describe('when data is set', () => {
beforeEach(async () => {
- mockPipelineQuery.mockResolvedValue(mockProjectPipeline);
+ mockPipelineQuery.mockResolvedValue({
+ data: { project: mockProjectPipeline },
+ });
createComponentWithApollo();
await waitForPromises();
@@ -104,14 +85,10 @@ describe('Pipeline Status', () => {
it('query is called with correct variables', async () => {
expect(mockPipelineQuery).toHaveBeenCalledTimes(1);
- expect(mockPipelineQuery).toHaveBeenCalledWith(
- expect.anything(),
- {
- fullPath: mockProjectFullPath,
- },
- expect.anything(),
- expect.anything(),
- );
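+ // query handlers registered with createMockApollo receive only the query variables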
+ expect(mockPipelineQuery).toHaveBeenCalledWith({
+ fullPath: mockProjectFullPath,
+ sha: mockCommitSha,
+ });
});
it('does not render error', () => {
diff --git a/spec/frontend/pipeline_editor/components/header/validation_segment_spec.js b/spec/frontend/pipeline_editor/components/header/validation_segment_spec.js
index 274c2d1b8da..fd8a100bb2c 100644
--- a/spec/frontend/pipeline_editor/components/header/validation_segment_spec.js
+++ b/spec/frontend/pipeline_editor/components/header/validation_segment_spec.js
@@ -6,13 +6,19 @@ import { sprintf } from '~/locale';
import ValidationSegment, {
i18n,
} from '~/pipeline_editor/components/header/validation_segment.vue';
-import { CI_CONFIG_STATUS_INVALID } from '~/pipeline_editor/constants';
+import {
+ CI_CONFIG_STATUS_INVALID,
+ EDITOR_APP_STATUS_EMPTY,
+ EDITOR_APP_STATUS_INVALID,
+ EDITOR_APP_STATUS_LOADING,
+ EDITOR_APP_STATUS_VALID,
+} from '~/pipeline_editor/constants';
import { mockYmlHelpPagePath, mergeUnwrappedCiConfig, mockCiYml } from '../../mock_data';
describe('Validation segment component', () => {
let wrapper;
- const createComponent = (props = {}) => {
+ const createComponent = ({ props = {}, appStatus }) => {
wrapper = extendedWrapper(
shallowMount(ValidationSegment, {
provide: {
@@ -21,9 +27,14 @@ describe('Validation segment component', () => {
propsData: {
ciConfig: mergeUnwrappedCiConfig(),
ciFileContent: mockCiYml,
- loading: false,
...props,
},
+ // Simulate the GraphQL client query result
+ data() {
+ return {
+ appStatus,
+ };
+ },
}),
);
};
@@ -34,18 +45,17 @@ describe('Validation segment component', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
it('shows the loading state', () => {
- createComponent({ loading: true });
+ createComponent({ appStatus: EDITOR_APP_STATUS_LOADING });
expect(wrapper.text()).toBe(i18n.loading);
});
describe('when config is empty', () => {
beforeEach(() => {
- createComponent({ ciFileContent: '' });
+ createComponent({ appStatus: EDITOR_APP_STATUS_EMPTY });
});
it('has check icon', () => {
@@ -59,7 +69,7 @@ describe('Validation segment component', () => {
describe('when config is valid', () => {
beforeEach(() => {
- createComponent({});
+ createComponent({ appStatus: EDITOR_APP_STATUS_VALID });
});
it('has check icon', () => {
@@ -79,12 +89,9 @@ describe('Validation segment component', () => {
describe('when config is invalid', () => {
beforeEach(() => {
createComponent({
- ciConfig: mergeUnwrappedCiConfig({
- status: CI_CONFIG_STATUS_INVALID,
- }),
+ appStatus: EDITOR_APP_STATUS_INVALID,
});
});
-
it('has warning icon', () => {
expect(findIcon().props('name')).toBe('warning-solid');
});
@@ -93,43 +100,53 @@ describe('Validation segment component', () => {
expect(findValidationMsg().text()).toBe(i18n.invalid);
});
- it('shows an invalid state with an error', () => {
+ it('shows the learn more link', () => {
+ expect(findLearnMoreLink().attributes('href')).toBe(mockYmlHelpPagePath);
+ expect(findLearnMoreLink().text()).toBe('Learn more');
+ });
+
+ describe('with multiple errors', () => {
const firstError = 'First Error';
const secondError = 'Second Error';
- createComponent({
- ciConfig: mergeUnwrappedCiConfig({
- status: CI_CONFIG_STATUS_INVALID,
- errors: [firstError, secondError],
- }),
+ beforeEach(() => {
+ createComponent({
+ props: {
+ ciConfig: mergeUnwrappedCiConfig({
+ status: CI_CONFIG_STATUS_INVALID,
+ errors: [firstError, secondError],
+ }),
+ },
+ });
+ });
+ it('shows an invalid state with an error', () => {
+ // Test the error is shown _and_ the string matches
+ expect(findValidationMsg().text()).toContain(firstError);
+ expect(findValidationMsg().text()).toBe(
+ sprintf(i18n.invalidWithReason, { reason: firstError }),
+ );
});
-
- // Test the error is shown _and_ the string matches
- expect(findValidationMsg().text()).toContain(firstError);
- expect(findValidationMsg().text()).toBe(
- sprintf(i18n.invalidWithReason, { reason: firstError }),
- );
});
- it('shows an invalid state with an error while preventing XSS', () => {
+ describe('with XSS inside the error', () => {
const evilError = '<script>evil();</script>';
- createComponent({
- ciConfig: mergeUnwrappedCiConfig({
- status: CI_CONFIG_STATUS_INVALID,
- errors: [evilError],
- }),
+ beforeEach(() => {
+ createComponent({
+ props: {
+ ciConfig: mergeUnwrappedCiConfig({
+ status: CI_CONFIG_STATUS_INVALID,
+ errors: [evilError],
+ }),
+ },
+ });
});
+ it('shows an invalid state with an error while preventing XSS', () => {
+ const { innerHTML } = findValidationMsg().element;
- const { innerHTML } = findValidationMsg().element;
-
- expect(innerHTML).not.toContain(evilError);
- expect(innerHTML).toContain(escape(evilError));
- });
-
- it('shows the learn more link', () => {
- expect(findLearnMoreLink().attributes('href')).toBe(mockYmlHelpPagePath);
- expect(findLearnMoreLink().text()).toBe('Learn more');
+ expect(innerHTML).not.toContain(evilError);
+ expect(innerHTML).toContain(escape(evilError));
+ });
});
});
});
diff --git a/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js b/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js
index 6775433deb9..5fc0880b09e 100644
--- a/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js
+++ b/spec/frontend/pipeline_editor/components/lint/ci_lint_results_spec.js
@@ -7,7 +7,7 @@ import { mockJobs, mockErrors, mockWarnings } from '../../mock_data';
describe('CI Lint Results', () => {
let wrapper;
const defaultProps = {
- valid: true,
+ isValid: true,
jobs: mockJobs,
errors: [],
warnings: [],
@@ -42,7 +42,6 @@ describe('CI Lint Results', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
describe('Empty results', () => {
@@ -72,7 +71,7 @@ describe('CI Lint Results', () => {
describe('Invalid results', () => {
beforeEach(() => {
- createComponent({ valid: false, errors: mockErrors, warnings: mockWarnings }, mount);
+ createComponent({ isValid: false, errors: mockErrors, warnings: mockWarnings }, mount);
});
it('does not display the table', () => {
diff --git a/spec/frontend/pipeline_editor/components/lint/ci_lint_spec.js b/spec/frontend/pipeline_editor/components/lint/ci_lint_spec.js
index fdddca3d62b..238942a34ff 100644
--- a/spec/frontend/pipeline_editor/components/lint/ci_lint_spec.js
+++ b/spec/frontend/pipeline_editor/components/lint/ci_lint_spec.js
@@ -1,13 +1,12 @@
import { GlAlert, GlLink } from '@gitlab/ui';
-import { shallowMount, mount } from '@vue/test-utils';
+import { mount, shallowMount } from '@vue/test-utils';
import CiLint from '~/pipeline_editor/components/lint/ci_lint.vue';
-import { CI_CONFIG_STATUS_INVALID } from '~/pipeline_editor/constants';
import { mergeUnwrappedCiConfig, mockLintHelpPagePath } from '../../mock_data';
describe('~/pipeline_editor/components/lint/ci_lint.vue', () => {
let wrapper;
- const createComponent = (props = {}, mountFn = shallowMount) => {
+ const createComponent = ({ props, mountFn = shallowMount } = {}) => {
wrapper = mountFn(CiLint, {
provide: {
lintHelpPagePath: mockLintHelpPagePath,
@@ -27,12 +26,11 @@ describe('~/pipeline_editor/components/lint/ci_lint.vue', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
describe('Valid Results', () => {
beforeEach(() => {
- createComponent({}, mount);
+ createComponent({ props: { isValid: true }, mountFn: mount });
});
it('displays valid results', () => {
@@ -66,14 +64,7 @@ describe('~/pipeline_editor/components/lint/ci_lint.vue', () => {
});
it('displays invalid results', () => {
- createComponent(
- {
- ciConfig: mergeUnwrappedCiConfig({
- status: CI_CONFIG_STATUS_INVALID,
- }),
- },
- mount,
- );
+ createComponent({ props: { isValid: false }, mountFn: mount });
expect(findAlert().text()).toMatch('Status: Syntax is incorrect.');
});
diff --git a/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
index 24af17e9ce6..eba853180cd 100644
--- a/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
+++ b/spec/frontend/pipeline_editor/components/pipeline_editor_tabs_spec.js
@@ -4,8 +4,15 @@ import { nextTick } from 'vue';
import CiConfigMergedPreview from '~/pipeline_editor/components/editor/ci_config_merged_preview.vue';
import CiLint from '~/pipeline_editor/components/lint/ci_lint.vue';
import PipelineEditorTabs from '~/pipeline_editor/components/pipeline_editor_tabs.vue';
+import EditorTab from '~/pipeline_editor/components/ui/editor_tab.vue';
+import {
+ EDITOR_APP_STATUS_EMPTY,
+ EDITOR_APP_STATUS_ERROR,
+ EDITOR_APP_STATUS_LOADING,
+ EDITOR_APP_STATUS_INVALID,
+ EDITOR_APP_STATUS_VALID,
+} from '~/pipeline_editor/constants';
import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
-
import { mockLintResponse, mockCiYml } from '../mock_data';
describe('Pipeline editor tabs component', () => {
@@ -20,17 +27,27 @@ describe('Pipeline editor tabs component', () => {
},
};
- const createComponent = ({ props = {}, provide = {}, mountFn = shallowMount } = {}) => {
+ const createComponent = ({
+ props = {},
+ provide = {},
+ appStatus = EDITOR_APP_STATUS_VALID,
+ mountFn = shallowMount,
+ } = {}) => {
wrapper = mountFn(PipelineEditorTabs, {
propsData: {
ciConfigData: mockLintResponse,
ciFileContent: mockCiYml,
- isCiConfigDataLoading: false,
...props,
},
+ data() {
+ return {
+ appStatus,
+ };
+ },
provide: { ...mockProvide, ...provide },
stubs: {
TextEditor: MockTextEditor,
+ EditorTab,
},
});
};
@@ -49,7 +66,6 @@ describe('Pipeline editor tabs component', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
describe('editor tab', () => {
@@ -69,7 +85,7 @@ describe('Pipeline editor tabs component', () => {
describe('with feature flag on', () => {
describe('while loading', () => {
beforeEach(() => {
- createComponent({ props: { isCiConfigDataLoading: true } });
+ createComponent({ appStatus: EDITOR_APP_STATUS_LOADING });
});
it('displays a loading icon if the lint query is loading', () => {
@@ -108,7 +124,7 @@ describe('Pipeline editor tabs component', () => {
describe('lint tab', () => {
describe('while loading', () => {
beforeEach(() => {
- createComponent({ props: { isCiConfigDataLoading: true } });
+ createComponent({ appStatus: EDITOR_APP_STATUS_LOADING });
});
it('displays a loading icon if the lint query is loading', () => {
@@ -135,7 +151,7 @@ describe('Pipeline editor tabs component', () => {
describe('with feature flag on', () => {
describe('while loading', () => {
beforeEach(() => {
- createComponent({ props: { isCiConfigDataLoading: true } });
+ createComponent({ appStatus: EDITOR_APP_STATUS_LOADING });
});
it('displays a loading icon if the lint query is loading', () => {
@@ -143,9 +159,9 @@ describe('Pipeline editor tabs component', () => {
});
});
- describe('when `mergedYaml` is undefined', () => {
+ describe('when there is a fetch error', () => {
beforeEach(() => {
- createComponent({ props: { ciConfigData: {} } });
+ createComponent({ appStatus: EDITOR_APP_STATUS_ERROR });
});
it('show an error message', () => {
@@ -180,4 +196,24 @@ describe('Pipeline editor tabs component', () => {
});
});
});
+
+ describe('show tab content based on status', () => {
+ it.each`
+ appStatus | editor | viz | lint | merged
+ ${undefined} | ${true} | ${true} | ${true} | ${true}
+ ${EDITOR_APP_STATUS_EMPTY} | ${true} | ${false} | ${false} | ${false}
+ ${EDITOR_APP_STATUS_INVALID} | ${true} | ${false} | ${true} | ${false}
+ ${EDITOR_APP_STATUS_VALID} | ${true} | ${true} | ${true} | ${true}
+ `(
+ 'when status is $appStatus, we show - editor:$editor | viz:$viz | lint:$lint | merged:$merged ',
+ ({ appStatus, editor, viz, lint, merged }) => {
+ createComponent({ appStatus });
+
+ expect(findTextEditor().exists()).toBe(editor);
+ expect(findPipelineGraph().exists()).toBe(viz);
+ expect(findCiLint().exists()).toBe(lint);
+ expect(findMergedPreview().exists()).toBe(merged);
+ },
+ );
+ });
});
diff --git a/spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js b/spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js
index 291468c5229..8def83d578b 100644
--- a/spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js
+++ b/spec/frontend/pipeline_editor/components/ui/editor_tab_spec.js
@@ -1,12 +1,15 @@
-import { GlTabs } from '@gitlab/ui';
+import { GlAlert, GlTabs } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { nextTick } from 'vue';
-
import EditorTab from '~/pipeline_editor/components/ui/editor_tab.vue';
const mockContent1 = 'MOCK CONTENT 1';
const mockContent2 = 'MOCK CONTENT 2';
+const MockEditorLite = {
+ template: '<div>EDITOR</div>',
+};
+
describe('~/pipeline_editor/components/ui/editor_tab.vue', () => {
let wrapper;
let mockChildMounted = jest.fn();
@@ -37,22 +40,34 @@ describe('~/pipeline_editor/components/ui/editor_tab.vue', () => {
`,
};
- const createWrapper = () => {
+ const createMockedWrapper = () => {
wrapper = mount(MockTabbedContent);
};
+ const createWrapper = ({ props } = {}) => {
+ wrapper = mount(EditorTab, {
+ propsData: props,
+ slots: {
+ default: MockEditorLite,
+ },
+ });
+ };
+
+ const findSlotComponent = () => wrapper.findComponent(MockEditorLite);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+
beforeEach(() => {
mockChildMounted = jest.fn();
});
it('tabs are mounted lazily', async () => {
- createWrapper();
+ createMockedWrapper();
expect(mockChildMounted).toHaveBeenCalledTimes(0);
});
it('first tab is only mounted after nextTick', async () => {
- createWrapper();
+ createMockedWrapper();
await nextTick();
@@ -60,6 +75,36 @@ describe('~/pipeline_editor/components/ui/editor_tab.vue', () => {
expect(mockChildMounted).toHaveBeenCalledWith(mockContent1);
});
+ describe('showing the tab content depending on `isEmpty` and `isInvalid`', () => {
+ it.each`
+ isEmpty | isInvalid | showSlotComponent | text
+ ${undefined} | ${undefined} | ${true} | ${'renders'}
+ ${false} | ${false} | ${true} | ${'renders'}
+ ${undefined} | ${true} | ${false} | ${'hides'}
+ ${true} | ${false} | ${false} | ${'hides'}
+ ${false} | ${true} | ${false} | ${'hides'}
+ `(
+ '$text the slot component when isEmpty:$isEmpty and isInvalid:$isInvalid',
+ ({ isEmpty, isInvalid, showSlotComponent }) => {
+ createWrapper({
+ props: { isEmpty, isInvalid },
+ });
+ expect(findSlotComponent().exists()).toBe(showSlotComponent);
+ expect(findAlert().exists()).toBe(!showSlotComponent);
+ },
+ );
+
+ it('can have a custom empty message', () => {
+ const text = 'my custom alert message';
+ createWrapper({ props: { isEmpty: true, emptyMessage: text } });
+
+ const alert = findAlert();
+
+ expect(alert.exists()).toBe(true);
+ expect(alert.text()).toBe(text);
+ });
+ });
+
describe('user interaction', () => {
const clickTab = async (testid) => {
wrapper.find(`[data-testid="${testid}"]`).trigger('click');
@@ -67,7 +112,7 @@ describe('~/pipeline_editor/components/ui/editor_tab.vue', () => {
};
beforeEach(() => {
- createWrapper();
+ createMockedWrapper();
});
it('mounts a tab once after selecting it', async () => {
diff --git a/spec/frontend/pipeline_editor/graphql/resolvers_spec.js b/spec/frontend/pipeline_editor/graphql/resolvers_spec.js
index 196a4133eea..f0932fc55d3 100644
--- a/spec/frontend/pipeline_editor/graphql/resolvers_spec.js
+++ b/spec/frontend/pipeline_editor/graphql/resolvers_spec.js
@@ -9,6 +9,7 @@ import {
mockDefaultBranch,
mockLintResponse,
mockProjectFullPath,
+ mockProjectBranches,
} from '../mock_data';
jest.mock('~/api', () => {
@@ -47,21 +48,20 @@ describe('~/pipeline_editor/graphql/resolvers', () => {
});
});
- describe('pipeline', () => {
- it('resolves pipeline data with type names', async () => {
- const result = await resolvers.Query.project(null);
+ describe('project', () => {
+ it('resolves project data with type names', async () => {
+ const result = await resolvers.Query.project();
// eslint-disable-next-line no-underscore-dangle
expect(result.__typename).toBe('Project');
});
- it('resolves pipeline data with necessary data', async () => {
- const result = await resolvers.Query.project(null);
- const pipelineKeys = Object.keys(result.pipeline);
- const statusKeys = Object.keys(result.pipeline.detailedStatus);
+ it('resolves project with available list of branches', async () => {
+ const result = await resolvers.Query.project();
- expect(pipelineKeys).toContain('id', 'commitPath', 'detailedStatus', 'shortSha');
- expect(statusKeys).toContain('detailsPath', 'text');
+ expect(result.repository.branches).toHaveLength(
+ mockProjectBranches.repository.branches.length,
+ );
});
});
});
diff --git a/spec/frontend/pipeline_editor/mock_data.js b/spec/frontend/pipeline_editor/mock_data.js
index 16d5ba0e714..7f651a42231 100644
--- a/spec/frontend/pipeline_editor/mock_data.js
+++ b/spec/frontend/pipeline_editor/mock_data.js
@@ -138,6 +138,20 @@ export const mergeUnwrappedCiConfig = (mergedConfig) => {
};
};
+export const mockProjectBranches = {
+ __typename: 'Project',
+ repository: {
+ __typename: 'Repository',
+ branches: [
+ { __typename: 'Branch', name: 'master' },
+ { __typename: 'Branch', name: 'main' },
+ { __typename: 'Branch', name: 'develop' },
+ { __typename: 'Branch', name: 'production' },
+ { __typename: 'Branch', name: 'test' },
+ ],
+ },
+};
+
export const mockProjectPipeline = {
pipeline: {
commitPath: '/-/commit/aabbccdd',
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
index 887d296222f..d8e3436479c 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_app_spec.js
@@ -2,8 +2,11 @@ import { GlAlert, GlButton, GlLoadingIcon, GlTabs } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
+import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import httpStatusCodes from '~/lib/utils/http_status';
+import CodeSnippetAlert from '~/pipeline_editor/components/code_snippet_alert/code_snippet_alert.vue';
+import { CODE_SNIPPET_SOURCES } from '~/pipeline_editor/components/code_snippet_alert/constants';
import CommitForm from '~/pipeline_editor/components/commit/commit_form.vue';
import TextEditor from '~/pipeline_editor/components/editor/text_editor.vue';
@@ -72,7 +75,7 @@ describe('Pipeline editor app component', () => {
});
};
- const createComponentWithApollo = ({ props = {}, provide = {} } = {}) => {
+ const createComponentWithApollo = async ({ props = {}, provide = {} } = {}) => {
const handlers = [[getCiConfigData, mockCiConfigData]];
const resolvers = {
Query: {
@@ -94,6 +97,8 @@ describe('Pipeline editor app component', () => {
};
createComponent({ props, provide, options });
+
+ return waitForPromises();
};
const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
@@ -103,6 +108,7 @@ describe('Pipeline editor app component', () => {
const findEmptyState = () => wrapper.findComponent(PipelineEditorEmptyState);
const findEmptyStateButton = () =>
wrapper.findComponent(PipelineEditorEmptyState).findComponent(GlButton);
+ const findCodeSnippetAlert = () => wrapper.findComponent(CodeSnippetAlert);
beforeEach(() => {
mockBlobContentData = jest.fn();
@@ -116,11 +122,55 @@ describe('Pipeline editor app component', () => {
wrapper.destroy();
});
- it('displays a loading icon if the blob query is loading', () => {
- createComponent({ blobLoading: true });
+ describe('loading state', () => {
+ it('displays a loading icon if the blob query is loading', () => {
+ createComponent({ blobLoading: true });
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(findTextEditor().exists()).toBe(false);
+ });
+ });
+
+ describe('code snippet alert', () => {
+ const setCodeSnippetUrlParam = (value) => {
+ global.jsdom.reconfigure({
+ url: `${TEST_HOST}/?code_snippet_copied_from=${value}`,
+ });
+ };
+
+ it('does not show by default', () => {
+ createComponent();
+
+ expect(findCodeSnippetAlert().exists()).toBe(false);
+ });
+
+ it.each(CODE_SNIPPET_SOURCES)('shows if URL param is %s, and cleans up URL', (source) => {
+ jest.spyOn(window.history, 'replaceState');
+ setCodeSnippetUrlParam(source);
+ createComponent();
+
+ expect(findCodeSnippetAlert().exists()).toBe(true);
+ expect(window.history.replaceState).toHaveBeenCalledWith({}, document.title, `${TEST_HOST}/`);
+ });
+
+ it('does not show if URL param is invalid', () => {
+ setCodeSnippetUrlParam('foo_bar');
+ createComponent();
+
+ expect(findCodeSnippetAlert().exists()).toBe(false);
+ });
+
+ it('disappears on dismiss', async () => {
+ setCodeSnippetUrlParam('api_fuzzing');
+ createComponent();
+ const alert = findCodeSnippetAlert();
+
+ expect(alert.exists()).toBe(true);
+
+ await alert.vm.$emit('dismiss');
- expect(findLoadingIcon().exists()).toBe(true);
- expect(findTextEditor().exists()).toBe(false);
+ expect(alert.exists()).toBe(false);
+ });
});
describe('when queries are called', () => {
@@ -131,9 +181,7 @@ describe('Pipeline editor app component', () => {
describe('when file exists', () => {
beforeEach(async () => {
- createComponentWithApollo();
-
- await waitForPromises();
+ await createComponentWithApollo();
});
it('shows pipeline editor home component', () => {
@@ -145,10 +193,6 @@ describe('Pipeline editor app component', () => {
});
it('ci config query is called with correct variables', async () => {
- createComponentWithApollo();
-
- await waitForPromises();
-
expect(mockCiConfigData).toHaveBeenCalledWith({
content: mockCiYml,
projectPath: mockProjectFullPath,
@@ -164,9 +208,7 @@ describe('Pipeline editor app component', () => {
status: httpStatusCodes.BAD_REQUEST,
},
});
- createComponentWithApollo();
-
- await waitForPromises();
+ await createComponentWithApollo();
expect(findEmptyState().exists()).toBe(true);
expect(findAlert().exists()).toBe(false);
@@ -181,9 +223,7 @@ describe('Pipeline editor app component', () => {
status: httpStatusCodes.NOT_FOUND,
},
});
- createComponentWithApollo();
-
- await waitForPromises();
+ await createComponentWithApollo();
expect(findEmptyState().exists()).toBe(true);
expect(findAlert().exists()).toBe(false);
@@ -194,8 +234,7 @@ describe('Pipeline editor app component', () => {
describe('because of a fetching error', () => {
it('shows an unknown error message', async () => {
mockBlobContentData.mockRejectedValueOnce(new Error('My error!'));
- createComponentWithApollo();
- await waitForPromises();
+ await createComponentWithApollo();
expect(findEmptyState().exists()).toBe(false);
expect(findAlert().text()).toBe(wrapper.vm.$options.errorTexts[LOAD_FAILURE_UNKNOWN]);
@@ -212,7 +251,7 @@ describe('Pipeline editor app component', () => {
},
});
- createComponentWithApollo({
+ await createComponentWithApollo({
provide: {
glFeatures: {
pipelineEditorEmptyStateAction: true,
@@ -220,8 +259,6 @@ describe('Pipeline editor app component', () => {
},
});
- await waitForPromises();
-
expect(findEmptyState().exists()).toBe(true);
expect(findTextEditor().exists()).toBe(false);
@@ -254,9 +291,9 @@ describe('Pipeline editor app component', () => {
describe('and the commit mutation fails', () => {
const commitFailedReasons = ['Commit failed'];
- beforeEach(() => {
+ beforeEach(async () => {
window.scrollTo = jest.fn();
- createComponent();
+ await createComponentWithApollo();
findEditorHome().vm.$emit('showError', {
type: COMMIT_FAILURE,
@@ -278,9 +315,9 @@ describe('Pipeline editor app component', () => {
describe('when an unknown error occurs', () => {
const unknownReasons = ['Commit failed'];
- beforeEach(() => {
+ beforeEach(async () => {
window.scrollTo = jest.fn();
- createComponent();
+ await createComponentWithApollo();
findEditorHome().vm.$emit('showError', {
type: COMMIT_FAILURE,
diff --git a/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
index 9864f3c13f9..a1e3d24acfa 100644
--- a/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
+++ b/spec/frontend/pipeline_editor/pipeline_editor_home_spec.js
@@ -2,6 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import { nextTick } from 'vue';
import CommitSection from '~/pipeline_editor/components/commit/commit_section.vue';
+import PipelineEditorFileNav from '~/pipeline_editor/components/file_nav/pipeline_editor_file_nav.vue';
import PipelineEditorHeader from '~/pipeline_editor/components/header/pipeline_editor_header.vue';
import PipelineEditorTabs from '~/pipeline_editor/components/pipeline_editor_tabs.vue';
import { MERGED_TAB, VISUALIZE_TAB } from '~/pipeline_editor/constants';
@@ -18,6 +19,7 @@ describe('Pipeline editor home wrapper', () => {
ciConfigData: mockLintResponse,
ciFileContent: mockCiYml,
isCiConfigDataLoading: false,
+ isNewCiConfigFile: false,
...props,
},
});
@@ -26,6 +28,7 @@ describe('Pipeline editor home wrapper', () => {
const findPipelineEditorHeader = () => wrapper.findComponent(PipelineEditorHeader);
const findPipelineEditorTabs = () => wrapper.findComponent(PipelineEditorTabs);
const findCommitSection = () => wrapper.findComponent(CommitSection);
+ const findFileNav = () => wrapper.findComponent(PipelineEditorFileNav);
afterEach(() => {
wrapper.destroy();
@@ -37,6 +40,10 @@ describe('Pipeline editor home wrapper', () => {
createComponent();
});
+ it('shows the file nav', () => {
+ expect(findFileNav().exists()).toBe(true);
+ });
+
it('shows the pipeline editor header', () => {
expect(findPipelineEditorHeader().exists()).toBe(true);
});
diff --git a/spec/frontend/pipelines/blank_state_spec.js b/spec/frontend/pipelines/blank_state_spec.js
deleted file mode 100644
index 5dcf3d267ed..00000000000
--- a/spec/frontend/pipelines/blank_state_spec.js
+++ /dev/null
@@ -1,20 +0,0 @@
-import { getByText } from '@testing-library/dom';
-import { mount } from '@vue/test-utils';
-import BlankState from '~/pipelines/components/pipelines_list/blank_state.vue';
-
-describe('Pipelines Blank State', () => {
- const wrapper = mount(BlankState, {
- propsData: {
- svgPath: 'foo',
- message: 'Blank State',
- },
- });
-
- it('should render svg', () => {
- expect(wrapper.find('.svg-content img').attributes('src')).toEqual('foo');
- });
-
- it('should render message', () => {
- expect(getByText(wrapper.element, /Blank State/i)).toBeTruthy();
- });
-});
diff --git a/spec/frontend/pipelines/components/pipelines_list/pipeline_stage_spec.js b/spec/frontend/pipelines/components/pipelines_list/pipeline_stage_spec.js
index 60026f69b84..93bc8faa51b 100644
--- a/spec/frontend/pipelines/components/pipelines_list/pipeline_stage_spec.js
+++ b/spec/frontend/pipelines/components/pipelines_list/pipeline_stage_spec.js
@@ -11,10 +11,15 @@ const dropdownPath = 'path.json';
describe('Pipelines stage component', () => {
let wrapper;
let mock;
+ let glTooltipDirectiveMock;
const createComponent = (props = {}) => {
+ glTooltipDirectiveMock = jest.fn();
wrapper = mount(PipelineStage, {
attachTo: document.body,
+ directives: {
+ GlTooltip: glTooltipDirectiveMock,
+ },
propsData: {
stage: {
status: {
@@ -62,6 +67,10 @@ describe('Pipelines stage component', () => {
createComponent();
});
+ it('sets up the tooltip to not have a show delay animation', () => {
+ expect(glTooltipDirectiveMock.mock.calls[0][1].modifiers.ds0).toBe(true);
+ });
+
it('should render a dropdown with the status icon', () => {
expect(findDropdown().exists()).toBe(true);
expect(findDropdownToggle().exists()).toBe(true);
diff --git a/spec/frontend/pipelines/graph/action_component_spec.js b/spec/frontend/pipelines/graph/action_component_spec.js
index 6a7018fa1e5..177b026491c 100644
--- a/spec/frontend/pipelines/graph/action_component_spec.js
+++ b/spec/frontend/pipelines/graph/action_component_spec.js
@@ -3,7 +3,7 @@ import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
-import ActionComponent from '~/pipelines/components/graph/action_component.vue';
+import ActionComponent from '~/pipelines/components/jobs_shared/action_component.vue';
describe('pipeline graph action component', () => {
let wrapper;
diff --git a/spec/frontend/pipelines/graph/graph_component_spec.js b/spec/frontend/pipelines/graph/graph_component_spec.js
index 6c3f848333c..e8fb036368a 100644
--- a/spec/frontend/pipelines/graph/graph_component_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_spec.js
@@ -1,10 +1,11 @@
import { mount, shallowMount } from '@vue/test-utils';
-import { GRAPHQL } from '~/pipelines/components/graph/constants';
+import { GRAPHQL, LAYER_VIEW, STAGE_VIEW } from '~/pipelines/components/graph/constants';
import PipelineGraph from '~/pipelines/components/graph/graph_component.vue';
import JobItem from '~/pipelines/components/graph/job_item.vue';
import LinkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
import StageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
+import { listByLayers } from '~/pipelines/components/parsing_utils';
import {
generateResponse,
mockPipelineResponse,
@@ -17,9 +18,11 @@ describe('graph component', () => {
const findLinkedColumns = () => wrapper.findAll(LinkedPipelinesColumn);
const findLinksLayer = () => wrapper.find(LinksLayer);
const findStageColumns = () => wrapper.findAll(StageColumnComponent);
+ const findStageNameInJob = () => wrapper.find('[data-testid="stage-name-in-job"]');
const defaultProps = {
pipeline: generateResponse(mockPipelineResponse, 'root/fungi-xoxo'),
+ viewType: STAGE_VIEW,
configPaths: {
metricsPath: '',
graphqlResourceEtag: 'this/is/a/path',
@@ -81,6 +84,10 @@ describe('graph component', () => {
expect(findLinksLayer().exists()).toBe(true);
});
+ it('does not display stage name on the job in default (stage) mode', () => {
+ expect(findStageNameInJob().exists()).toBe(false);
+ });
+
describe('when column requests a refresh', () => {
beforeEach(() => {
findStageColumns().at(0).vm.$emit('refreshPipelineGraph');
@@ -92,7 +99,7 @@ describe('graph component', () => {
});
describe('when links are present', () => {
- beforeEach(async () => {
+ beforeEach(() => {
createComponent({
mountFn: mount,
stubOverride: { 'job-item': false },
@@ -131,4 +138,24 @@ describe('graph component', () => {
expect(findLinkedColumns()).toHaveLength(2);
});
});
+
+ describe('in layers mode', () => {
+ beforeEach(() => {
+ createComponent({
+ mountFn: mount,
+ stubOverride: {
+ 'job-item': false,
+ 'job-group-dropdown': false,
+ },
+ props: {
+ viewType: LAYER_VIEW,
+ pipelineLayers: listByLayers(defaultProps.pipeline),
+ },
+ });
+ });
+
+ it('displays the stage name on the job', () => {
+ expect(findStageNameInJob().exists()).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
index 44d8e467f51..8c469966be4 100644
--- a/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
+++ b/spec/frontend/pipelines/graph/graph_component_wrapper_spec.js
@@ -1,11 +1,21 @@
import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { mount, shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import getPipelineDetails from 'shared_queries/pipelines/get_pipeline_details.query.graphql';
+import {
+ IID_FAILURE,
+ LAYER_VIEW,
+ STAGE_VIEW,
+ VIEW_TYPE_KEY,
+} from '~/pipelines/components/graph/constants';
import PipelineGraph from '~/pipelines/components/graph/graph_component.vue';
import PipelineGraphWrapper from '~/pipelines/components/graph/graph_component_wrapper.vue';
+import GraphViewSelector from '~/pipelines/components/graph/graph_view_selector.vue';
+import StageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
+import * as parsingUtils from '~/pipelines/components/parsing_utils';
import { mockPipelineResponse } from './mock_data';
const defaultProvide = {
@@ -17,20 +27,28 @@ const defaultProvide = {
describe('Pipeline graph wrapper', () => {
Vue.use(VueApollo);
+ useLocalStorageSpy();
let wrapper;
const getAlert = () => wrapper.find(GlAlert);
const getLoadingIcon = () => wrapper.find(GlLoadingIcon);
const getGraph = () => wrapper.find(PipelineGraph);
+ const getStageColumnTitle = () => wrapper.find('[data-testid="stage-column-title"]');
+ const getAllStageColumnGroupsInColumn = () =>
+ wrapper.find(StageColumnComponent).findAll('[data-testid="stage-column-group"]');
+ const getViewSelector = () => wrapper.find(GraphViewSelector);
const createComponent = ({
apolloProvider,
data = {},
- provide = defaultProvide,
+ provide = {},
mountFn = shallowMount,
} = {}) => {
wrapper = mountFn(PipelineGraphWrapper, {
- provide,
+ provide: {
+ ...defaultProvide,
+ ...provide,
+ },
apolloProvider,
data() {
return {
@@ -40,13 +58,15 @@ describe('Pipeline graph wrapper', () => {
});
};
- const createComponentWithApollo = (
+ const createComponentWithApollo = ({
getPipelineDetailsHandler = jest.fn().mockResolvedValue(mockPipelineResponse),
- ) => {
+ mountFn = shallowMount,
+ provide = {},
+ } = {}) => {
const requestHandlers = [[getPipelineDetails, getPipelineDetailsHandler]];
const apolloProvider = createMockApollo(requestHandlers);
- createComponent({ apolloProvider });
+ createComponent({ apolloProvider, provide, mountFn });
};
afterEach(() => {
@@ -100,7 +120,9 @@ describe('Pipeline graph wrapper', () => {
describe('when there is an error', () => {
beforeEach(async () => {
- createComponentWithApollo(jest.fn().mockRejectedValue(new Error('GraphQL error')));
+ createComponentWithApollo({
+ getPipelineDetailsHandler: jest.fn().mockRejectedValue(new Error('GraphQL error')),
+ });
jest.runOnlyPendingTimers();
await wrapper.vm.$nextTick();
});
@@ -118,6 +140,31 @@ describe('Pipeline graph wrapper', () => {
});
});
+ describe('when there is no pipeline iid available', () => {
+ beforeEach(async () => {
+ createComponentWithApollo({
+ provide: {
+ pipelineIid: '',
+ },
+ });
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+ });
+
+ it('does not display the loading icon', () => {
+ expect(getLoadingIcon().exists()).toBe(false);
+ });
+
+ it('displays the no iid alert', () => {
+ expect(getAlert().exists()).toBe(true);
+ expect(getAlert().text()).toBe(wrapper.vm.$options.errorTexts[IID_FAILURE]);
+ });
+
+ it('does not display the graph', () => {
+ expect(getGraph().exists()).toBe(false);
+ });
+ });
+
describe('when refresh action is emitted', () => {
beforeEach(async () => {
createComponentWithApollo();
@@ -154,7 +201,7 @@ describe('Pipeline graph wrapper', () => {
.mockResolvedValueOnce(mockPipelineResponse)
.mockResolvedValueOnce(errorData);
- createComponentWithApollo(failSucceedFail);
+ createComponentWithApollo({ getPipelineDetailsHandler: failSucceedFail });
await wrapper.vm.$nextTick();
});
@@ -174,4 +221,113 @@ describe('Pipeline graph wrapper', () => {
expect(getGraph().exists()).toBe(true);
});
});
+
+ describe('view dropdown', () => {
+ describe('when pipelineGraphLayersView feature flag is off', () => {
+ beforeEach(async () => {
+ createComponentWithApollo();
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+ });
+
+ it('does not appear', () => {
+ expect(getViewSelector().exists()).toBe(false);
+ });
+ });
+
+ describe('when pipelineGraphLayersView feature flag is on', () => {
+ let layersFn;
+ beforeEach(async () => {
+ layersFn = jest.spyOn(parsingUtils, 'listByLayers');
+ createComponentWithApollo({
+ provide: {
+ glFeatures: {
+ pipelineGraphLayersView: true,
+ },
+ },
+ mountFn: mount,
+ });
+
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+ });
+
+ it('appears when pipeline uses needs', () => {
+ expect(getViewSelector().exists()).toBe(true);
+ });
+
+ it('switches between views', async () => {
+ const groupsInFirstColumn =
+ mockPipelineResponse.data.project.pipeline.stages.nodes[0].groups.nodes.length;
+ expect(getAllStageColumnGroupsInColumn()).toHaveLength(groupsInFirstColumn);
+ expect(getStageColumnTitle().text()).toBe('Build');
+ await getViewSelector().vm.$emit('updateViewType', LAYER_VIEW);
+ expect(getAllStageColumnGroupsInColumn()).toHaveLength(groupsInFirstColumn + 1);
+ expect(getStageColumnTitle().text()).toBe('');
+ });
+
+ it('saves the view type to local storage', async () => {
+ await getViewSelector().vm.$emit('updateViewType', LAYER_VIEW);
+ expect(localStorage.setItem.mock.calls).toEqual([[VIEW_TYPE_KEY, LAYER_VIEW]]);
+ });
+
+ it('calls listByLayers only once no matter how many times view is switched', async () => {
+ expect(layersFn).not.toHaveBeenCalled();
+ await getViewSelector().vm.$emit('updateViewType', LAYER_VIEW);
+ expect(layersFn).toHaveBeenCalledTimes(1);
+ await getViewSelector().vm.$emit('updateViewType', STAGE_VIEW);
+ await getViewSelector().vm.$emit('updateViewType', LAYER_VIEW);
+ await getViewSelector().vm.$emit('updateViewType', STAGE_VIEW);
+ expect(layersFn).toHaveBeenCalledTimes(1);
+ });
+ });
+
+ describe('when feature flag is on and local storage is set', () => {
+ beforeEach(async () => {
+ localStorage.setItem(VIEW_TYPE_KEY, LAYER_VIEW);
+
+ createComponentWithApollo({
+ provide: {
+ glFeatures: {
+ pipelineGraphLayersView: true,
+ },
+ },
+ mountFn: mount,
+ });
+
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+ });
+
+ it('reads the view type from localStorage when available', () => {
+ expect(wrapper.find('[data-testid="pipeline-view-selector"] code').text()).toContain(
+ 'needs:',
+ );
+ });
+ });
+
+ describe('when feature flag is on but pipeline does not use needs', () => {
+ beforeEach(async () => {
+ const nonNeedsResponse = { ...mockPipelineResponse };
+ nonNeedsResponse.data.project.pipeline.usesNeeds = false;
+
+ createComponentWithApollo({
+ provide: {
+ glFeatures: {
+ pipelineGraphLayersView: true,
+ },
+ },
+ mountFn: mount,
+ getPipelineDetailsHandler: jest.fn().mockResolvedValue(nonNeedsResponse),
+ });
+
+ jest.runOnlyPendingTimers();
+ await wrapper.vm.$nextTick();
+ });
+
+ it('does not appear when pipeline does not use needs', () => {
+ expect(getViewSelector().exists()).toBe(false);
+ });
+ });
+ });
});
diff --git a/spec/frontend/pipelines/graph/job_group_dropdown_spec.js b/spec/frontend/pipelines/graph/job_group_dropdown_spec.js
index b323e1d8a06..5d8e70bac31 100644
--- a/spec/frontend/pipelines/graph/job_group_dropdown_spec.js
+++ b/spec/frontend/pipelines/graph/job_group_dropdown_spec.js
@@ -1,4 +1,4 @@
-import { shallowMount } from '@vue/test-utils';
+import { shallowMount, mount } from '@vue/test-utils';
import JobGroupDropdown from '~/pipelines/components/graph/job_group_dropdown.vue';
describe('job group dropdown component', () => {
@@ -65,12 +65,16 @@ describe('job group dropdown component', () => {
let wrapper;
const findButton = () => wrapper.find('button');
+ const createComponent = ({ mountFn = shallowMount }) => {
+ wrapper = mountFn(JobGroupDropdown, { propsData: { group } });
+ };
+
afterEach(() => {
wrapper.destroy();
});
beforeEach(() => {
- wrapper = shallowMount(JobGroupDropdown, { propsData: { group } });
+ createComponent({ mountFn: mount });
});
it('renders button with group name and size', () => {
diff --git a/spec/frontend/pipelines/graph/job_item_spec.js b/spec/frontend/pipelines/graph/job_item_spec.js
index cb2837cbb39..4c7ea5edda9 100644
--- a/spec/frontend/pipelines/graph/job_item_spec.js
+++ b/spec/frontend/pipelines/graph/job_item_spec.js
@@ -122,7 +122,7 @@ describe('pipeline graph job item', () => {
},
});
- expect(wrapper.find('.js-job-component-tooltip').attributes('title')).toBe('test');
+ expect(findJobWithoutLink().attributes('title')).toBe('test');
});
it('should not render status label when it is provided', () => {
@@ -138,7 +138,7 @@ describe('pipeline graph job item', () => {
},
});
- expect(wrapper.find('.js-job-component-tooltip').attributes('title')).toBe('test - success');
+ expect(findJobWithoutLink().attributes('title')).toBe('test - success');
});
});
diff --git a/spec/frontend/pipelines/graph/job_name_component_spec.js b/spec/frontend/pipelines/graph/job_name_component_spec.js
index 658b5be87d4..d3008c046e8 100644
--- a/spec/frontend/pipelines/graph/job_name_component_spec.js
+++ b/spec/frontend/pipelines/graph/job_name_component_spec.js
@@ -1,5 +1,5 @@
import { mount } from '@vue/test-utils';
-import jobNameComponent from '~/pipelines/components/graph/job_name_component.vue';
+import jobNameComponent from '~/pipelines/components/jobs_shared/job_name_component.vue';
import ciIcon from '~/vue_shared/components/ci_icon.vue';
describe('job name component', () => {
diff --git a/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js b/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
index 4c72dad735e..8aecfc1b649 100644
--- a/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
+++ b/spec/frontend/pipelines/graph/linked_pipelines_column_spec.js
@@ -2,10 +2,17 @@ import { mount, shallowMount, createLocalVue } from '@vue/test-utils';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import getPipelineDetails from 'shared_queries/pipelines/get_pipeline_details.query.graphql';
-import { DOWNSTREAM, GRAPHQL, UPSTREAM } from '~/pipelines/components/graph/constants';
+import {
+ DOWNSTREAM,
+ GRAPHQL,
+ UPSTREAM,
+ LAYER_VIEW,
+ STAGE_VIEW,
+} from '~/pipelines/components/graph/constants';
import PipelineGraph from '~/pipelines/components/graph/graph_component.vue';
import LinkedPipeline from '~/pipelines/components/graph/linked_pipeline.vue';
import LinkedPipelinesColumn from '~/pipelines/components/graph/linked_pipelines_column.vue';
+import * as parsingUtils from '~/pipelines/components/parsing_utils';
import { LOAD_FAILURE } from '~/pipelines/constants';
import {
mockPipelineResponse,
@@ -20,6 +27,7 @@ describe('Linked Pipelines Column', () => {
columnTitle: 'Downstream',
linkedPipelines: processedPipeline.downstream,
type: DOWNSTREAM,
+ viewType: STAGE_VIEW,
configPaths: {
metricsPath: '',
graphqlResourceEtag: 'this/is/a/path',
@@ -67,7 +75,7 @@ describe('Linked Pipelines Column', () => {
describe('it renders correctly', () => {
beforeEach(() => {
- createComponent();
+ createComponentWithApollo();
});
it('renders the pipeline title', () => {
@@ -91,6 +99,27 @@ describe('Linked Pipelines Column', () => {
await wrapper.vm.$nextTick();
};
+ describe('layer type rendering', () => {
+ let layersFn;
+
+ beforeEach(() => {
+ layersFn = jest.spyOn(parsingUtils, 'listByLayers');
+ createComponentWithApollo({ mountFn: mount });
+ });
+
+ it('calls listByLayers only once no matter how many times view is switched', async () => {
+ expect(layersFn).not.toHaveBeenCalled();
+ await clickExpandButtonAndAwaitTimers();
+ await wrapper.setProps({ viewType: LAYER_VIEW });
+ await wrapper.vm.$nextTick();
+ expect(layersFn).toHaveBeenCalledTimes(1);
+ await wrapper.setProps({ viewType: STAGE_VIEW });
+ await wrapper.setProps({ viewType: LAYER_VIEW });
+ await wrapper.setProps({ viewType: STAGE_VIEW });
+ expect(layersFn).toHaveBeenCalledTimes(1);
+ });
+ });
+
describe('downstream', () => {
describe('when successful', () => {
beforeEach(() => {
diff --git a/spec/frontend/pipelines/graph/mock_data.js b/spec/frontend/pipelines/graph/mock_data.js
index 7650cbd2d5c..cf420f68f37 100644
--- a/spec/frontend/pipelines/graph/mock_data.js
+++ b/spec/frontend/pipelines/graph/mock_data.js
@@ -8,6 +8,7 @@ export const mockPipelineResponse = {
__typename: 'Pipeline',
id: 163,
iid: '22',
+ usesNeeds: true,
downstream: null,
upstream: null,
stages: {
@@ -434,21 +435,7 @@ export const mockPipelineResponse = {
},
needs: {
__typename: 'CiBuildNeedConnection',
- nodes: [
- {
- __typename: 'CiBuildNeed',
- name: 'build_c',
- },
- {
- __typename: 'CiBuildNeed',
- name: 'build_b',
- },
- {
- __typename: 'CiBuildNeed',
- name:
- 'build_a_nlfjkdnlvskfnksvjknlfdjvlvnjdkjdf_nvjkenjkrlngjeknjkl',
- },
- ],
+ nodes: [],
},
},
],
@@ -583,6 +570,7 @@ export const wrappedPipelineReturn = {
__typename: 'Pipeline',
id: 'gid://gitlab/Ci::Pipeline/175',
iid: '38',
+ usesNeeds: true,
downstream: {
__typename: 'PipelineConnection',
nodes: [],
diff --git a/spec/frontend/pipelines/graph/stage_column_component_spec.js b/spec/frontend/pipelines/graph/stage_column_component_spec.js
index 16dc70a63a5..f9f6c96a1a6 100644
--- a/spec/frontend/pipelines/graph/stage_column_component_spec.js
+++ b/spec/frontend/pipelines/graph/stage_column_component_spec.js
@@ -1,7 +1,7 @@
import { mount, shallowMount } from '@vue/test-utils';
-import ActionComponent from '~/pipelines/components/graph/action_component.vue';
import JobItem from '~/pipelines/components/graph/job_item.vue';
import StageColumnComponent from '~/pipelines/components/graph/stage_column_component.vue';
+import ActionComponent from '~/pipelines/components/jobs_shared/action_component.vue';
const mockJob = {
id: 4250,
@@ -24,11 +24,11 @@ const mockJob = {
const mockGroups = Array(4)
.fill(0)
.map((item, idx) => {
- return { ...mockJob, id: idx, name: `fish-${idx}` };
+ return { ...mockJob, jobs: [mockJob], id: idx, name: `fish-${idx}` };
});
const defaultProps = {
- title: 'Fish',
+ name: 'Fish',
groups: mockGroups,
pipelineId: 159,
};
@@ -62,7 +62,7 @@ describe('stage column component', () => {
});
it('should render provided title', () => {
- expect(findStageColumnTitle().text()).toBe(defaultProps.title);
+ expect(findStageColumnTitle().text()).toBe(defaultProps.name);
});
it('should render the provided groups', () => {
@@ -104,16 +104,22 @@ describe('stage column component', () => {
props: {
groups: [
{
- id: 4259,
+ ...mockJob,
name: '<img src=x onerror=alert(document.domain)>',
- status: {
- icon: 'status_success',
- label: 'success',
- tooltip: '<img src=x onerror=alert(document.domain)>',
- },
+ jobs: [
+ {
+ id: 4259,
+ name: '<img src=x onerror=alert(document.domain)>',
+ status: {
+ icon: 'status_success',
+ label: 'success',
+ tooltip: '<img src=x onerror=alert(document.domain)>',
+ },
+ },
+ ],
},
],
- title: 'test <img src=x onerror=alert(document.domain)>',
+ name: 'test <img src=x onerror=alert(document.domain)>',
},
});
});
@@ -159,6 +165,7 @@ describe('stage column component', () => {
label: 'success',
tooltip: '<img src=x onerror=alert(document.domain)>',
},
+ jobs: [mockJob],
},
],
title: 'test',
@@ -191,6 +198,7 @@ describe('stage column component', () => {
label: 'success',
tooltip: '<img src=x onerror=alert(document.domain)>',
},
+ jobs: [mockJob],
},
],
title: 'test',
diff --git a/spec/frontend/pipelines/graph_shared/links_inner_spec.js b/spec/frontend/pipelines/graph_shared/links_inner_spec.js
index 6fef1c9b62e..e81f046c1eb 100644
--- a/spec/frontend/pipelines/graph_shared/links_inner_spec.js
+++ b/spec/frontend/pipelines/graph_shared/links_inner_spec.js
@@ -8,9 +8,9 @@ import {
PIPELINES_DETAIL_LINKS_JOB_RATIO,
} from '~/performance/constants';
import * as perfUtils from '~/performance/utils';
-import * as sentryUtils from '~/pipelines/components/graph/utils';
import * as Api from '~/pipelines/components/graph_shared/api';
import LinksInner from '~/pipelines/components/graph_shared/links_inner.vue';
+import * as sentryUtils from '~/pipelines/utils';
import { createJobsHash } from '~/pipelines/utils';
import {
jobRect,
diff --git a/spec/frontend/pipelines/graph_shared/links_layer_spec.js b/spec/frontend/pipelines/graph_shared/links_layer_spec.js
index 43d8fe28893..5e5365eef30 100644
--- a/spec/frontend/pipelines/graph_shared/links_layer_spec.js
+++ b/spec/frontend/pipelines/graph_shared/links_layer_spec.js
@@ -1,4 +1,5 @@
-import { GlAlert, GlButton } from '@gitlab/ui';
+import { GlAlert } from '@gitlab/ui';
+import { fireEvent, within } from '@testing-library/dom';
import { mount, shallowMount } from '@vue/test-utils';
import LinksInner from '~/pipelines/components/graph_shared/links_inner.vue';
import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
@@ -7,8 +8,10 @@ import { generateResponse, mockPipelineResponse } from '../graph/mock_data';
describe('links layer component', () => {
let wrapper;
+ const withinComponent = () => within(wrapper.element);
const findAlert = () => wrapper.find(GlAlert);
- const findShowAnyways = () => findAlert().find(GlButton);
+ const findShowAnyways = () =>
+ withinComponent().getByText(wrapper.vm.$options.i18n.showLinksAnyways);
const findLinksInner = () => wrapper.find(LinksInner);
const pipeline = generateResponse(mockPipelineResponse, 'root/fungi-xoxo');
@@ -103,13 +106,13 @@ describe('links layer component', () => {
});
it('renders the disable button', () => {
- expect(findShowAnyways().exists()).toBe(true);
- expect(findShowAnyways().text()).toBe(wrapper.vm.$options.i18n.showLinksAnyways);
+ expect(findShowAnyways()).not.toBe(null);
});
it('shows links when override is clicked', async () => {
expect(findLinksInner().exists()).toBe(false);
- await findShowAnyways().trigger('click');
+ fireEvent(findShowAnyways(), new MouseEvent('click', { bubbles: true }));
+ await wrapper.vm.$nextTick();
expect(findLinksInner().exists()).toBe(true);
});
});
diff --git a/spec/frontend/pipelines/nav_controls_spec.js b/spec/frontend/pipelines/nav_controls_spec.js
index 305dc557b39..2c4740df174 100644
--- a/spec/frontend/pipelines/nav_controls_spec.js
+++ b/spec/frontend/pipelines/nav_controls_spec.js
@@ -1,17 +1,22 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import navControlsComp from '~/pipelines/components/pipelines_list/nav_controls.vue';
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import NavControls from '~/pipelines/components/pipelines_list/nav_controls.vue';
describe('Pipelines Nav Controls', () => {
- let NavControlsComponent;
- let component;
+ let wrapper;
- beforeEach(() => {
- NavControlsComponent = Vue.extend(navControlsComp);
- });
+ const createComponent = (props) => {
+ wrapper = shallowMount(NavControls, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ const findRunPipeline = () => wrapper.find('.js-run-pipeline');
afterEach(() => {
- component.$destroy();
+ wrapper.destroy();
});
it('should render link to create a new pipeline', () => {
@@ -21,12 +26,11 @@ describe('Pipelines Nav Controls', () => {
resetCachePath: 'foo',
};
- component = mountComponent(NavControlsComponent, mockData);
+ createComponent(mockData);
- expect(component.$el.querySelector('.js-run-pipeline').textContent).toContain('Run Pipeline');
- expect(component.$el.querySelector('.js-run-pipeline').getAttribute('href')).toEqual(
- mockData.newPipelinePath,
- );
+ const runPipeline = findRunPipeline();
+ expect(runPipeline.text()).toContain('Run pipeline');
+ expect(runPipeline.attributes('href')).toBe(mockData.newPipelinePath);
});
it('should not render link to create pipeline if no path is provided', () => {
@@ -36,9 +40,9 @@ describe('Pipelines Nav Controls', () => {
resetCachePath: 'foo',
};
- component = mountComponent(NavControlsComponent, mockData);
+ createComponent(mockData);
- expect(component.$el.querySelector('.js-run-pipeline')).toEqual(null);
+ expect(findRunPipeline().exists()).toBe(false);
});
it('should render link for CI lint', () => {
@@ -49,12 +53,10 @@ describe('Pipelines Nav Controls', () => {
resetCachePath: 'foo',
};
- component = mountComponent(NavControlsComponent, mockData);
+ createComponent(mockData);
- expect(component.$el.querySelector('.js-ci-lint').textContent.trim()).toContain('CI Lint');
- expect(component.$el.querySelector('.js-ci-lint').getAttribute('href')).toEqual(
- mockData.ciLintPath,
- );
+ expect(wrapper.find('.js-ci-lint').text().trim()).toContain('CI lint');
+ expect(wrapper.find('.js-ci-lint').attributes('href')).toBe(mockData.ciLintPath);
});
describe('Reset Runners Cache', () => {
@@ -64,22 +66,20 @@ describe('Pipelines Nav Controls', () => {
ciLintPath: 'foo',
resetCachePath: 'foo',
};
-
- component = mountComponent(NavControlsComponent, mockData);
+ createComponent(mockData);
});
it('should render button for resetting runner caches', () => {
- expect(component.$el.querySelector('.js-clear-cache').textContent.trim()).toContain(
- 'Clear Runner Caches',
- );
+ expect(wrapper.find('.js-clear-cache').text().trim()).toContain('Clear runner caches');
});
- it('should emit postAction event when reset runner cache button is clicked', () => {
- jest.spyOn(component, '$emit').mockImplementation(() => {});
+ it('should emit postAction event when reset runner cache button is clicked', async () => {
+ jest.spyOn(wrapper.vm, '$emit').mockImplementation(() => {});
- component.$el.querySelector('.js-clear-cache').click();
+ wrapper.find('.js-clear-cache').vm.$emit('click');
+ await nextTick();
- expect(component.$emit).toHaveBeenCalledWith('resetRunnersCache', 'foo');
+ expect(wrapper.vm.$emit).toHaveBeenCalledWith('resetRunnersCache', 'foo');
});
});
});
diff --git a/spec/frontend/pipelines/notification/pipeline_notification_spec.js b/spec/frontend/pipelines/notification/pipeline_notification_spec.js
new file mode 100644
index 00000000000..79aa337ba9d
--- /dev/null
+++ b/spec/frontend/pipelines/notification/pipeline_notification_spec.js
@@ -0,0 +1,79 @@
+import { GlBanner } from '@gitlab/ui';
+import { createLocalVue, shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import PipelineNotification from '~/pipelines/components/notification/pipeline_notification.vue';
+import getUserCallouts from '~/pipelines/graphql/queries/get_user_callouts.query.graphql';
+
+describe('Pipeline notification', () => {
+ const localVue = createLocalVue();
+
+ let wrapper;
+ const dagDocPath = 'my/dag/path';
+
+ const createWrapper = (apolloProvider) => {
+ return shallowMount(PipelineNotification, {
+ localVue,
+ provide: {
+ dagDocPath,
+ },
+ apolloProvider,
+ });
+ };
+
+ const createWrapperWithApollo = async ({ callouts = [], isLoading = false } = {}) => {
+ localVue.use(VueApollo);
+
+ const mappedCallouts = callouts.map((callout) => {
+ return { featureName: callout, __typename: 'UserCallout' };
+ });
+
+ const mockCalloutsResponse = {
+ data: {
+ currentUser: {
+ id: 45,
+ __typename: 'User',
+ callouts: {
+ id: 5,
+ __typename: 'UserCalloutConnection',
+ nodes: mappedCallouts,
+ },
+ },
+ },
+ };
+ const getUserCalloutsHandler = jest.fn().mockResolvedValue(mockCalloutsResponse);
+ const requestHandlers = [[getUserCallouts, getUserCalloutsHandler]];
+
+ const apolloWrapper = createWrapper(createMockApollo(requestHandlers));
+ if (!isLoading) {
+ await nextTick();
+ }
+
+ return apolloWrapper;
+ };
+
+ const findBanner = () => wrapper.findComponent(GlBanner);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('shows the banner if the user has never seen it', async () => {
+ wrapper = await createWrapperWithApollo({ callouts: ['random'] });
+
+ expect(findBanner().exists()).toBe(true);
+ });
+
+ it('does not show the banner while the user callout query is loading', async () => {
+ wrapper = await createWrapperWithApollo({ callouts: ['random'], isLoading: true });
+
+ expect(findBanner().exists()).toBe(false);
+ });
+
+ it('does not show the banner if the user has previously dismissed it', async () => {
+ wrapper = await createWrapperWithApollo({ callouts: ['pipeline_needs_banner'.toUpperCase()] });
+
+ expect(findBanner().exists()).toBe(false);
+ });
+});
diff --git a/spec/frontend/pipelines/pipeline_graph/mock_data.js b/spec/frontend/pipelines/pipeline_graph/mock_data.js
index 339aac9f349..a79917bfd48 100644
--- a/spec/frontend/pipelines/pipeline_graph/mock_data.js
+++ b/spec/frontend/pipelines/pipeline_graph/mock_data.js
@@ -98,6 +98,42 @@ export const pipelineData = {
],
};
+export const invalidNeedsData = {
+ stages: [
+ {
+ name: 'build',
+ groups: [
+ {
+ name: 'build_1',
+ jobs: [{ script: 'echo hello', stage: 'build' }],
+ },
+ ],
+ },
+ {
+ name: 'test',
+ groups: [
+ {
+ name: 'test_1',
+ jobs: [{ script: 'yarn test', stage: 'test' }],
+ },
+ {
+ name: 'test_2',
+ jobs: [{ script: 'yarn karma', stage: 'test' }],
+ },
+ ],
+ },
+ {
+ name: 'deploy',
+ groups: [
+ {
+ name: 'deploy_1',
+ jobs: [{ script: 'yarn magick', stage: 'deploy', needs: ['invalid_job'] }],
+ },
+ ],
+ },
+ ],
+};
+
export const parallelNeedData = {
stages: [
{
diff --git a/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js b/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
index 718667fcc73..258f2bda829 100644
--- a/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
+++ b/spec/frontend/pipelines/pipeline_graph/pipeline_graph_spec.js
@@ -1,11 +1,13 @@
import { GlAlert } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import { CI_CONFIG_STATUS_INVALID, CI_CONFIG_STATUS_VALID } from '~/pipeline_editor/constants';
+import { CI_CONFIG_STATUS_VALID } from '~/pipeline_editor/constants';
+import LinksInner from '~/pipelines/components/graph_shared/links_inner.vue';
+import LinksLayer from '~/pipelines/components/graph_shared/links_layer.vue';
import JobPill from '~/pipelines/components/pipeline_graph/job_pill.vue';
import PipelineGraph from '~/pipelines/components/pipeline_graph/pipeline_graph.vue';
import StagePill from '~/pipelines/components/pipeline_graph/stage_pill.vue';
-import { DRAW_FAILURE, EMPTY_PIPELINE_DATA, INVALID_CI_CONFIG } from '~/pipelines/constants';
-import { pipelineData, singleStageData } from './mock_data';
+import { DRAW_FAILURE } from '~/pipelines/constants';
+import { invalidNeedsData, pipelineData, singleStageData } from './mock_data';
describe('pipeline graph component', () => {
const defaultProps = { pipelineData };
@@ -16,50 +18,37 @@ describe('pipeline graph component', () => {
propsData: {
...props,
},
+ stubs: { LinksLayer, LinksInner },
+ data() {
+ return {
+ measurements: {
+ width: 1000,
+ height: 1000,
+ },
+ };
+ },
});
};
- const findPipelineGraph = () => wrapper.find('[data-testid="graph-container"]');
- const findAlert = () => wrapper.find(GlAlert);
- const findAllStagePills = () => wrapper.findAll(StagePill);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findAllJobPills = () => wrapper.findAll(JobPill);
const findAllStageBackgroundElements = () => wrapper.findAll('[data-testid="stage-background"]');
+ const findAllStagePills = () => wrapper.findAllComponents(StagePill);
+ const findLinksLayer = () => wrapper.findComponent(LinksLayer);
+ const findPipelineGraph = () => wrapper.find('[data-testid="graph-container"]');
const findStageBackgroundElementAt = (index) => findAllStageBackgroundElements().at(index);
- const findAllJobPills = () => wrapper.findAll(JobPill);
afterEach(() => {
wrapper.destroy();
- wrapper = null;
- });
-
- describe('with no data', () => {
- beforeEach(() => {
- wrapper = createComponent({ pipelineData: {} });
- });
-
- it('renders an empty section', () => {
- expect(wrapper.text()).toBe(wrapper.vm.$options.errorTexts[EMPTY_PIPELINE_DATA]);
- expect(findPipelineGraph().exists()).toBe(false);
- expect(findAllStagePills()).toHaveLength(0);
- expect(findAllJobPills()).toHaveLength(0);
- });
- });
-
- describe('with `INVALID` status', () => {
- beforeEach(() => {
- wrapper = createComponent({ pipelineData: { status: CI_CONFIG_STATUS_INVALID } });
- });
-
- it('renders an error message and does not render the graph', () => {
- expect(findAlert().exists()).toBe(true);
- expect(findAlert().text()).toBe(wrapper.vm.$options.errorTexts[INVALID_CI_CONFIG]);
- expect(findPipelineGraph().exists()).toBe(false);
- });
});
describe('with `VALID` status', () => {
beforeEach(() => {
wrapper = createComponent({
- pipelineData: { status: CI_CONFIG_STATUS_VALID, stages: [{ name: 'hello', groups: [] }] },
+ pipelineData: {
+ status: CI_CONFIG_STATUS_VALID,
+ stages: [{ name: 'hello', groups: [] }],
+ },
});
});
@@ -71,10 +60,11 @@ describe('pipeline graph component', () => {
describe('with error while rendering the links with needs', () => {
beforeEach(() => {
- wrapper = createComponent();
+ wrapper = createComponent({ pipelineData: invalidNeedsData });
});
it('renders the error that link could not be drawn', () => {
+ expect(findLinksLayer().exists()).toBe(true);
expect(findAlert().exists()).toBe(true);
expect(findAlert().text()).toBe(wrapper.vm.$options.errorTexts[DRAW_FAILURE]);
});
diff --git a/spec/frontend/pipelines/pipelines_ci_templates_spec.js b/spec/frontend/pipelines/pipelines_ci_templates_spec.js
new file mode 100644
index 00000000000..d4cf6027ff7
--- /dev/null
+++ b/spec/frontend/pipelines/pipelines_ci_templates_spec.js
@@ -0,0 +1,111 @@
+import { shallowMount } from '@vue/test-utils';
+import ExperimentTracking from '~/experimentation/experiment_tracking';
+import PipelinesCiTemplate from '~/pipelines/components/pipelines_list/pipelines_ci_templates.vue';
+
+const addCiYmlPath = "/-/new/master?commit_message='Add%20.gitlab-ci.yml'";
+const suggestedCiTemplates = [
+ { name: 'Android', logo: '/assets/illustrations/logos/android.svg' },
+ { name: 'Bash', logo: '/assets/illustrations/logos/bash.svg' },
+ { name: 'C++', logo: '/assets/illustrations/logos/c_plus_plus.svg' },
+];
+
+jest.mock('~/experimentation/experiment_tracking');
+
+describe('Pipelines CI Templates', () => {
+ let wrapper;
+
+ const GlEmoji = { template: '<img/>' };
+
+ const createWrapper = () => {
+ return shallowMount(PipelinesCiTemplate, {
+ provide: {
+ addCiYmlPath,
+ suggestedCiTemplates,
+ },
+ stubs: {
+ GlEmoji,
+ },
+ });
+ };
+
+ const findTestTemplateLinks = () => wrapper.findAll('[data-testid="test-template-link"]');
+ const findTemplateDescriptions = () => wrapper.findAll('[data-testid="template-description"]');
+ const findTemplateLinks = () => wrapper.findAll('[data-testid="template-link"]');
+ const findTemplateNames = () => wrapper.findAll('[data-testid="template-name"]');
+ const findTemplateLogos = () => wrapper.findAll('[data-testid="template-logo"]');
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
+ describe('renders test template', () => {
+ beforeEach(() => {
+ wrapper = createWrapper();
+ });
+
+ it('links to the hello world template', () => {
+ expect(findTestTemplateLinks().at(0).attributes('href')).toBe(
+ addCiYmlPath.concat('&template=Hello-World'),
+ );
+ });
+ });
+
+ describe('renders template list', () => {
+ beforeEach(() => {
+ wrapper = createWrapper();
+ });
+
+ it('renders all suggested templates', () => {
+ const content = wrapper.text();
+
+ expect(content).toContain('Android');
+ expect(content).toContain('Bash');
+ expect(content).toContain('C++');
+ });
+
+ it('has the correct template name', () => {
+ expect(findTemplateNames().at(0).text()).toBe('Android');
+ });
+
+ it('links to the correct template', () => {
+ expect(findTemplateLinks().at(0).attributes('href')).toBe(
+ addCiYmlPath.concat('&template=Android'),
+ );
+ });
+
+ it('has the description of the template', () => {
+ expect(findTemplateDescriptions().at(0).text()).toBe(
+ 'CI/CD template to test and deploy your Android project.',
+ );
+ });
+
+ it('has the right logo of the template', () => {
+ expect(findTemplateLogos().at(0).attributes('src')).toBe(
+ '/assets/illustrations/logos/android.svg',
+ );
+ });
+ });
+
+ describe('tracking', () => {
+ beforeEach(() => {
+ wrapper = createWrapper();
+ });
+
+ it('sends an event when template is clicked', () => {
+ findTemplateLinks().at(0).vm.$emit('click');
+
+ expect(ExperimentTracking).toHaveBeenCalledWith('pipeline_empty_state_templates', {
+ label: 'Android',
+ });
+ expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith('template_clicked');
+ });
+
+ it('sends an event when Hello-World template is clicked', () => {
+ findTestTemplateLinks().at(0).vm.$emit('click');
+
+ expect(ExperimentTracking).toHaveBeenCalledWith('pipeline_empty_state_templates', {
+ label: 'Hello-World',
+ });
+ expect(ExperimentTracking.prototype.event).toHaveBeenCalledWith('template_clicked');
+ });
+ });
+});
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index b04880b43ae..84a25f42201 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -1,4 +1,4 @@
-import { GlButton, GlFilteredSearch, GlLoadingIcon, GlPagination } from '@gitlab/ui';
+import { GlButton, GlEmptyState, GlFilteredSearch, GlLoadingIcon, GlPagination } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import { chunk } from 'lodash';
@@ -8,8 +8,6 @@ import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
import { deprecatedCreateFlash as createFlash } from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import BlankState from '~/pipelines/components/pipelines_list/blank_state.vue';
-import EmptyState from '~/pipelines/components/pipelines_list/empty_state.vue';
import NavigationControls from '~/pipelines/components/pipelines_list/nav_controls.vue';
import PipelinesComponent from '~/pipelines/components/pipelines_list/pipelines.vue';
import PipelinesTableComponent from '~/pipelines/components/pipelines_list/pipelines_table.vue';
@@ -58,11 +56,10 @@ describe('Pipelines', () => {
};
const findFilteredSearch = () => wrapper.findComponent(GlFilteredSearch);
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
const findNavigationTabs = () => wrapper.findComponent(NavigationTabs);
const findNavigationControls = () => wrapper.findComponent(NavigationControls);
const findPipelinesTable = () => wrapper.findComponent(PipelinesTableComponent);
- const findEmptyState = () => wrapper.findComponent(EmptyState);
- const findBlankState = () => wrapper.findComponent(BlankState);
const findTablePagination = () => wrapper.findComponent(TablePagination);
const findTab = (tab) => wrapper.findByTestId(`pipelines-tab-${tab}`);
@@ -194,16 +191,16 @@ describe('Pipelines', () => {
expect(findNavigationControls().exists()).toBe(true);
});
- it('renders Run Pipeline link', () => {
+ it('renders Run pipeline link', () => {
expect(findRunPipelineButton().attributes('href')).toBe(paths.newPipelinePath);
});
- it('renders CI Lint link', () => {
+ it('renders CI lint link', () => {
expect(findCiLintButton().attributes('href')).toBe(paths.ciLintPath);
});
- it('renders Clear Runner Cache button', () => {
- expect(findCleanCacheButton().text()).toBe('Clear Runner Caches');
+ it('renders Clear runner cache button', () => {
+ expect(findCleanCacheButton().text()).toBe('Clear runner caches');
});
it('renders pipelines in a table', () => {
@@ -268,7 +265,7 @@ describe('Pipelines', () => {
});
it('should filter pipelines', async () => {
- expect(findBlankState().text()).toBe('There are currently no pipelines.');
+ expect(findEmptyState().text()).toBe('There are currently no pipelines.');
});
it('should update browser bar', () => {
@@ -502,20 +499,24 @@ describe('Pipelines', () => {
expect(findTab('all').text()).toMatchInterpolatedText('All 0');
});
- it('renders Run Pipeline link', () => {
+ it('renders Run pipeline link', () => {
expect(findRunPipelineButton().attributes('href')).toBe(paths.newPipelinePath);
});
- it('renders CI Lint link', () => {
+ it('renders CI lint link', () => {
expect(findCiLintButton().attributes('href')).toBe(paths.ciLintPath);
});
- it('renders Clear Runner Cache button', () => {
- expect(findCleanCacheButton().text()).toBe('Clear Runner Caches');
+ it('renders Clear runner cache button', () => {
+ expect(findCleanCacheButton().text()).toBe('Clear runner caches');
});
it('renders empty state', () => {
- expect(findBlankState().text()).toBe('There are currently no pipelines.');
+ expect(findEmptyState().text()).toBe('There are currently no pipelines.');
+ });
+
+ it('renders filtered search', () => {
+ expect(findFilteredSearch().exists()).toBe(true);
});
it('renders tab empty state finished scope', async () => {
@@ -528,7 +529,7 @@ describe('Pipelines', () => {
await waitForPromises();
- expect(findBlankState().text()).toBe('There are currently no finished pipelines.');
+ expect(findEmptyState().text()).toBe('There are currently no finished pipelines.');
});
});
@@ -550,6 +551,10 @@ describe('Pipelines', () => {
);
});
+ it('does not render filtered search', () => {
+ expect(findFilteredSearch().exists()).toBe(false);
+ });
+
it('does not render tabs nor buttons', () => {
expect(findNavigationTabs().exists()).toBe(false);
expect(findTab('all').exists()).toBe(false);
@@ -599,7 +604,7 @@ describe('Pipelines', () => {
});
it('renders empty state', () => {
- expect(findBlankState().text()).toBe('There are currently no pipelines.');
+ expect(findEmptyState().text()).toBe('There are currently no pipelines.');
});
});
});
@@ -688,7 +693,7 @@ describe('Pipelines', () => {
});
it('shows error state', () => {
- expect(findBlankState().text()).toBe(
+ expect(findEmptyState().text()).toBe(
'There was an error fetching the pipelines. Try again in a few moments or contact your support team.',
);
});
@@ -709,11 +714,11 @@ describe('Pipelines', () => {
expect(findRunPipelineButton().attributes('href')).toBe(paths.newPipelinePath);
expect(findCiLintButton().attributes('href')).toBe(paths.ciLintPath);
- expect(findCleanCacheButton().text()).toBe('Clear Runner Caches');
+ expect(findCleanCacheButton().text()).toBe('Clear runner caches');
});
it('shows error state', () => {
- expect(findBlankState().text()).toBe(
+ expect(findEmptyState().text()).toBe(
'There was an error fetching the pipelines. Try again in a few moments or contact your support team.',
);
});
diff --git a/spec/frontend/pipelines/pipelines_table_row_spec.js b/spec/frontend/pipelines/pipelines_table_row_spec.js
deleted file mode 100644
index 68d46575081..00000000000
--- a/spec/frontend/pipelines/pipelines_table_row_spec.js
+++ /dev/null
@@ -1,239 +0,0 @@
-import { mount } from '@vue/test-utils';
-import waitForPromises from 'helpers/wait_for_promises';
-import PipelinesTableRowComponent from '~/pipelines/components/pipelines_list/pipelines_table_row.vue';
-import eventHub from '~/pipelines/event_hub';
-
-describe('Pipelines Table Row', () => {
- const jsonFixtureName = 'pipelines/pipelines.json';
-
- const createWrapper = (pipeline) =>
- mount(PipelinesTableRowComponent, {
- propsData: {
- pipeline,
- viewType: 'root',
- },
- });
-
- let wrapper;
- let pipeline;
- let pipelineWithoutAuthor;
- let pipelineWithoutCommit;
-
- beforeEach(() => {
- const { pipelines } = getJSONFixture(jsonFixtureName);
-
- pipeline = pipelines.find((p) => p.user !== null && p.commit !== null);
- pipelineWithoutAuthor = pipelines.find((p) => p.user === null && p.commit !== null);
- pipelineWithoutCommit = pipelines.find((p) => p.user === null && p.commit === null);
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- it('should render a table row', () => {
- wrapper = createWrapper(pipeline);
-
- expect(wrapper.attributes('class')).toContain('gl-responsive-table-row');
- });
-
- describe('status column', () => {
- beforeEach(() => {
- wrapper = createWrapper(pipeline);
- });
-
- it('should render a pipeline link', () => {
- expect(wrapper.find('.table-section.commit-link a').attributes('href')).toEqual(
- pipeline.path,
- );
- });
-
- it('should render status text', () => {
- expect(wrapper.find('.table-section.commit-link a').text()).toContain(
- pipeline.details.status.text,
- );
- });
- });
-
- describe('information column', () => {
- beforeEach(() => {
- wrapper = createWrapper(pipeline);
- });
-
- it('should render a pipeline link', () => {
- expect(wrapper.find('.table-section:nth-child(2) a').attributes('href')).toEqual(
- pipeline.path,
- );
- });
-
- it('should render pipeline ID', () => {
- expect(wrapper.find('.table-section:nth-child(2) a > span').text()).toEqual(
- `#${pipeline.id}`,
- );
- });
-
- describe('when a user is provided', () => {
- it('should render user information', () => {
- expect(
- wrapper.find('.table-section:nth-child(3) .js-pipeline-url-user').attributes('href'),
- ).toEqual(pipeline.user.path);
-
- expect(
- wrapper.find('.table-section:nth-child(3) .js-user-avatar-image-tooltip').text().trim(),
- ).toEqual(pipeline.user.name);
- });
- });
- });
-
- describe('commit column', () => {
- it('should render link to commit', () => {
- wrapper = createWrapper(pipeline);
-
- const commitLink = wrapper.find('.branch-commit .commit-sha');
-
- expect(commitLink.attributes('href')).toEqual(pipeline.commit.commit_path);
- });
-
- const findElements = () => {
- const commitTitleElement = wrapper.find('.branch-commit .commit-title');
- const commitAuthorElement = commitTitleElement.find('a.avatar-image-container');
-
- if (!commitAuthorElement.exists()) {
- return {
- commitAuthorElement,
- };
- }
-
- const commitAuthorLink = commitAuthorElement.attributes('href');
- const commitAuthorName = commitAuthorElement
- .find('.js-user-avatar-image-tooltip')
- .text()
- .trim();
-
- return {
- commitAuthorElement,
- commitAuthorLink,
- commitAuthorName,
- };
- };
-
- it('renders nothing without commit', () => {
- expect(pipelineWithoutCommit.commit).toBe(null);
-
- wrapper = createWrapper(pipelineWithoutCommit);
- const { commitAuthorElement } = findElements();
-
- expect(commitAuthorElement.exists()).toBe(false);
- });
-
- it('renders commit author', () => {
- wrapper = createWrapper(pipeline);
- const { commitAuthorLink, commitAuthorName } = findElements();
-
- expect(commitAuthorLink).toEqual(pipeline.commit.author.path);
- expect(commitAuthorName).toEqual(pipeline.commit.author.username);
- });
-
- it('renders commit with unregistered author', () => {
- expect(pipelineWithoutAuthor.commit.author).toBe(null);
-
- wrapper = createWrapper(pipelineWithoutAuthor);
- const { commitAuthorLink, commitAuthorName } = findElements();
-
- expect(commitAuthorLink).toEqual(`mailto:${pipelineWithoutAuthor.commit.author_email}`);
- expect(commitAuthorName).toEqual(pipelineWithoutAuthor.commit.author_name);
- });
- });
-
- describe('stages column', () => {
- const findAllMiniPipelineStages = () =>
- wrapper.findAll('.table-section:nth-child(5) [data-testid="mini-pipeline-graph-dropdown"]');
-
- it('should render an icon for each stage', () => {
- wrapper = createWrapper(pipeline);
-
- expect(findAllMiniPipelineStages()).toHaveLength(pipeline.details.stages.length);
- });
-
- it('should not render stages when stages are empty', () => {
- const withoutStages = { ...pipeline };
- withoutStages.details = { ...withoutStages.details, stages: null };
-
- wrapper = createWrapper(withoutStages);
-
- expect(findAllMiniPipelineStages()).toHaveLength(0);
- });
- });
-
- describe('actions column', () => {
- const scheduledJobAction = {
- name: 'some scheduled job',
- };
-
- beforeEach(() => {
- const withActions = { ...pipeline };
- withActions.details.scheduled_actions = [scheduledJobAction];
- withActions.flags.cancelable = true;
- withActions.flags.retryable = true;
- withActions.cancel_path = '/cancel';
- withActions.retry_path = '/retry';
-
- wrapper = createWrapper(withActions);
- });
-
- it('should render the provided actions', () => {
- expect(wrapper.find('.js-pipelines-retry-button').exists()).toBe(true);
- expect(wrapper.find('.js-pipelines-retry-button').attributes('title')).toMatch('Retry');
- expect(wrapper.find('.js-pipelines-cancel-button').exists()).toBe(true);
- expect(wrapper.find('.js-pipelines-cancel-button').attributes('title')).toMatch('Cancel');
- });
-
- it('should render the manual actions', async () => {
- const manualActions = wrapper.find('[data-testid="pipelines-manual-actions-dropdown"]');
-
- // Click on the dropdown and wait for `lazy` dropdown items
- manualActions.find('.dropdown-toggle').trigger('click');
- await waitForPromises();
-
- expect(manualActions.text()).toContain(scheduledJobAction.name);
- });
-
- it('emits `retryPipeline` event when retry button is clicked and toggles loading', () => {
- eventHub.$on('retryPipeline', (endpoint) => {
- expect(endpoint).toBe('/retry');
- });
-
- wrapper.find('.js-pipelines-retry-button').trigger('click');
- expect(wrapper.vm.isRetrying).toBe(true);
- });
-
- it('emits `openConfirmationModal` event when cancel button is clicked and toggles loading', () => {
- eventHub.$once('openConfirmationModal', (data) => {
- const { id, ref, commit } = pipeline;
-
- expect(data.endpoint).toBe('/cancel');
- expect(data.pipeline).toEqual(
- expect.objectContaining({
- id,
- ref,
- commit,
- }),
- );
- });
-
- wrapper.find('.js-pipelines-cancel-button').trigger('click');
- });
-
- it('renders a loading icon when `cancelingPipeline` matches pipeline id', (done) => {
- wrapper.setProps({ cancelingPipeline: pipeline.id });
- wrapper.vm
- .$nextTick()
- .then(() => {
- expect(wrapper.vm.isCancelling).toBe(true);
- })
- .then(done)
- .catch(done.fail);
- });
- });
-});
diff --git a/spec/frontend/pipelines/pipelines_table_spec.js b/spec/frontend/pipelines/pipelines_table_spec.js
index 952bea81052..70e47b98575 100644
--- a/spec/frontend/pipelines/pipelines_table_spec.js
+++ b/spec/frontend/pipelines/pipelines_table_spec.js
@@ -30,23 +30,17 @@ describe('Pipelines Table', () => {
return pipelines.find((p) => p.user !== null && p.commit !== null);
};
- const createComponent = (props = {}, flagState = false) => {
+ const createComponent = (props = {}) => {
wrapper = extendedWrapper(
mount(PipelinesTable, {
propsData: {
...defaultProps,
...props,
},
- provide: {
- glFeatures: {
- newPipelinesTable: flagState,
- },
- },
}),
);
};
- const findRows = () => wrapper.findAll('.commit.gl-responsive-table-row');
const findGlTable = () => wrapper.findComponent(GlTable);
const findStatusBadge = () => wrapper.findComponent(PipelinesStatusBadge);
const findPipelineInfo = () => wrapper.findComponent(PipelineUrl);
@@ -56,8 +50,7 @@ describe('Pipelines Table', () => {
const findTimeAgo = () => wrapper.findComponent(PipelinesTimeago);
const findActions = () => wrapper.findComponent(PipelineOperations);
- const findLegacyTable = () => wrapper.findByTestId('legacy-ci-table');
- const findTableRows = () => wrapper.findAll('[data-testid="pipeline-table-row"]');
+ const findTableRows = () => wrapper.findAllByTestId('pipeline-table-row');
const findStatusTh = () => wrapper.findByTestId('status-th');
const findPipelineTh = () => wrapper.findByTestId('pipeline-th');
const findTriggererTh = () => wrapper.findByTestId('triggerer-th');
@@ -75,52 +68,13 @@ describe('Pipelines Table', () => {
wrapper = null;
});
- describe('table with feature flag off', () => {
- describe('renders the table correctly', () => {
- beforeEach(() => {
- createComponent();
- });
-
- it('should render a table', () => {
- expect(wrapper.classes()).toContain('ci-table');
- });
-
- it('should render table head with correct columns', () => {
- expect(wrapper.find('.table-section.js-pipeline-status').text()).toEqual('Status');
-
- expect(wrapper.find('.table-section.js-pipeline-info').text()).toEqual('Pipeline');
-
- expect(wrapper.find('.table-section.js-pipeline-commit').text()).toEqual('Commit');
-
- expect(wrapper.find('.table-section.js-pipeline-stages').text()).toEqual('Stages');
- });
- });
-
- describe('without data', () => {
- it('should render an empty table', () => {
- createComponent();
-
- expect(findRows()).toHaveLength(0);
- });
- });
-
- describe('with data', () => {
- it('should render rows', () => {
- createComponent({ pipelines: [pipeline], viewType: 'root' });
-
- expect(findRows()).toHaveLength(1);
- });
- });
- });
-
- describe('table with feature flag on', () => {
+ describe('Pipelines Table', () => {
beforeEach(() => {
- createComponent({ pipelines: [pipeline], viewType: 'root' }, true);
+ createComponent({ pipelines: [pipeline], viewType: 'root' });
});
- it('displays new table', () => {
+ it('displays table', () => {
expect(findGlTable().exists()).toBe(true);
- expect(findLegacyTable().exists()).toBe(false);
});
it('should render table head with correct columns', () => {
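For reference, the hunks above drop the `newPipelinesTable` flag plumbing from this spec. A minimal sketch of the `provide`-based feature-flag pattern the deleted code relied on, using a hypothetical inline component and flag name (real components usually read the flag through a shared mixin rather than a raw inject):

import { mount } from '@vue/test-utils';

// Hypothetical component that switches behaviour on an injected feature flag.
const FlaggedTable = {
  inject: ['glFeatures'],
  render(h) {
    return h('div', this.glFeatures.someFeatureFlag ? 'new table' : 'legacy table');
  },
};

const createComponent = (flagState = false) =>
  mount(FlaggedTable, {
    provide: {
      glFeatures: { someFeatureFlag: flagState },
    },
  });

it('renders the flagged behaviour only when the flag is on', () => {
  expect(createComponent(true).text()).toBe('new table');
  expect(createComponent(false).text()).toBe('legacy table');
});
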
diff --git a/spec/frontend/pipelines/time_ago_spec.js b/spec/frontend/pipelines/time_ago_spec.js
index 93aeb049434..3de7995b476 100644
--- a/spec/frontend/pipelines/time_ago_spec.js
+++ b/spec/frontend/pipelines/time_ago_spec.js
@@ -1,25 +1,33 @@
import { GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import TimeAgo from '~/pipelines/components/pipelines_list/time_ago.vue';
describe('Timeago component', () => {
let wrapper;
- const createComponent = (props = {}) => {
- wrapper = shallowMount(TimeAgo, {
- propsData: {
- pipeline: {
- details: {
- ...props,
+ const defaultProps = { duration: 0, finished_at: '' };
+
+ const createComponent = (props = defaultProps, stuck = false) => {
+ wrapper = extendedWrapper(
+ shallowMount(TimeAgo, {
+ propsData: {
+ pipeline: {
+ details: {
+ ...props,
+ },
+ flags: {
+ stuck,
+ },
},
},
- },
- data() {
- return {
- iconTimerSvg: `<svg></svg>`,
- };
- },
- });
+ data() {
+ return {
+ iconTimerSvg: `<svg></svg>`,
+ };
+ },
+ }),
+ );
};
afterEach(() => {
@@ -29,7 +37,10 @@ describe('Timeago component', () => {
const duration = () => wrapper.find('.duration');
const finishedAt = () => wrapper.find('.finished-at');
- const findInProgress = () => wrapper.find('[data-testid="pipeline-in-progress"]');
+ const findInProgress = () => wrapper.findByTestId('pipeline-in-progress');
+ const findSkipped = () => wrapper.findByTestId('pipeline-skipped');
+ const findHourGlassIcon = () => wrapper.findByTestId('hourglass-icon');
+ const findWarningIcon = () => wrapper.findByTestId('warning-icon');
describe('with duration', () => {
beforeEach(() => {
@@ -46,7 +57,7 @@ describe('Timeago component', () => {
describe('without duration', () => {
beforeEach(() => {
- createComponent({ duration: 0, finished_at: '' });
+ createComponent();
});
it('should not render duration and timer svg', () => {
@@ -71,7 +82,7 @@ describe('Timeago component', () => {
describe('without finishedTime', () => {
beforeEach(() => {
- createComponent({ duration: 0, finished_at: '' });
+ createComponent();
});
it('should not render time and calendar icon', () => {
@@ -89,10 +100,34 @@ describe('Timeago component', () => {
`(
'progress state shown: $shouldShow when pipeline duration is $durationTime and finished_at is $finishedAtTime',
({ durationTime, finishedAtTime, shouldShow }) => {
- createComponent({ duration: durationTime, finished_at: finishedAtTime });
+ createComponent({
+ duration: durationTime,
+ finished_at: finishedAtTime,
+ });
expect(findInProgress().exists()).toBe(shouldShow);
+ expect(findSkipped().exists()).toBe(false);
},
);
+
+ it('should show a warning icon beside the in-progress state if the pipeline is stuck', () => {
+ const stuck = true;
+
+ createComponent(defaultProps, stuck);
+
+ expect(findWarningIcon().exists()).toBe(true);
+ expect(findHourGlassIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('skipped', () => {
+ it('should show skipped if pipeline was skipped', () => {
+ createComponent({
+ status: { label: 'skipped' },
+ });
+
+ expect(findSkipped().exists()).toBe(true);
+ expect(findInProgress().exists()).toBe(false);
+ });
});
});
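The rewritten spec above wraps `shallowMount` in `extendedWrapper` so elements can be located with `findByTestId` instead of hand-written attribute selectors. A minimal sketch of that helper pattern, using a hypothetical inline component:

import { shallowMount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';

// Hypothetical component rendering a data-testid attribute.
const StatusBadge = {
  render(h) {
    return h('span', { attrs: { 'data-testid': 'status-icon' } }, 'passed');
  },
};

it('finds elements by data-testid without hand-writing selectors', () => {
  const wrapper = extendedWrapper(shallowMount(StatusBadge));

  // equivalent to wrapper.find('[data-testid="status-icon"]')
  expect(wrapper.findByTestId('status-icon').text()).toBe('passed');
});
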
diff --git a/spec/frontend/pipelines/unwrapping_utils_spec.js b/spec/frontend/pipelines/unwrapping_utils_spec.js
index cd16ed7262e..a6ce7d4049f 100644
--- a/spec/frontend/pipelines/unwrapping_utils_spec.js
+++ b/spec/frontend/pipelines/unwrapping_utils_spec.js
@@ -96,11 +96,11 @@ const completeMock = [
describe('Shared pipeline unwrapping utils', () => {
describe('unwrapGroups', () => {
it('takes stages without nodes and returns the unwrapped groups', () => {
- expect(unwrapGroups(stagesAndGroups)[0].groups).toEqual(groupsArray);
+ expect(unwrapGroups(stagesAndGroups)[0].node.groups).toEqual(groupsArray);
});
it('keeps other stage properties intact', () => {
- expect(unwrapGroups(stagesAndGroups)[0]).toMatchObject(basicStageInfo);
+ expect(unwrapGroups(stagesAndGroups)[0].node).toMatchObject(basicStageInfo);
});
});
diff --git a/spec/frontend/projects/commit/components/branches_dropdown_spec.js b/spec/frontend/projects/commit/components/branches_dropdown_spec.js
index 7686c28c7fc..ab84c3768d0 100644
--- a/spec/frontend/projects/commit/components/branches_dropdown_spec.js
+++ b/spec/frontend/projects/commit/components/branches_dropdown_spec.js
@@ -15,7 +15,7 @@ describe('BranchesDropdown', () => {
const createComponent = (term, state = { isFetching: false }) => {
store = new Vuex.Store({
getters: {
- joinedBranches: () => ['_master_', '_branch_1_', '_branch_2_'],
+ joinedBranches: () => ['_main_', '_branch_1_', '_branch_2_'],
},
actions: {
fetchBranches: spyFetchBranches,
@@ -94,13 +94,13 @@ describe('BranchesDropdown', () => {
it('renders all branches when search term is empty', () => {
expect(findAllDropdownItems()).toHaveLength(3);
- expect(findDropdownItemByIndex(0).text()).toBe('_master_');
+ expect(findDropdownItemByIndex(0).text()).toBe('_main_');
expect(findDropdownItemByIndex(1).text()).toBe('_branch_1_');
expect(findDropdownItemByIndex(2).text()).toBe('_branch_2_');
});
it('should not be selected on the inactive branch', () => {
- expect(wrapper.vm.isSelected('_master_')).toBe(false);
+ expect(wrapper.vm.isSelected('_main_')).toBe(false);
});
});
diff --git a/spec/frontend/projects/commit/components/commit_comments_button_spec.js b/spec/frontend/projects/commit/components/commit_comments_button_spec.js
new file mode 100644
index 00000000000..873270c5be1
--- /dev/null
+++ b/spec/frontend/projects/commit/components/commit_comments_button_spec.js
@@ -0,0 +1,42 @@
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import CommitCommentsButton from '~/projects/commit/components/commit_comments_button.vue';
+
+describe('CommitCommentsButton', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = extendedWrapper(
+ shallowMount(CommitCommentsButton, {
+ propsData: {
+ commentsCount: 1,
+ ...props,
+ },
+ }),
+ );
+ };
+
+ const tooltip = () => wrapper.findByTestId('comment-button-wrapper');
+
+ describe('Comment Button', () => {
+ it('has proper tooltip and button attributes for 1 comment', () => {
+ createComponent();
+
+ expect(tooltip().attributes('title')).toBe('1 comment on this commit');
+ expect(tooltip().text()).toBe('1');
+ });
+
+ it('has proper tooltip and button attributes for multiple comments', () => {
+ createComponent({ commentsCount: 2 });
+
+ expect(tooltip().attributes('title')).toBe('2 comments on this commit');
+ expect(tooltip().text()).toBe('2');
+ });
+
+ it('does not show when there are no comments', () => {
+ createComponent({ commentsCount: 0 });
+
+ expect(tooltip().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/projects/commit/components/commit_options_dropdown_spec.js b/spec/frontend/projects/commit/components/commit_options_dropdown_spec.js
new file mode 100644
index 00000000000..70491405986
--- /dev/null
+++ b/spec/frontend/projects/commit/components/commit_options_dropdown_spec.js
@@ -0,0 +1,123 @@
+import { GlDropdownDivider, GlDropdownSectionHeader } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import CommitOptionsDropdown from '~/projects/commit/components/commit_options_dropdown.vue';
+import { OPEN_REVERT_MODAL, OPEN_CHERRY_PICK_MODAL } from '~/projects/commit/constants';
+import eventHub from '~/projects/commit/event_hub';
+
+describe('BranchesDropdown', () => {
+ let wrapper;
+ const provide = {
+ newProjectTagPath: '_new_project_tag_path_',
+ emailPatchesPath: '_email_patches_path_',
+ plainDiffPath: '_plain_diff_path_',
+ };
+
+ const createComponent = (props = {}) => {
+ wrapper = extendedWrapper(
+ shallowMount(CommitOptionsDropdown, {
+ provide,
+ propsData: {
+ canRevert: true,
+ canCherryPick: true,
+ canTag: true,
+ canEmailPatches: true,
+ ...props,
+ },
+ }),
+ );
+ };
+
+ const findRevertLink = () => wrapper.findByTestId('revert-link');
+ const findCherryPickLink = () => wrapper.findByTestId('cherry-pick-link');
+ const findTagItem = () => wrapper.findByTestId('tag-link');
+ const findEmailPatchesItem = () => wrapper.findByTestId('email-patches-link');
+ const findPlainDiffItem = () => wrapper.findByTestId('plain-diff-link');
+ const findDivider = () => wrapper.findComponent(GlDropdownDivider);
+ const findSectionHeader = () => wrapper.findComponent(GlDropdownSectionHeader);
+
+ describe('Everything enabled', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('has expected dropdown button text', () => {
+ expect(wrapper.attributes('text')).toBe('Options');
+ });
+
+ it('has expected items', () => {
+ expect(
+ [
+ findRevertLink().exists(),
+ findCherryPickLink().exists(),
+ findTagItem().exists(),
+ findDivider().exists(),
+ findSectionHeader().exists(),
+ findEmailPatchesItem().exists(),
+ findPlainDiffItem().exists(),
+ ].every((exists) => exists),
+ ).toBe(true);
+ });
+
+ it('has expected href links', () => {
+ expect(findTagItem().attributes('href')).toBe(provide.newProjectTagPath);
+ expect(findEmailPatchesItem().attributes('href')).toBe(provide.emailPatchesPath);
+ expect(findPlainDiffItem().attributes('href')).toBe(provide.plainDiffPath);
+ });
+ });
+
+ describe('Different dropdown item permutations', () => {
+ it('does not have a revert option', () => {
+ createComponent({ canRevert: false });
+
+ expect(findRevertLink().exists()).toBe(false);
+ });
+
+ it('does not have a cherry-pick option', () => {
+ createComponent({ canCherryPick: false });
+
+ expect(findCherryPickLink().exists()).toBe(false);
+ });
+
+ it('does not have a tag option', () => {
+ createComponent({ canTag: false });
+
+ expect(findTagItem().exists()).toBe(false);
+ });
+
+ it('does not have an email patches option', () => {
+ createComponent({ canEmailPatches: false });
+
+ expect(findEmailPatchesItem().exists()).toBe(false);
+ });
+
+ it('only has the download items', () => {
+ createComponent({ canRevert: false, canCherryPick: false, canTag: false });
+
+ expect(findDivider().exists()).toBe(false);
+ expect(findEmailPatchesItem().exists()).toBe(true);
+ expect(findPlainDiffItem().exists()).toBe(true);
+ });
+ });
+
+ describe('Modal triggering', () => {
+ let spy;
+
+ beforeEach(() => {
+ spy = jest.spyOn(eventHub, '$emit');
+ createComponent();
+ });
+
+ it('emits openModal for revert', () => {
+ findRevertLink().vm.$emit('click');
+
+ expect(spy).toHaveBeenCalledWith(OPEN_REVERT_MODAL);
+ });
+
+ it('emits openModal for cherry-pick', () => {
+ findCherryPickLink().vm.$emit('click');
+
+ expect(spy).toHaveBeenCalledWith(OPEN_CHERRY_PICK_MODAL);
+ });
+ });
+});
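The new dropdown spec verifies modal opening by spying on the shared event hub rather than inspecting DOM changes. A minimal sketch of that spy-and-assert pattern; only the event hub import comes from the spec above, the inline component and event name are stand-ins:

import { shallowMount } from '@vue/test-utils';
import eventHub from '~/projects/commit/event_hub';

const OPEN_SOME_MODAL = 'openModal'; // hypothetical event name

// Hypothetical link component that fires the modal event on the shared hub.
const TriggerLink = {
  render(h) {
    return h(
      'a',
      {
        attrs: { 'data-testid': 'trigger' },
        on: { click: () => eventHub.$emit(OPEN_SOME_MODAL) },
      },
      'open',
    );
  },
};

it('emits the modal event on the shared hub when clicked', () => {
  const spy = jest.spyOn(eventHub, '$emit');
  const wrapper = shallowMount(TriggerLink);

  wrapper.find('[data-testid="trigger"]').trigger('click');

  expect(spy).toHaveBeenCalledWith(OPEN_SOME_MODAL);
});
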
diff --git a/spec/frontend/projects/commit/components/form_modal_spec.js b/spec/frontend/projects/commit/components/form_modal_spec.js
index 708644cb7ee..9688cb47799 100644
--- a/spec/frontend/projects/commit/components/form_modal_spec.js
+++ b/spec/frontend/projects/commit/components/form_modal_spec.js
@@ -17,15 +17,14 @@ describe('CommitFormModal', () => {
let store;
let axiosMock;
- const createComponent = (method, state = {}, provide = {}) => {
+ const createComponent = (method, state = {}, provide = {}, propsData = {}) => {
store = createStore({ ...mockData.mockModal, ...state });
wrapper = extendedWrapper(
method(CommitFormModal, {
provide: {
...provide,
- glFeatures: { pickIntoProject: true },
},
- propsData: { ...mockData.modalPropsData },
+ propsData: { ...mockData.modalPropsData, ...propsData },
store,
attrs: {
static: true,
@@ -160,6 +159,12 @@ describe('CommitFormModal', () => {
});
it('Changes the target_project_id input value', async () => {
+ createComponent(
+ shallowMount,
+ {},
+ { glFeatures: { pickIntoProject: true } },
+ { isCherryPick: true },
+ );
findProjectsDropdown().vm.$emit('selectProject', '_changed_project_value_');
await wrapper.vm.$nextTick();
diff --git a/spec/frontend/projects/commit/components/form_trigger_spec.js b/spec/frontend/projects/commit/components/form_trigger_spec.js
deleted file mode 100644
index 4503493c0a6..00000000000
--- a/spec/frontend/projects/commit/components/form_trigger_spec.js
+++ /dev/null
@@ -1,44 +0,0 @@
-import { GlLink } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import FormTrigger from '~/projects/commit/components/form_trigger.vue';
-import eventHub from '~/projects/commit/event_hub';
-
-const displayText = '_display_text_';
-
-const createComponent = () => {
- return shallowMount(FormTrigger, {
- provide: { displayText },
- propsData: { openModal: '_open_modal_' },
- });
-};
-
-describe('FormTrigger', () => {
- let wrapper;
- let spy;
-
- beforeEach(() => {
- spy = jest.spyOn(eventHub, '$emit');
- wrapper = createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
- const findLink = () => wrapper.find(GlLink);
-
- describe('displayText', () => {
- it('includes the correct displayText for the link', () => {
- expect(findLink().text()).toBe(displayText);
- });
- });
-
- describe('clicking the link', () => {
- it('emits openModal', () => {
- findLink().vm.$emit('click');
-
- expect(spy).toHaveBeenCalledWith('_open_modal_');
- });
- });
-});
diff --git a/spec/frontend/projects/commit/mock_data.js b/spec/frontend/projects/commit/mock_data.js
index e4dcb24c4c0..34e9c400af4 100644
--- a/spec/frontend/projects/commit/mock_data.js
+++ b/spec/frontend/projects/commit/mock_data.js
@@ -23,6 +23,6 @@ export default {
modalId: '_modal_id_',
openModal: '_open_modal_',
},
- mockBranches: ['_branch_1', '_abc_', '_master_'],
+ mockBranches: ['_branch_1', '_abc_', '_main_'],
mockProjects: ['_project_1', '_abc_', '_project_'],
};
diff --git a/spec/frontend/projects/commit/store/mutations_spec.js b/spec/frontend/projects/commit/store/mutations_spec.js
index 8989e769772..60abf0fddad 100644
--- a/spec/frontend/projects/commit/store/mutations_spec.js
+++ b/spec/frontend/projects/commit/store/mutations_spec.js
@@ -27,7 +27,7 @@ describe('Commit form modal mutations', () => {
describe('CLEAR_MODAL', () => {
it('should clear modal state ', () => {
- stateCopy = { branch: '_master_', defaultBranch: '_default_branch_' };
+ stateCopy = { branch: '_main_', defaultBranch: '_default_branch_' };
mutations[types.CLEAR_MODAL](stateCopy);
@@ -47,7 +47,7 @@ describe('Commit form modal mutations', () => {
describe('SET_BRANCH', () => {
it('should set branch', () => {
- stateCopy = { branch: '_master_' };
+ stateCopy = { branch: '_main_' };
mutations[types.SET_BRANCH](stateCopy, '_changed_branch_');
@@ -57,7 +57,7 @@ describe('Commit form modal mutations', () => {
describe('SET_SELECTED_BRANCH', () => {
it('should set selectedBranch', () => {
- stateCopy = { selectedBranch: '_master_' };
+ stateCopy = { selectedBranch: '_main_' };
mutations[types.SET_SELECTED_BRANCH](stateCopy, '_changed_branch_');
diff --git a/spec/frontend/projects/commit_box/info/load_branches_spec.js b/spec/frontend/projects/commit_box/info/load_branches_spec.js
index 8100200cbdd..9456e6ef5f5 100644
--- a/spec/frontend/projects/commit_box/info/load_branches_spec.js
+++ b/spec/frontend/projects/commit_box/info/load_branches_spec.js
@@ -1,66 +1,73 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
+import { setHTMLFixture } from 'helpers/fixtures';
import waitForPromises from 'helpers/wait_for_promises';
import { loadBranches } from '~/projects/commit_box/info/load_branches';
const mockCommitPath = '/commit/abcd/branches';
const mockBranchesRes =
- '<a href="/-/commits/master">master</a><span><a href="/-/commits/my-branch">my-branch</a></span>';
+ '<a href="/-/commits/main">main</a><span><a href="/-/commits/my-branch">my-branch</a></span>';
describe('~/projects/commit_box/info/load_branches', () => {
let mock;
- let el;
+
+ const getElInnerHtml = () => document.querySelector('.js-commit-box-info').innerHTML;
beforeEach(() => {
+ setHTMLFixture(`
+ <div class="js-commit-box-info" data-commit-path="${mockCommitPath}">
+ <div class="commit-info branches">
+ <span class="spinner"/>
+ </div>
+ </div>`);
+
mock = new MockAdapter(axios);
mock.onGet(mockCommitPath).reply(200, mockBranchesRes);
-
- el = document.createElement('div');
- el.dataset.commitPath = mockCommitPath;
- el.innerHTML = '<div class="commit-info branches"><span class="spinner"/></div>';
});
it('loads and renders branches info', async () => {
- loadBranches(el);
+ loadBranches();
await waitForPromises();
- expect(el.innerHTML).toBe(`<div class="commit-info branches">${mockBranchesRes}</div>`);
+ expect(getElInnerHtml()).toMatchInterpolatedText(
+ `<div class="commit-info branches">${mockBranchesRes}</div>`,
+ );
});
it('does not load when no container is provided', async () => {
- loadBranches(null);
+ loadBranches('.js-another-class');
await waitForPromises();
expect(mock.history.get).toHaveLength(0);
});
- describe('when braches request returns unsafe content', () => {
+ describe('when branches request returns unsafe content', () => {
beforeEach(() => {
mock
.onGet(mockCommitPath)
- .reply(200, '<a onload="alert(\'xss!\');" href="/-/commits/master">master</a>');
+ .reply(200, '<a onload="alert(\'xss!\');" href="/-/commits/main">main</a>');
});
it('displays sanitized html', async () => {
- loadBranches(el);
+ loadBranches();
await waitForPromises();
- expect(el.innerHTML).toBe(
- '<div class="commit-info branches"><a href="/-/commits/master">master</a></div>',
+ expect(getElInnerHtml()).toMatchInterpolatedText(
+ '<div class="commit-info branches"><a href="/-/commits/main">main</a></div>',
);
});
});
- describe('when braches request fails', () => {
+ describe('when branches request fails', () => {
beforeEach(() => {
mock.onGet(mockCommitPath).reply(500, 'Error!');
});
it('attempts to load and renders an error', async () => {
- loadBranches(el);
+ loadBranches();
await waitForPromises();
- expect(el.innerHTML).toBe(
+ expect(getElInnerHtml()).toMatchInterpolatedText(
'<div class="commit-info branches">Failed to load branches. Please try again.</div>',
);
});
diff --git a/spec/frontend/projects/compare/components/app_legacy_spec.js b/spec/frontend/projects/compare/components/app_legacy_spec.js
index 4c7f0d5cccc..93e96c8b9f7 100644
--- a/spec/frontend/projects/compare/components/app_legacy_spec.js
+++ b/spec/frontend/projects/compare/components/app_legacy_spec.js
@@ -8,7 +8,7 @@ jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
const projectCompareIndexPath = 'some/path';
const refsProjectPath = 'some/refs/path';
const paramsFrom = 'master';
-const paramsTo = 'master';
+const paramsTo = 'some-other-branch';
describe('CompareApp component', () => {
let wrapper;
@@ -36,6 +36,9 @@ describe('CompareApp component', () => {
createComponent();
});
+ const findSourceDropdown = () => wrapper.find('[data-testid="sourceRevisionDropdown"]');
+ const findTargetDropdown = () => wrapper.find('[data-testid="targetRevisionDropdown"]');
+
it('renders component with prop', () => {
expect(wrapper.props()).toEqual(
expect.objectContaining({
@@ -62,12 +65,31 @@ describe('CompareApp component', () => {
expect(wrapper.find('[data-testid="ellipsis"]').exists()).toBe(true);
});
- it('render Source and Target BranchDropdown components', () => {
- const branchDropdowns = wrapper.findAll(RevisionDropdown);
+ describe('Source and Target BranchDropdown components', () => {
+ const findAllBranchDropdowns = () => wrapper.findAll(RevisionDropdown);
+
+ it('renders the components with the correct props', () => {
+ expect(findAllBranchDropdowns().length).toBe(2);
+ expect(findSourceDropdown().props('revisionText')).toBe('Source');
+ expect(findTargetDropdown().props('revisionText')).toBe('Target');
+ });
+
+ it('sets the revision when the "selectRevision" event is emitted', async () => {
+ findSourceDropdown().vm.$emit('selectRevision', {
+ direction: 'to',
+ revision: 'some-source-revision',
+ });
+
+ findTargetDropdown().vm.$emit('selectRevision', {
+ direction: 'from',
+ revision: 'some-target-revision',
+ });
+
+ await wrapper.vm.$nextTick();
- expect(branchDropdowns.length).toBe(2);
- expect(branchDropdowns.at(0).props('revisionText')).toBe('Source');
- expect(branchDropdowns.at(1).props('revisionText')).toBe('Target');
+ expect(findTargetDropdown().props('paramsBranch')).toBe('some-target-revision');
+ expect(findSourceDropdown().props('paramsBranch')).toBe('some-source-revision');
+ });
});
describe('compare button', () => {
@@ -87,6 +109,27 @@ describe('CompareApp component', () => {
});
});
+ describe('swap revisions button', () => {
+ const findSwapRevisionsButton = () => wrapper.find('[data-testid="swapRevisionsButton"]');
+
+ it('renders the swap revisions button', () => {
+ expect(findSwapRevisionsButton().exists()).toBe(true);
+ });
+
+ it('has the correct text', () => {
+ expect(findSwapRevisionsButton().text()).toBe('Swap revisions');
+ });
+
+ it('swaps revisions when clicked', async () => {
+ findSwapRevisionsButton().vm.$emit('click');
+
+ await wrapper.vm.$nextTick();
+
+ expect(findTargetDropdown().props('paramsBranch')).toBe(paramsTo);
+ expect(findSourceDropdown().props('paramsBranch')).toBe(paramsFrom);
+ });
+ });
+
describe('merge request buttons', () => {
const findProjectMrButton = () => wrapper.find('[data-testid="projectMrButton"]');
const findCreateMrButton = () => wrapper.find('[data-testid="createMrButton"]');
diff --git a/spec/frontend/projects/compare/components/repo_dropdown_spec.js b/spec/frontend/projects/compare/components/repo_dropdown_spec.js
index af76632515c..df8fea8fd32 100644
--- a/spec/frontend/projects/compare/components/repo_dropdown_spec.js
+++ b/spec/frontend/projects/compare/components/repo_dropdown_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown } from '@gitlab/ui';
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import RepoDropdown from '~/projects/compare/components/repo_dropdown.vue';
@@ -69,12 +69,12 @@ describe('RepoDropdown component', () => {
createComponent({ paramsName: 'from' });
});
- it('set hidden input of the first project', () => {
- expect(findHiddenInput().attributes('value')).toBe(projectFromId);
+ it('sets the hidden input of the selected project', () => {
+ expect(findHiddenInput().attributes('value')).toBe(projectToId);
});
- it('displays the first project name initially in the dropdown', () => {
- expect(findGlDropdown().props('text')).toBe(projectFromName);
+ it('displays matching project name of the source revision initially in the dropdown', () => {
+ expect(findGlDropdown().props('text')).toBe(projectToName);
});
it('updates the hidden input value when onClick method is triggered', async () => {
@@ -84,15 +84,13 @@ describe('RepoDropdown component', () => {
expect(findHiddenInput().attributes('value')).toBe(repoId);
});
- it('emits initial `changeTargetProject` event with target project', () => {
- expect(wrapper.emitted('changeTargetProject')).toEqual([[projectFromName]]);
- });
-
it('emits `changeTargetProject` event when another target project is selected', async () => {
- const newTargetProject = 'new-from-name';
- wrapper.vm.$emit('changeTargetProject', newTargetProject);
+ const index = 1;
+ const { projectsFrom } = defaultProvide;
+ findGlDropdown().findAll(GlDropdownItem).at(index).vm.$emit('click');
await wrapper.vm.$nextTick();
- expect(wrapper.emitted('changeTargetProject')[1]).toEqual([newTargetProject]);
+
+ expect(wrapper.emitted('changeTargetProject')[0][0]).toEqual(projectsFrom[index].name);
});
});
});
diff --git a/spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js b/spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js
index 270c89e674c..ca208395e82 100644
--- a/spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js
+++ b/spec/frontend/projects/compare/components/revision_dropdown_legacy_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown } from '@gitlab/ui';
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import createFlash from '~/flash';
@@ -29,6 +29,7 @@ describe('RevisionDropdown component', () => {
beforeEach(() => {
axiosMock = new AxiosMockAdapter(axios);
+ createComponent();
});
afterEach(() => {
@@ -39,7 +40,6 @@ describe('RevisionDropdown component', () => {
const findGlDropdown = () => wrapper.find(GlDropdown);
it('sets hidden input', () => {
- createComponent();
expect(wrapper.find('input[type="hidden"]').attributes('value')).toBe(
defaultProps.paramsBranch,
);
@@ -68,8 +68,6 @@ describe('RevisionDropdown component', () => {
Tags: undefined,
});
- createComponent();
-
await axios.waitForAll();
expect(wrapper.vm.branches).toEqual([]);
@@ -79,15 +77,12 @@ describe('RevisionDropdown component', () => {
it('shows flash message on error', async () => {
axiosMock.onGet('some/invalid/path').replyOnce(404);
- createComponent();
-
await wrapper.vm.fetchBranchesAndTags();
expect(createFlash).toHaveBeenCalled();
});
describe('GlDropdown component', () => {
it('renders props', () => {
- createComponent();
expect(wrapper.props()).toEqual(expect.objectContaining(defaultProps));
});
@@ -99,8 +94,22 @@ describe('RevisionDropdown component', () => {
});
it('displays the params branch text', () => {
- createComponent();
expect(findGlDropdown().props('text')).toBe(defaultProps.paramsBranch);
});
+
+ it('emits a "selectRevision" event when a revision is selected', async () => {
+ const findGlDropdownItems = () => wrapper.findAll(GlDropdownItem);
+ const findFirstGlDropdownItem = () => findGlDropdownItems().at(0);
+
+ wrapper.setData({ branches: ['some-branch'] });
+
+ await wrapper.vm.$nextTick();
+
+ findFirstGlDropdownItem().vm.$emit('click');
+
+ expect(wrapper.emitted()).toEqual({
+ selectRevision: [[{ direction: 'from', revision: 'some-branch' }]],
+ });
+ });
});
});
diff --git a/spec/frontend/projects/compare/components/revision_dropdown_spec.js b/spec/frontend/projects/compare/components/revision_dropdown_spec.js
index 69d3167c99c..aab9607ceae 100644
--- a/spec/frontend/projects/compare/components/revision_dropdown_spec.js
+++ b/spec/frontend/projects/compare/components/revision_dropdown_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown } from '@gitlab/ui';
+import { GlDropdown, GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import createFlash from '~/flash';
@@ -23,6 +23,10 @@ describe('RevisionDropdown component', () => {
...defaultProps,
...props,
},
+ stubs: {
+ GlDropdown,
+ GlSearchBoxByType,
+ },
});
};
@@ -36,6 +40,7 @@ describe('RevisionDropdown component', () => {
});
const findGlDropdown = () => wrapper.find(GlDropdown);
+ const findSearchBox = () => wrapper.find(GlSearchBoxByType);
it('sets hidden input', () => {
createComponent();
@@ -85,6 +90,40 @@ describe('RevisionDropdown component', () => {
expect(axios.get).toHaveBeenLastCalledWith(newRefsProjectPath);
});
+ describe('search', () => {
+ it('shows flash message on error', async () => {
+ axiosMock.onGet('some/invalid/path').replyOnce(404);
+
+ createComponent();
+
+ await wrapper.vm.searchBranchesAndTags();
+ expect(createFlash).toHaveBeenCalled();
+ });
+
+ it('makes request with search param', async () => {
+ jest.spyOn(axios, 'get').mockResolvedValue({
+ data: {
+ Branches: [],
+ Tags: [],
+ },
+ });
+
+ const mockSearchTerm = 'foobar';
+ createComponent();
+ findSearchBox().vm.$emit('input', mockSearchTerm);
+ await axios.waitForAll();
+
+ expect(axios.get).toHaveBeenCalledWith(
+ defaultProps.refsProjectPath,
+ expect.objectContaining({
+ params: {
+ search: mockSearchTerm,
+ },
+ }),
+ );
+ });
+ });
+
describe('GlDropdown component', () => {
it('renders props', () => {
createComponent();
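The new search tests above pass the real `GlDropdown` and `GlSearchBoxByType` components through `stubs` so they render fully inside `shallowMount` and can receive events. A minimal sketch of that un-stubbing mechanic with a hypothetical wrapper component:

import { GlSearchBoxByType } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';

// Hypothetical wrapper component that re-emits the search box input.
const SearchWrapper = {
  render(h) {
    return h(GlSearchBoxByType, {
      on: { input: (value) => this.$emit('search', value) },
    });
  },
};

it('forwards the term typed into the search box', () => {
  const wrapper = shallowMount(SearchWrapper, {
    // passing the real component under `stubs` renders it instead of an auto-generated stub
    stubs: { GlSearchBoxByType },
  });

  wrapper.find(GlSearchBoxByType).vm.$emit('input', 'foobar');

  expect(wrapper.emitted('search')).toEqual([['foobar']]);
});
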
diff --git a/spec/frontend/projects/experiment_new_project_creation/components/app_spec.js b/spec/frontend/projects/experiment_new_project_creation/components/app_spec.js
index b4ae50341d4..204e7a7c394 100644
--- a/spec/frontend/projects/experiment_new_project_creation/components/app_spec.js
+++ b/spec/frontend/projects/experiment_new_project_creation/components/app_spec.js
@@ -1,5 +1,6 @@
import { GlBreadcrumb } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { assignGitlabExperiment } from 'helpers/experimentation_helper';
import App from '~/projects/experiment_new_project_creation/components/app.vue';
import LegacyContainer from '~/projects/experiment_new_project_creation/components/legacy_container.vue';
import WelcomePage from '~/projects/experiment_new_project_creation/components/welcome.vue';
@@ -17,6 +18,57 @@ describe('Experimental new project creation app', () => {
wrapper = null;
});
+ const findWelcomePage = () => wrapper.findComponent(WelcomePage);
+ const findPanel = (panelName) =>
+ findWelcomePage()
+ .props()
+ .panels.find((p) => p.name === panelName);
+ const findPanelHeader = () => wrapper.find('h4');
+
+ describe('new_repo experiment', () => {
+ describe('when in the candidate variant', () => {
+ assignGitlabExperiment('new_repo', 'candidate');
+
+ it('has "repository" in the panel title', () => {
+ createComponent();
+
+ expect(findPanel('blank_project').title).toBe('Create blank project/repository');
+ });
+
+ describe('when hash is not empty on load', () => {
+ beforeEach(() => {
+ window.location.hash = '#blank_project';
+ createComponent();
+ });
+
+ it('renders "project/repository"', () => {
+ expect(findPanelHeader().text()).toBe('Create blank project/repository');
+ });
+ });
+ });
+
+ describe('when in the control variant', () => {
+ assignGitlabExperiment('new_repo', 'control');
+
+ it('has "project" in the panel title', () => {
+ createComponent();
+
+ expect(findPanel('blank_project').title).toBe('Create blank project');
+ });
+
+ describe('when hash is not empty on load', () => {
+ beforeEach(() => {
+ window.location.hash = '#blank_project';
+ createComponent();
+ });
+
+ it('renders "project"', () => {
+ expect(findPanelHeader().text()).toBe('Create blank project');
+ });
+ });
+ });
+ });
+
describe('with empty hash', () => {
beforeEach(() => {
createComponent();
diff --git a/spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js b/spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js
index f26d1a6d2a3..9fd1230806e 100644
--- a/spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js
+++ b/spec/frontend/projects/experiment_new_project_creation/components/welcome_spec.js
@@ -1,8 +1,13 @@
import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
import { mockTracking } from 'helpers/tracking_helper';
+import { TRACKING_CONTEXT_SCHEMA } from '~/experimentation/constants';
+import { getExperimentData } from '~/experimentation/utils';
import NewProjectPushTipPopover from '~/projects/experiment_new_project_creation/components/new_project_push_tip_popover.vue';
import WelcomePage from '~/projects/experiment_new_project_creation/components/welcome.vue';
+jest.mock('~/experimentation/utils', () => ({ getExperimentData: jest.fn() }));
+
describe('Welcome page', () => {
let wrapper;
let trackingSpy;
@@ -14,6 +19,7 @@ describe('Welcome page', () => {
beforeEach(() => {
trackingSpy = mockTracking('_category_', document, jest.spyOn);
trackingSpy.mockImplementation(() => {});
+ getExperimentData.mockReturnValue(undefined);
});
afterEach(() => {
@@ -22,14 +28,35 @@ describe('Welcome page', () => {
wrapper = null;
});
- it('tracks link clicks', () => {
+ it('tracks link clicks', async () => {
createComponent({ panels: [{ name: 'test', href: '#' }] });
- wrapper.find('a').trigger('click');
+ const link = wrapper.find('a');
+ link.trigger('click');
+ await nextTick();
return wrapper.vm.$nextTick().then(() => {
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_tab', { label: 'test' });
});
});
+ it('adds new_repo experiment data if in experiment', async () => {
+ const mockExperimentData = 'data';
+ getExperimentData.mockReturnValue(mockExperimentData);
+
+ createComponent({ panels: [{ name: 'test', href: '#' }] });
+ const link = wrapper.find('a');
+ link.trigger('click');
+ await nextTick();
+ return wrapper.vm.$nextTick().then(() => {
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_tab', {
+ label: 'test',
+ context: {
+ data: mockExperimentData,
+ schema: TRACKING_CONTEXT_SCHEMA,
+ },
+ });
+ });
+ });
+
it('renders new project push tip popover', () => {
createComponent({ panels: [{ name: 'test', href: '#' }] });
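The updated spec mocks `~/experimentation/utils` once and then controls `getExperimentData` per test. A minimal sketch of that module-mocking pattern; the argument passed to the mocked function is illustrative only:

import { getExperimentData } from '~/experimentation/utils';

jest.mock('~/experimentation/utils', () => ({ getExperimentData: jest.fn() }));

it('behaves as if no experiment is running when no data is configured', () => {
  getExperimentData.mockReturnValue(undefined);

  expect(getExperimentData('new_repo')).toBeUndefined();
});

it('returns whatever payload the test configures', () => {
  getExperimentData.mockReturnValue('data');

  expect(getExperimentData('new_repo')).toBe('data');
});
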
diff --git a/spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap b/spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap
index fc51825f15b..c37f6415898 100644
--- a/spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap
+++ b/spec/frontend/projects/pipelines/charts/components/__snapshots__/ci_cd_analytics_area_chart_spec.js.snap
@@ -21,7 +21,11 @@ exports[`CiCdAnalyticsAreaChart matches the snapshot 1`] = `
option="[object Object]"
thresholds=""
width="0"
- />
+ >
+ <template />
+
+ <template />
+ </glareachart-stub>
</div>
</div>
`;
diff --git a/spec/frontend/projects/pipelines/charts/components/app_spec.js b/spec/frontend/projects/pipelines/charts/components/app_spec.js
index e8aace14db4..0cf05d4ac37 100644
--- a/spec/frontend/projects/pipelines/charts/components/app_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/app_spec.js
@@ -10,6 +10,7 @@ import PipelineCharts from '~/projects/pipelines/charts/components/pipeline_char
jest.mock('~/lib/utils/url_utility');
const DeploymentFrequencyChartsStub = { name: 'DeploymentFrequencyCharts', render: () => {} };
+const LeadTimeChartsStub = { name: 'LeadTimeCharts', render: () => {} };
describe('ProjectsPipelinesChartsApp', () => {
let wrapper;
@@ -21,10 +22,11 @@ describe('ProjectsPipelinesChartsApp', () => {
{},
{
provide: {
- shouldRenderDeploymentFrequencyCharts: false,
+ shouldRenderDoraCharts: true,
},
stubs: {
DeploymentFrequencyCharts: DeploymentFrequencyChartsStub,
+ LeadTimeCharts: LeadTimeChartsStub,
},
},
mountOptions,
@@ -32,37 +34,42 @@ describe('ProjectsPipelinesChartsApp', () => {
);
}
- beforeEach(() => {
- createComponent();
- });
-
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
const findGlTabs = () => wrapper.find(GlTabs);
- const findAllGlTab = () => wrapper.findAll(GlTab);
- const findGlTabAt = (i) => findAllGlTab().at(i);
+ const findAllGlTabs = () => wrapper.findAll(GlTab);
+ const findGlTabAtIndex = (index) => findAllGlTabs().at(index);
+ const findLeadTimeCharts = () => wrapper.find(LeadTimeChartsStub);
const findDeploymentFrequencyCharts = () => wrapper.find(DeploymentFrequencyChartsStub);
const findPipelineCharts = () => wrapper.find(PipelineCharts);
- it('renders the pipeline charts', () => {
- expect(findPipelineCharts().exists()).toBe(true);
- });
-
- describe('when shouldRenderDeploymentFrequencyCharts is true', () => {
+ describe('when all charts are available', () => {
beforeEach(() => {
- createComponent({ provide: { shouldRenderDeploymentFrequencyCharts: true } });
+ createComponent();
});
- it('renders the deployment frequency charts in a tab', () => {
+ it('renders tabs', () => {
expect(findGlTabs().exists()).toBe(true);
- expect(findGlTabAt(0).attributes('title')).toBe('Pipelines');
- expect(findGlTabAt(1).attributes('title')).toBe('Deployments');
+
+ expect(findGlTabAtIndex(0).attributes('title')).toBe('Pipelines');
+ expect(findGlTabAtIndex(1).attributes('title')).toBe('Deployments');
+ expect(findGlTabAtIndex(2).attributes('title')).toBe('Lead Time');
+ });
+
+ it('renders the pipeline charts', () => {
+ expect(findPipelineCharts().exists()).toBe(true);
+ });
+
+ it('renders the deployment frequency charts', () => {
expect(findDeploymentFrequencyCharts().exists()).toBe(true);
});
+ it('renders the lead time charts', () => {
+ expect(findLeadTimeCharts().exists()).toBe(true);
+ });
+
it('sets the tab and url when a tab is clicked', async () => {
let chartsPath;
setWindowLocation(`${TEST_HOST}/gitlab-org/gitlab-test/-/pipelines/charts`);
@@ -108,6 +115,7 @@ describe('ProjectsPipelinesChartsApp', () => {
describe('when provided with a query param', () => {
it.each`
chart | tab
+ ${'lead-time'} | ${'2'}
${'deployments'} | ${'1'}
${'pipelines'} | ${'0'}
${'fake'} | ${'0'}
@@ -118,7 +126,7 @@ describe('ProjectsPipelinesChartsApp', () => {
expect(name).toBe('chart');
return chart ? [chart] : [];
});
- createComponent({ provide: { shouldRenderDeploymentFrequencyCharts: true } });
+ createComponent();
expect(findGlTabs().attributes('value')).toBe(tab);
});
@@ -138,7 +146,7 @@ describe('ProjectsPipelinesChartsApp', () => {
return [];
});
- createComponent({ provide: { shouldRenderDeploymentFrequencyCharts: true } });
+ createComponent();
expect(findGlTabs().attributes('value')).toBe('0');
@@ -155,14 +163,17 @@ describe('ProjectsPipelinesChartsApp', () => {
});
});
- describe('when shouldRenderDeploymentFrequencyCharts is false', () => {
+ describe('when the dora charts are not available', () => {
beforeEach(() => {
- createComponent({ provide: { shouldRenderDeploymentFrequencyCharts: false } });
+ createComponent({ provide: { shouldRenderDoraCharts: false } });
});
- it('does not render the deployment frequency charts in a tab', () => {
+ it('does not render tabs', () => {
expect(findGlTabs().exists()).toBe(false);
- expect(findDeploymentFrequencyCharts().exists()).toBe(false);
+ });
+
+ it('renders the pipeline charts', () => {
+ expect(findPipelineCharts().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/registry/explorer/pages/list_spec.js b/spec/frontend/registry/explorer/pages/list_spec.js
index f7f207cc183..48acc06792d 100644
--- a/spec/frontend/registry/explorer/pages/list_spec.js
+++ b/spec/frontend/registry/explorer/pages/list_spec.js
@@ -1,9 +1,11 @@
import { GlSkeletonLoader, GlSprintf, GlAlert } from '@gitlab/ui';
import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import getContainerRepositoriesQuery from 'shared_queries/container_registry/get_container_repositories.query.graphql';
+import { FILTERED_SEARCH_TERM } from '~/packages_and_registries/shared/constants';
import DeleteImage from '~/registry/explorer/components/delete_image.vue';
import CliCommands from '~/registry/explorer/components/list_page/cli_commands.vue';
import GroupEmptyState from '~/registry/explorer/components/list_page/group_empty_state.vue';
@@ -60,7 +62,7 @@ describe('List Page', () => {
const waitForApolloRequestRender = async () => {
jest.runOnlyPendingTimers();
await waitForPromises();
- await wrapper.vm.$nextTick();
+ await nextTick();
};
const mountComponent = ({
@@ -69,6 +71,7 @@ describe('List Page', () => {
detailsResolver = jest.fn().mockResolvedValue(graphQLProjectImageRepositoriesDetailsMock),
mutationResolver = jest.fn().mockResolvedValue(graphQLImageDeleteMock),
config = { isGroupPage: false },
+ query = {},
} = {}) => {
localVue.use(VueApollo);
@@ -95,6 +98,7 @@ describe('List Page', () => {
$toast,
$route: {
name: 'foo',
+ query,
},
...mocks,
},
@@ -158,9 +162,11 @@ describe('List Page', () => {
});
describe('isLoading is true', () => {
- it('shows the skeleton loader', () => {
+ it('shows the skeleton loader', async () => {
mountComponent();
+ await nextTick();
+
expect(findSkeletonLoader().exists()).toBe(true);
});
@@ -176,9 +182,11 @@ describe('List Page', () => {
expect(findCliCommands().exists()).toBe(false);
});
- it('title has the metadataLoading props set to true', () => {
+ it('title has the metadataLoading prop set to true', async () => {
mountComponent();
+ await nextTick();
+
expect(findRegistryHeader().props('metadataLoading')).toBe(true);
});
});
@@ -311,7 +319,7 @@ describe('List Page', () => {
await selectImageForDeletion();
findDeleteImage().vm.$emit('success');
- await wrapper.vm.$nextTick();
+ await nextTick();
const alert = findDeleteAlert();
expect(alert.exists()).toBe(true);
@@ -327,7 +335,7 @@ describe('List Page', () => {
await selectImageForDeletion();
findDeleteImage().vm.$emit('error');
- await wrapper.vm.$nextTick();
+ await nextTick();
const alert = findDeleteAlert();
expect(alert.exists()).toBe(true);
@@ -343,12 +351,12 @@ describe('List Page', () => {
const doSearch = async () => {
await waitForApolloRequestRender();
findRegistrySearch().vm.$emit('filter:changed', [
- { type: 'filtered-search-term', value: { data: 'centos6' } },
+ { type: FILTERED_SEARCH_TERM, value: { data: 'centos6' } },
]);
findRegistrySearch().vm.$emit('filter:submit');
- await wrapper.vm.$nextTick();
+ await nextTick();
};
it('has a search box element', async () => {
@@ -373,7 +381,7 @@ describe('List Page', () => {
await waitForApolloRequestRender();
findRegistrySearch().vm.$emit('sorting:changed', { sort: 'asc' });
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(resolver).toHaveBeenCalledWith(expect.objectContaining({ sort: 'UPDATED_DESC' }));
});
@@ -416,7 +424,7 @@ describe('List Page', () => {
await waitForApolloRequestRender();
findImageList().vm.$emit('prev-page');
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(resolver).toHaveBeenCalledWith(
expect.objectContaining({ before: pageInfo.startCursor }),
@@ -436,7 +444,7 @@ describe('List Page', () => {
await waitForApolloRequestRender();
findImageList().vm.$emit('next-page');
- await wrapper.vm.$nextTick();
+ await nextTick();
expect(resolver).toHaveBeenCalledWith(
expect.objectContaining({ after: pageInfo.endCursor }),
@@ -457,11 +465,10 @@ describe('List Page', () => {
expect(findDeleteModal().exists()).toBe(true);
});
- it('contains a description with the path of the item to delete', () => {
+ it('contains a description with the path of the item to delete', async () => {
findImageList().vm.$emit('delete', { path: 'foo' });
- return wrapper.vm.$nextTick().then(() => {
- expect(findDeleteModal().html()).toContain('foo');
- });
+ await nextTick();
+ expect(findDeleteModal().html()).toContain('foo');
});
});
@@ -497,4 +504,60 @@ describe('List Page', () => {
testTrackingCall('confirm_delete');
});
});
+
+ describe('url query string handling', () => {
+ const defaultQueryParams = {
+ search: [1, 2],
+ sort: 'asc',
+ orderBy: 'CREATED',
+ };
+ const queryChangePayload = 'foo';
+
+ it('query:changed event pushes the new query to the router', async () => {
+ const push = jest.fn();
+ mountComponent({ mocks: { $router: { push } } });
+
+ await nextTick();
+
+ findRegistrySearch().vm.$emit('query:changed', queryChangePayload);
+
+ expect(push).toHaveBeenCalledWith({ query: queryChangePayload });
+ });
+
+ it('graphql API call has the variables set from the URL', async () => {
+ const resolver = jest.fn().mockResolvedValue(graphQLImageListMock);
+ mountComponent({ query: defaultQueryParams, resolver });
+
+ await nextTick();
+
+ expect(resolver).toHaveBeenCalledWith(
+ expect.objectContaining({
+ name: 1,
+ sort: 'CREATED_ASC',
+ }),
+ );
+ });
+
+ it.each`
+ sort | orderBy | search | payload
+ ${'ASC'} | ${undefined} | ${undefined} | ${{ sort: 'UPDATED_ASC' }}
+ ${undefined} | ${'bar'} | ${undefined} | ${{ sort: 'BAR_DESC' }}
+ ${'ASC'} | ${'bar'} | ${undefined} | ${{ sort: 'BAR_ASC' }}
+ ${undefined} | ${undefined} | ${undefined} | ${{}}
+ ${undefined} | ${undefined} | ${['one']} | ${{ name: 'one' }}
+ ${undefined} | ${undefined} | ${['one', 'two']} | ${{ name: 'one' }}
+ ${undefined} | ${'UPDATED'} | ${['one', 'two']} | ${{ name: 'one', sort: 'UPDATED_DESC' }}
+ ${'ASC'} | ${'UPDATED'} | ${['one', 'two']} | ${{ name: 'one', sort: 'UPDATED_ASC' }}
+ `(
+ 'with sort equal to $sort, orderBy equal to $orderBy, and search set to $search, the API call has the variables set as $payload',
+ async ({ sort, orderBy, search, payload }) => {
+ const resolver = jest.fn().mockResolvedValue({ sort, orderBy });
+ mountComponent({ query: { sort, orderBy, search }, resolver });
+
+ await nextTick();
+
+ expect(resolver).toHaveBeenCalledWith(expect.objectContaining(payload));
+ },
+ );
+ });
});
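The URL-handling block above drives one assertion through a Jest `it.each` tagged-template table. A minimal sketch of that pattern against a hypothetical sort-mapping helper:

// Hypothetical helper mapping UI state to a GraphQL sort value.
const toGraphQLSort = (orderBy = 'UPDATED', sort = 'desc') => `${orderBy}_${sort.toUpperCase()}`;

describe('toGraphQLSort', () => {
  it.each`
    orderBy      | sort         | expected
    ${'UPDATED'} | ${'asc'}     | ${'UPDATED_ASC'}
    ${'CREATED'} | ${'desc'}    | ${'CREATED_DESC'}
    ${undefined} | ${undefined} | ${'UPDATED_DESC'}
  `('maps orderBy=$orderBy and sort=$sort to $expected', ({ orderBy, sort, expected }) => {
    expect(toGraphQLSort(orderBy, sort)).toBe(expected);
  });
});
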
diff --git a/spec/frontend/registry/settings/components/settings_form_spec.js b/spec/frontend/registry/settings/components/settings_form_spec.js
index 7527910ad59..ad94da6ca66 100644
--- a/spec/frontend/registry/settings/components/settings_form_spec.js
+++ b/spec/frontend/registry/settings/components/settings_form_spec.js
@@ -77,33 +77,47 @@ describe('Settings Form', () => {
});
};
- const mountComponentWithApollo = ({ provide = defaultProvidedValues, resolver } = {}) => {
+ const mountComponentWithApollo = ({
+ provide = defaultProvidedValues,
+ mutationResolver,
+ queryPayload = expirationPolicyPayload(),
+ } = {}) => {
localVue.use(VueApollo);
const requestHandlers = [
- [updateContainerExpirationPolicyMutation, resolver],
- [expirationPolicyQuery, jest.fn().mockResolvedValue(expirationPolicyPayload())],
+ [updateContainerExpirationPolicyMutation, mutationResolver],
+ [expirationPolicyQuery, jest.fn().mockResolvedValue(queryPayload)],
];
fakeApollo = createMockApollo(requestHandlers);
+ // This component does not do the query directly, but we need a proper cache to update
fakeApollo.defaultClient.cache.writeQuery({
query: expirationPolicyQuery,
variables: {
projectPath: provide.projectPath,
},
- ...expirationPolicyPayload(),
+ ...queryPayload,
});
+ // keep the prop we pass to the component in sync with the cache
+ const {
+ data: {
+ project: { containerExpirationPolicy: value },
+ },
+ } = queryPayload;
+
mountComponent({
provide,
+ props: {
+ ...defaultProps,
+ value,
+ },
config: {
localVue,
apolloProvider: fakeApollo,
},
});
-
- return requestHandlers.map((resolvers) => resolvers[1]);
};
beforeEach(() => {
@@ -253,19 +267,44 @@ describe('Settings Form', () => {
expect(findSaveButton().attributes('type')).toBe('submit');
});
- it('dispatches the correct apollo mutation', async () => {
- const [expirationPolicyMutationResolver] = mountComponentWithApollo({
- resolver: jest.fn().mockResolvedValue(expirationPolicyMutationPayload()),
+ it('dispatches the correct apollo mutation', () => {
+ const mutationResolver = jest.fn().mockResolvedValue(expirationPolicyMutationPayload());
+ mountComponentWithApollo({
+ mutationResolver,
});
findForm().trigger('submit');
- await expirationPolicyMutationResolver();
- expect(expirationPolicyMutationResolver).toHaveBeenCalled();
+
+ expect(mutationResolver).toHaveBeenCalled();
+ });
+
+ it('saves the default values when a value is missing and the defaults were not changed', async () => {
+ const mutationResolver = jest.fn().mockResolvedValue(expirationPolicyMutationPayload());
+ mountComponentWithApollo({
+ mutationResolver,
+ queryPayload: expirationPolicyPayload({ keepN: null, cadence: null, olderThan: null }),
+ });
+
+ await waitForPromises();
+
+ findForm().trigger('submit');
+
+ expect(mutationResolver).toHaveBeenCalledWith({
+ input: {
+ cadence: 'EVERY_DAY',
+ enabled: true,
+ keepN: 'TEN_TAGS',
+ nameRegex: 'asdasdssssdfdf',
+ nameRegexKeep: 'sss',
+ olderThan: 'NINETY_DAYS',
+ projectPath: 'path',
+ },
+ });
});
it('tracks the submit event', () => {
mountComponentWithApollo({
- resolver: jest.fn().mockResolvedValue(expirationPolicyMutationPayload()),
+ mutationResolver: jest.fn().mockResolvedValue(expirationPolicyMutationPayload()),
});
findForm().trigger('submit');
@@ -274,12 +313,12 @@ describe('Settings Form', () => {
});
it('shows a success toast when submit succeeds', async () => {
- const handlers = mountComponentWithApollo({
- resolver: jest.fn().mockResolvedValue(expirationPolicyMutationPayload()),
+ mountComponentWithApollo({
+ mutationResolver: jest.fn().mockResolvedValue(expirationPolicyMutationPayload()),
});
findForm().trigger('submit');
- await Promise.all(handlers);
+ await waitForPromises();
await wrapper.vm.$nextTick();
expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_SUCCESS_MESSAGE, {
@@ -290,14 +329,14 @@ describe('Settings Form', () => {
describe('when submit fails', () => {
describe('user recoverable errors', () => {
it('shows the error in a toast', async () => {
- const handlers = mountComponentWithApollo({
- resolver: jest
+ mountComponentWithApollo({
+ mutationResolver: jest
.fn()
.mockResolvedValue(expirationPolicyMutationPayload({ errors: ['foo'] })),
});
findForm().trigger('submit');
- await Promise.all(handlers);
+ await waitForPromises();
await wrapper.vm.$nextTick();
expect(wrapper.vm.$toast.show).toHaveBeenCalledWith('foo', {
@@ -308,13 +347,12 @@ describe('Settings Form', () => {
describe('global errors', () => {
it('shows an error', async () => {
- const handlers = mountComponentWithApollo({
- resolver: jest.fn().mockRejectedValue(expirationPolicyMutationPayload()),
+ mountComponentWithApollo({
+ mutationResolver: jest.fn().mockRejectedValue(expirationPolicyMutationPayload()),
});
findForm().trigger('submit');
- await Promise.all(handlers);
- await wrapper.vm.$nextTick();
+ await waitForPromises();
await wrapper.vm.$nextTick();
expect(wrapper.vm.$toast.show).toHaveBeenCalledWith(UPDATE_SETTINGS_ERROR_MESSAGE, {
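The reworked Apollo tests above pair each GraphQL document with a `jest.fn()` handler inside `createMockApollo` and later assert on the variables the handler received. A minimal sketch of that handler-spy pattern; the mutation document and variables are placeholders:

import Vue from 'vue';
import VueApollo from 'vue-apollo';
import gql from 'graphql-tag';
import createMockApollo from 'helpers/mock_apollo_helper';

Vue.use(VueApollo);

// Hypothetical mutation document, present only to give the handler something to answer.
const someMutation = gql`
  mutation updateThing($input: UpdateThingInput!) {
    updateThing(input: $input) {
      errors
    }
  }
`;

it('records the variables passed to the mutation handler', async () => {
  const mutationResolver = jest
    .fn()
    .mockResolvedValue({ data: { updateThing: { __typename: 'UpdateThingPayload', errors: [] } } });
  const fakeApollo = createMockApollo([[someMutation, mutationResolver]]);

  await fakeApollo.defaultClient.mutate({
    mutation: someMutation,
    variables: { input: { enabled: true } },
  });

  expect(mutationResolver).toHaveBeenCalledWith({ input: { enabled: true } });
});
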
diff --git a/spec/frontend/releases/components/app_edit_new_spec.js b/spec/frontend/releases/components/app_edit_new_spec.js
index 1e55ab8f9e4..65ed6d6166f 100644
--- a/spec/frontend/releases/components/app_edit_new_spec.js
+++ b/spec/frontend/releases/components/app_edit_new_spec.js
@@ -50,7 +50,7 @@ describe('Release edit/new component', () => {
merge(
{
modules: {
- detail: {
+ editNew: {
namespaced: true,
actions,
state,
@@ -112,7 +112,7 @@ describe('Release edit/new component', () => {
it('renders the description text at the top of the page', () => {
expect(wrapper.find('.js-subtitle-text').text()).toBe(
- 'Releases are based on Git tags. We recommend tags that use semantic versioning, for example v1.0, v2.0-pre.',
+ 'Releases are based on Git tags. We recommend tags that use semantic versioning, for example v1.0.0, v2.1.0-pre.',
);
});
@@ -168,7 +168,7 @@ describe('Release edit/new component', () => {
await factory({
store: {
modules: {
- detail: {
+ editNew: {
getters: {
isExistingRelease: () => false,
},
@@ -207,7 +207,7 @@ describe('Release edit/new component', () => {
await factory({
store: {
modules: {
- detail: {
+ editNew: {
getters: {
isValid: () => true,
},
@@ -227,7 +227,7 @@ describe('Release edit/new component', () => {
await factory({
store: {
modules: {
- detail: {
+ editNew: {
getters: {
isValid: () => false,
},
diff --git a/spec/frontend/releases/components/app_index_spec.js b/spec/frontend/releases/components/app_index_spec.js
index 2b5270e29d6..7955b079cbc 100644
--- a/spec/frontend/releases/components/app_index_spec.js
+++ b/spec/frontend/releases/components/app_index_spec.js
@@ -8,7 +8,7 @@ import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import ReleasesApp from '~/releases/components/app_index.vue';
import ReleasesPagination from '~/releases/components/releases_pagination.vue';
import createStore from '~/releases/stores';
-import createListModule from '~/releases/stores/modules/list';
+import createIndexModule from '~/releases/stores/modules/index';
import { pageInfoHeadersWithoutPagination, pageInfoHeadersWithPagination } from '../mock_data';
jest.mock('~/lib/utils/common_utils', () => ({
@@ -41,15 +41,15 @@ describe('Releases App ', () => {
};
const createComponent = (stateUpdates = {}) => {
- const listModule = createListModule({
+ const indexModule = createIndexModule({
...defaultInitialState,
...stateUpdates,
});
- fetchReleaseSpy = jest.spyOn(listModule.actions, 'fetchReleases');
+ fetchReleaseSpy = jest.spyOn(indexModule.actions, 'fetchReleases');
const store = createStore({
- modules: { list: listModule },
+ modules: { index: indexModule },
featureFlags: {
graphqlReleaseData: true,
graphqlReleasesPage: false,
diff --git a/spec/frontend/releases/components/app_show_spec.js b/spec/frontend/releases/components/app_show_spec.js
index 5caea395f0a..425cb9d0059 100644
--- a/spec/frontend/releases/components/app_show_spec.js
+++ b/spec/frontend/releases/components/app_show_spec.js
@@ -1,63 +1,176 @@
import { shallowMount } from '@vue/test-utils';
-import Vuex from 'vuex';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
import { getJSONFixture } from 'helpers/fixtures';
-import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import createFlash from '~/flash';
import ReleaseShowApp from '~/releases/components/app_show.vue';
import ReleaseBlock from '~/releases/components/release_block.vue';
import ReleaseSkeletonLoader from '~/releases/components/release_skeleton_loader.vue';
+import oneReleaseQuery from '~/releases/queries/one_release.query.graphql';
-const originalRelease = getJSONFixture('api/releases/release.json');
+jest.mock('~/flash');
+
+const oneReleaseQueryResponse = getJSONFixture(
+ 'graphql/releases/queries/one_release.query.graphql.json',
+);
+
+Vue.use(VueApollo);
+
+const EXPECTED_ERROR_MESSAGE = 'Something went wrong while getting the release details.';
+const MOCK_FULL_PATH = 'project/full/path';
+const MOCK_TAG_NAME = 'test-tag-name';
describe('Release show component', () => {
let wrapper;
- let release;
- let actions;
- beforeEach(() => {
- release = convertObjectPropsToCamelCase(originalRelease);
- });
-
- const factory = (state) => {
- actions = {
- fetchRelease: jest.fn(),
- };
-
- const store = new Vuex.Store({
- modules: {
- detail: {
- namespaced: true,
- actions,
- state,
- },
+ const createComponent = ({ apolloProvider }) => {
+ wrapper = shallowMount(ReleaseShowApp, {
+ provide: {
+ fullPath: MOCK_FULL_PATH,
+ tagName: MOCK_TAG_NAME,
},
+ apolloProvider,
});
-
- wrapper = shallowMount(ReleaseShowApp, { store });
};
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ });
+
const findLoadingSkeleton = () => wrapper.find(ReleaseSkeletonLoader);
const findReleaseBlock = () => wrapper.find(ReleaseBlock);
- it('calls fetchRelease when the component is created', () => {
- factory({ release });
- expect(actions.fetchRelease).toHaveBeenCalledTimes(1);
+ const expectLoadingIndicator = () => {
+ it('renders a loading indicator', () => {
+ expect(findLoadingSkeleton().exists()).toBe(true);
+ });
+ };
+
+ const expectNoLoadingIndicator = () => {
+ it('does not render a loading indicator', () => {
+ expect(findLoadingSkeleton().exists()).toBe(false);
+ });
+ };
+
+ const expectNoFlash = () => {
+ it('does not show a flash message', () => {
+ expect(createFlash).not.toHaveBeenCalled();
+ });
+ };
+
+ const expectFlashWithMessage = (message) => {
+ it(`shows a flash message that reads "${message}"`, () => {
+ expect(createFlash).toHaveBeenCalledTimes(1);
+ expect(createFlash).toHaveBeenCalledWith({
+ message,
+ captureError: true,
+ error: expect.any(Error),
+ });
+ });
+ };
+
+ const expectReleaseBlock = () => {
+ it('renders a release block', () => {
+ expect(findReleaseBlock().exists()).toBe(true);
+ });
+ };
+
+ const expectNoReleaseBlock = () => {
+ it('does not render a release block', () => {
+ expect(findReleaseBlock().exists()).toBe(false);
+ });
+ };
+
+ describe('GraphQL query variables', () => {
+ const queryHandler = jest.fn().mockResolvedValueOnce(oneReleaseQueryResponse);
+
+ beforeEach(() => {
+ const apolloProvider = createMockApollo([[oneReleaseQuery, queryHandler]]);
+
+ createComponent({ apolloProvider });
+ });
+
+ it('builds a GraphQL query with the expected variables', () => {
+ expect(queryHandler).toHaveBeenCalledTimes(1);
+ expect(queryHandler).toHaveBeenCalledWith({
+ fullPath: MOCK_FULL_PATH,
+ tagName: MOCK_TAG_NAME,
+ });
+ });
});
- it('shows a loading skeleton and hides the release block while the API call is in progress', () => {
- factory({ isFetchingRelease: true });
- expect(findLoadingSkeleton().exists()).toBe(true);
- expect(findReleaseBlock().exists()).toBe(false);
+ describe('when the component is loading data', () => {
+ beforeEach(() => {
+ const apolloProvider = createMockApollo([
+ [oneReleaseQuery, jest.fn().mockReturnValueOnce(new Promise(() => {}))],
+ ]);
+
+ createComponent({ apolloProvider });
+ });
+
+ expectLoadingIndicator();
+ expectNoFlash();
+ expectNoReleaseBlock();
});
- it('hides the loading skeleton and shows the release block when the API call finishes successfully', () => {
- factory({ isFetchingRelease: false });
- expect(findLoadingSkeleton().exists()).toBe(false);
- expect(findReleaseBlock().exists()).toBe(true);
+ describe('when the component has successfully loaded the release', () => {
+ beforeEach(() => {
+ const apolloProvider = createMockApollo([
+ [oneReleaseQuery, jest.fn().mockResolvedValueOnce(oneReleaseQueryResponse)],
+ ]);
+
+ createComponent({ apolloProvider });
+ });
+
+ expectNoLoadingIndicator();
+ expectNoFlash();
+ expectReleaseBlock();
});
- it('hides both the loading skeleton and the release block when the API call fails', () => {
- factory({ fetchError: new Error('Uh oh') });
- expect(findLoadingSkeleton().exists()).toBe(false);
- expect(findReleaseBlock().exists()).toBe(false);
+ describe('when the request succeeded, but the returned "project" key was null', () => {
+ beforeEach(() => {
+ const apolloProvider = createMockApollo([
+ [oneReleaseQuery, jest.fn().mockResolvedValueOnce({ data: { project: null } })],
+ ]);
+
+ createComponent({ apolloProvider });
+ });
+
+ expectNoLoadingIndicator();
+ expectFlashWithMessage(EXPECTED_ERROR_MESSAGE);
+ expectNoReleaseBlock();
+ });
+
+ describe('when the request succeeded, but the returned "project.release" key was null', () => {
+ beforeEach(() => {
+ const apolloProvider = createMockApollo([
+ [
+ oneReleaseQuery,
+ jest.fn().mockResolvedValueOnce({ data: { project: { release: null } } }),
+ ],
+ ]);
+
+ createComponent({ apolloProvider });
+ });
+
+ expectNoLoadingIndicator();
+ expectFlashWithMessage(EXPECTED_ERROR_MESSAGE);
+ expectNoReleaseBlock();
+ });
+
+ describe('when an error occurs while loading the release', () => {
+ beforeEach(() => {
+ const apolloProvider = createMockApollo([
+ [oneReleaseQuery, jest.fn().mockRejectedValueOnce('An error occurred!')],
+ ]);
+
+ createComponent({ apolloProvider });
+ });
+
+ expectNoLoadingIndicator();
+ expectFlashWithMessage(EXPECTED_ERROR_MESSAGE);
+ expectNoReleaseBlock();
});
});
diff --git a/spec/frontend/releases/components/asset_links_form_spec.js b/spec/frontend/releases/components/asset_links_form_spec.js
index bbaa4e9dc94..460007e48ef 100644
--- a/spec/frontend/releases/components/asset_links_form_spec.js
+++ b/spec/frontend/releases/components/asset_links_form_spec.js
@@ -44,7 +44,7 @@ describe('Release edit component', () => {
const store = new Vuex.Store({
modules: {
- detail: {
+ editNew: {
namespaced: true,
actions,
state,
diff --git a/spec/frontend/releases/components/release_block_milestone_info_spec.js b/spec/frontend/releases/components/release_block_milestone_info_spec.js
index 47fe10af946..a2bf45c7861 100644
--- a/spec/frontend/releases/components/release_block_milestone_info_spec.js
+++ b/spec/frontend/releases/components/release_block_milestone_info_spec.js
@@ -199,7 +199,7 @@ describe('Release block milestone info', () => {
it('renders merge request stats', () => {
expect(trimText(mergeRequestsContainer().text())).toBe(
- 'Merge Requests 30 Open: 4 • Merged: 24 • Closed: 2',
+ 'Merge requests 30 Open: 4 • Merged: 24 • Closed: 2',
);
});
});
diff --git a/spec/frontend/releases/components/releases_pagination_graphql_spec.js b/spec/frontend/releases/components/releases_pagination_graphql_spec.js
index de80d82e93c..5b2dd4bc784 100644
--- a/spec/frontend/releases/components/releases_pagination_graphql_spec.js
+++ b/spec/frontend/releases/components/releases_pagination_graphql_spec.js
@@ -3,7 +3,7 @@ import Vuex from 'vuex';
import { historyPushState } from '~/lib/utils/common_utils';
import ReleasesPaginationGraphql from '~/releases/components/releases_pagination_graphql.vue';
import createStore from '~/releases/stores';
-import createListModule from '~/releases/stores/modules/list';
+import createIndexModule from '~/releases/stores/modules/index';
jest.mock('~/lib/utils/common_utils', () => ({
...jest.requireActual('~/lib/utils/common_utils'),
@@ -15,7 +15,7 @@ localVue.use(Vuex);
describe('~/releases/components/releases_pagination_graphql.vue', () => {
let wrapper;
- let listModule;
+ let indexModule;
const cursors = {
startCursor: 'startCursor',
@@ -25,16 +25,16 @@ describe('~/releases/components/releases_pagination_graphql.vue', () => {
const projectPath = 'my/project';
const createComponent = (pageInfo) => {
- listModule = createListModule({ projectPath });
+ indexModule = createIndexModule({ projectPath });
- listModule.state.graphQlPageInfo = pageInfo;
+ indexModule.state.graphQlPageInfo = pageInfo;
- listModule.actions.fetchReleases = jest.fn();
+ indexModule.actions.fetchReleases = jest.fn();
wrapper = mount(ReleasesPaginationGraphql, {
store: createStore({
modules: {
- list: listModule,
+ index: indexModule,
},
featureFlags: {},
}),
@@ -142,7 +142,7 @@ describe('~/releases/components/releases_pagination_graphql.vue', () => {
});
it('calls fetchReleases with the correct after cursor', () => {
- expect(listModule.actions.fetchReleases.mock.calls).toEqual([
+ expect(indexModule.actions.fetchReleases.mock.calls).toEqual([
[expect.anything(), { after: cursors.endCursor }],
]);
});
@@ -160,7 +160,7 @@ describe('~/releases/components/releases_pagination_graphql.vue', () => {
});
it('calls fetchReleases with the correct before cursor', () => {
- expect(listModule.actions.fetchReleases.mock.calls).toEqual([
+ expect(indexModule.actions.fetchReleases.mock.calls).toEqual([
[expect.anything(), { before: cursors.startCursor }],
]);
});
diff --git a/spec/frontend/releases/components/releases_pagination_rest_spec.js b/spec/frontend/releases/components/releases_pagination_rest_spec.js
index 6f2690f5322..7d45176967b 100644
--- a/spec/frontend/releases/components/releases_pagination_rest_spec.js
+++ b/spec/frontend/releases/components/releases_pagination_rest_spec.js
@@ -4,7 +4,7 @@ import Vuex from 'vuex';
import * as commonUtils from '~/lib/utils/common_utils';
import ReleasesPaginationRest from '~/releases/components/releases_pagination_rest.vue';
import createStore from '~/releases/stores';
-import createListModule from '~/releases/stores/modules/list';
+import createIndexModule from '~/releases/stores/modules/index';
commonUtils.historyPushState = jest.fn();
@@ -13,21 +13,21 @@ localVue.use(Vuex);
describe('~/releases/components/releases_pagination_rest.vue', () => {
let wrapper;
- let listModule;
+ let indexModule;
const projectId = 19;
const createComponent = (pageInfo) => {
- listModule = createListModule({ projectId });
+ indexModule = createIndexModule({ projectId });
- listModule.state.restPageInfo = pageInfo;
+ indexModule.state.restPageInfo = pageInfo;
- listModule.actions.fetchReleases = jest.fn();
+ indexModule.actions.fetchReleases = jest.fn();
wrapper = mount(ReleasesPaginationRest, {
store: createStore({
modules: {
- list: listModule,
+ index: indexModule,
},
featureFlags: {},
}),
@@ -58,7 +58,7 @@ describe('~/releases/components/releases_pagination_rest.vue', () => {
});
it('calls fetchReleases with the correct page', () => {
- expect(listModule.actions.fetchReleases.mock.calls).toEqual([
+ expect(indexModule.actions.fetchReleases.mock.calls).toEqual([
[expect.anything(), { page: newPage }],
]);
});
diff --git a/spec/frontend/releases/components/releases_sort_spec.js b/spec/frontend/releases/components/releases_sort_spec.js
index f17c6678592..b16f80b9c73 100644
--- a/spec/frontend/releases/components/releases_sort_spec.js
+++ b/spec/frontend/releases/components/releases_sort_spec.js
@@ -3,7 +3,7 @@ import { shallowMount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import ReleasesSort from '~/releases/components/releases_sort.vue';
import createStore from '~/releases/stores';
-import createListModule from '~/releases/stores/modules/list';
+import createIndexModule from '~/releases/stores/modules/index';
const localVue = createLocalVue();
localVue.use(Vuex);
@@ -11,15 +11,15 @@ localVue.use(Vuex);
describe('~/releases/components/releases_sort.vue', () => {
let wrapper;
let store;
- let listModule;
+ let indexModule;
const projectId = 8;
const createComponent = () => {
- listModule = createListModule({ projectId });
+ indexModule = createIndexModule({ projectId });
store = createStore({
modules: {
- list: listModule,
+ index: indexModule,
},
});
@@ -52,7 +52,7 @@ describe('~/releases/components/releases_sort.vue', () => {
it('on sort change set sorting in vuex and emit event', () => {
findReleasesSorting().vm.$emit('sortDirectionChange');
- expect(store.dispatch).toHaveBeenCalledWith('list/setSorting', { sort: 'asc' });
+ expect(store.dispatch).toHaveBeenCalledWith('index/setSorting', { sort: 'asc' });
expect(wrapper.emitted('sort:changed')).toBeTruthy();
});
@@ -60,7 +60,7 @@ describe('~/releases/components/releases_sort.vue', () => {
const item = findSortingItems().at(0);
const { orderBy } = wrapper.vm.sortOptions[0];
item.vm.$emit('click');
- expect(store.dispatch).toHaveBeenCalledWith('list/setSorting', { orderBy });
+ expect(store.dispatch).toHaveBeenCalledWith('index/setSorting', { orderBy });
expect(wrapper.emitted('sort:changed')).toBeTruthy();
});
});
diff --git a/spec/frontend/releases/components/tag_field_exsting_spec.js b/spec/frontend/releases/components/tag_field_exsting_spec.js
index cef7a0272a6..294538086b4 100644
--- a/spec/frontend/releases/components/tag_field_exsting_spec.js
+++ b/spec/frontend/releases/components/tag_field_exsting_spec.js
@@ -3,7 +3,7 @@ import { shallowMount, mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
import TagFieldExisting from '~/releases/components/tag_field_existing.vue';
import createStore from '~/releases/stores';
-import createDetailModule from '~/releases/stores/modules/detail';
+import createEditNewModule from '~/releases/stores/modules/edit_new';
const TEST_TAG_NAME = 'test-tag-name';
@@ -27,13 +27,13 @@ describe('releases/components/tag_field_existing', () => {
beforeEach(() => {
store = createStore({
modules: {
- detail: createDetailModule({
+ editNew: createEditNewModule({
tagName: TEST_TAG_NAME,
}),
},
});
- store.state.detail.release = {
+ store.state.editNew.release = {
tagName: TEST_TAG_NAME,
};
});
diff --git a/spec/frontend/releases/components/tag_field_new_spec.js b/spec/frontend/releases/components/tag_field_new_spec.js
index 387217c2a8e..f1608ca31b4 100644
--- a/spec/frontend/releases/components/tag_field_new_spec.js
+++ b/spec/frontend/releases/components/tag_field_new_spec.js
@@ -3,7 +3,7 @@ import { mount, shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import TagFieldNew from '~/releases/components/tag_field_new.vue';
import createStore from '~/releases/stores';
-import createDetailModule from '~/releases/stores/modules/detail';
+import createEditNewModule from '~/releases/stores/modules/edit_new';
const TEST_TAG_NAME = 'test-tag-name';
const TEST_PROJECT_ID = '1234';
@@ -44,15 +44,15 @@ describe('releases/components/tag_field_new', () => {
beforeEach(() => {
store = createStore({
modules: {
- detail: createDetailModule({
+ editNew: createEditNewModule({
projectId: TEST_PROJECT_ID,
}),
},
});
- store.state.detail.createFrom = TEST_CREATE_FROM;
+ store.state.editNew.createFrom = TEST_CREATE_FROM;
- store.state.detail.release = {
+ store.state.editNew.release = {
tagName: TEST_TAG_NAME,
assets: {
links: [],
@@ -89,7 +89,7 @@ describe('releases/components/tag_field_new', () => {
});
it("updates the store's release.tagName property", () => {
- expect(store.state.detail.release.tagName).toBe(NONEXISTENT_TAG_NAME);
+ expect(store.state.editNew.release.tagName).toBe(NONEXISTENT_TAG_NAME);
});
it('hides the "Create from" field', () => {
@@ -107,7 +107,7 @@ describe('releases/components/tag_field_new', () => {
});
it("updates the store's release.tagName property", () => {
- expect(store.state.detail.release.tagName).toBe(updatedTagName);
+ expect(store.state.editNew.release.tagName).toBe(updatedTagName);
});
it('shows the "Create from" field', () => {
@@ -178,7 +178,7 @@ describe('releases/components/tag_field_new', () => {
await wrapper.vm.$nextTick();
- expect(store.state.detail.createFrom).toBe(updatedCreateFrom);
+ expect(store.state.editNew.createFrom).toBe(updatedCreateFrom);
});
});
});
diff --git a/spec/frontend/releases/components/tag_field_spec.js b/spec/frontend/releases/components/tag_field_spec.js
index 2cf5944f9e6..db08f874959 100644
--- a/spec/frontend/releases/components/tag_field_spec.js
+++ b/spec/frontend/releases/components/tag_field_spec.js
@@ -3,7 +3,7 @@ import TagField from '~/releases/components/tag_field.vue';
import TagFieldExisting from '~/releases/components/tag_field_existing.vue';
import TagFieldNew from '~/releases/components/tag_field_new.vue';
import createStore from '~/releases/stores';
-import createDetailModule from '~/releases/stores/modules/detail';
+import createEditNewModule from '~/releases/stores/modules/edit_new';
describe('releases/components/tag_field', () => {
let store;
@@ -12,11 +12,11 @@ describe('releases/components/tag_field', () => {
const createComponent = ({ tagName }) => {
store = createStore({
modules: {
- detail: createDetailModule({}),
+ editNew: createEditNewModule({}),
},
});
- store.state.detail.tagName = tagName;
+ store.state.editNew.tagName = tagName;
wrapper = shallowMount(TagField, { store });
};
diff --git a/spec/frontend/releases/stores/modules/detail/actions_spec.js b/spec/frontend/releases/stores/modules/detail/actions_spec.js
index 9c125fbb87b..b116d601ca4 100644
--- a/spec/frontend/releases/stores/modules/detail/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/actions_spec.js
@@ -9,9 +9,9 @@ import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import httpStatus from '~/lib/utils/http_status';
import { redirectTo } from '~/lib/utils/url_utility';
import { ASSET_LINK_TYPE } from '~/releases/constants';
-import * as actions from '~/releases/stores/modules/detail/actions';
-import * as types from '~/releases/stores/modules/detail/mutation_types';
-import createState from '~/releases/stores/modules/detail/state';
+import * as actions from '~/releases/stores/modules/edit_new/actions';
+import * as types from '~/releases/stores/modules/edit_new/mutation_types';
+import createState from '~/releases/stores/modules/edit_new/state';
import { releaseToApiJson, apiJsonToRelease } from '~/releases/util';
jest.mock('~/flash');
@@ -23,7 +23,7 @@ jest.mock('~/lib/utils/url_utility', () => ({
const originalRelease = getJSONFixture('api/releases/release.json');
-describe('Release detail actions', () => {
+describe('Release edit/new actions', () => {
let state;
let release;
let mock;
@@ -163,7 +163,7 @@ describe('Release detail actions', () => {
return actions.fetchRelease({ commit: jest.fn(), state, rootState: state }).then(() => {
expect(createFlash).toHaveBeenCalledTimes(1);
expect(createFlash).toHaveBeenCalledWith(
- 'Something went wrong while getting the release details',
+ 'Something went wrong while getting the release details.',
);
});
});
diff --git a/spec/frontend/releases/stores/modules/detail/getters_spec.js b/spec/frontend/releases/stores/modules/detail/getters_spec.js
index 2d9f35428f2..1449c064d77 100644
--- a/spec/frontend/releases/stores/modules/detail/getters_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/getters_spec.js
@@ -1,6 +1,6 @@
-import * as getters from '~/releases/stores/modules/detail/getters';
+import * as getters from '~/releases/stores/modules/edit_new/getters';
-describe('Release detail getters', () => {
+describe('Release edit/new getters', () => {
describe('isExistingRelease', () => {
it('returns true if the release is an existing release that already exists in the database', () => {
const state = { tagName: 'test-tag-name' };
diff --git a/spec/frontend/releases/stores/modules/detail/mutations_spec.js b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
index cdf26bfa834..20ae332e500 100644
--- a/spec/frontend/releases/stores/modules/detail/mutations_spec.js
+++ b/spec/frontend/releases/stores/modules/detail/mutations_spec.js
@@ -1,13 +1,13 @@
import { getJSONFixture } from 'helpers/fixtures';
import { convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
import { ASSET_LINK_TYPE, DEFAULT_ASSET_LINK_TYPE } from '~/releases/constants';
-import * as types from '~/releases/stores/modules/detail/mutation_types';
-import mutations from '~/releases/stores/modules/detail/mutations';
-import createState from '~/releases/stores/modules/detail/state';
+import * as types from '~/releases/stores/modules/edit_new/mutation_types';
+import mutations from '~/releases/stores/modules/edit_new/mutations';
+import createState from '~/releases/stores/modules/edit_new/state';
const originalRelease = getJSONFixture('api/releases/release.json');
-describe('Release detail mutations', () => {
+describe('Release edit/new mutations', () => {
let state;
let release;
diff --git a/spec/frontend/releases/stores/modules/list/actions_spec.js b/spec/frontend/releases/stores/modules/list/actions_spec.js
index 309f7387929..4dc996174bc 100644
--- a/spec/frontend/releases/stores/modules/list/actions_spec.js
+++ b/spec/frontend/releases/stores/modules/list/actions_spec.js
@@ -15,9 +15,9 @@ import {
fetchReleasesRest,
receiveReleasesError,
setSorting,
-} from '~/releases/stores/modules/list/actions';
-import * as types from '~/releases/stores/modules/list/mutation_types';
-import createState from '~/releases/stores/modules/list/state';
+} from '~/releases/stores/modules/index/actions';
+import * as types from '~/releases/stores/modules/index/mutation_types';
+import createState from '~/releases/stores/modules/index/state';
import { gqClient, convertAllReleasesGraphQLResponse } from '~/releases/util';
import { pageInfoHeadersWithoutPagination } from '../../../mock_data';
diff --git a/spec/frontend/releases/stores/modules/list/helpers.js b/spec/frontend/releases/stores/modules/list/helpers.js
index 3913eba31b8..6669f44aa95 100644
--- a/spec/frontend/releases/stores/modules/list/helpers.js
+++ b/spec/frontend/releases/stores/modules/list/helpers.js
@@ -1,4 +1,4 @@
-import state from '~/releases/stores/modules/list/state';
+import state from '~/releases/stores/modules/index/state';
export const resetStore = (store) => {
store.replaceState(state());
diff --git a/spec/frontend/releases/stores/modules/list/mutations_spec.js b/spec/frontend/releases/stores/modules/list/mutations_spec.js
index ea6a4ada16a..8b35ba5d7ac 100644
--- a/spec/frontend/releases/stores/modules/list/mutations_spec.js
+++ b/spec/frontend/releases/stores/modules/list/mutations_spec.js
@@ -1,8 +1,8 @@
import { getJSONFixture } from 'helpers/fixtures';
import { parseIntPagination, convertObjectPropsToCamelCase } from '~/lib/utils/common_utils';
-import * as types from '~/releases/stores/modules/list/mutation_types';
-import mutations from '~/releases/stores/modules/list/mutations';
-import createState from '~/releases/stores/modules/list/state';
+import * as types from '~/releases/stores/modules/index/mutation_types';
+import mutations from '~/releases/stores/modules/index/mutations';
+import createState from '~/releases/stores/modules/index/state';
import { convertAllReleasesGraphQLResponse } from '~/releases/util';
import { pageInfoHeadersWithoutPagination } from '../../../mock_data';
diff --git a/spec/frontend/reports/components/report_section_spec.js b/spec/frontend/reports/components/report_section_spec.js
index c9bf3185f8f..e1b36aa1e21 100644
--- a/spec/frontend/reports/components/report_section_spec.js
+++ b/spec/frontend/reports/components/report_section_spec.js
@@ -1,12 +1,14 @@
-import { shallowMount } from '@vue/test-utils';
+import { mount } from '@vue/test-utils';
import Vue from 'vue';
import mountComponent, { mountComponentWithSlots } from 'helpers/vue_mount_component_helper';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import reportSection from '~/reports/components/report_section.vue';
describe('Report section', () => {
let vm;
let wrapper;
const ReportSection = Vue.extend(reportSection);
+ const findCollapseButton = () => wrapper.findByTestId('report-section-expand-button');
const resolvedIssues = [
{
@@ -30,12 +32,14 @@ describe('Report section', () => {
};
const createComponent = (props) => {
- wrapper = shallowMount(reportSection, {
- propsData: {
- ...defaultProps,
- ...props,
- },
- });
+ wrapper = extendedWrapper(
+ mount(reportSection, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ }),
+ );
return wrapper;
};
@@ -182,7 +186,7 @@ describe('Report section', () => {
expect(wrapper.emitted().toggleEvent).toBeUndefined();
- wrapper.vm.$el.querySelector('button').click();
+ findCollapseButton().trigger('click');
return wrapper.vm
.$nextTick()
.then(() => {
@@ -197,7 +201,7 @@ describe('Report section', () => {
expect(wrapper.emitted().toggleEvent).toBeUndefined();
- wrapper.vm.$el.querySelector('button').click();
+ findCollapseButton().trigger('click');
return wrapper.vm
.$nextTick()
.then(() => {
diff --git a/spec/frontend/reports/grouped_test_report/components/modal_spec.js b/spec/frontend/reports/grouped_test_report/components/modal_spec.js
index 303009bab3a..3de81f754fd 100644
--- a/spec/frontend/reports/grouped_test_report/components/modal_spec.js
+++ b/spec/frontend/reports/grouped_test_report/components/modal_spec.js
@@ -15,7 +15,10 @@ describe('Grouped Test Reports Modal', () => {
// populate data
modalDataStructure.execution_time.value = 0.009411;
modalDataStructure.system_output.value = 'Failure/Error: is_expected.to eq(3)\n\n';
- modalDataStructure.class.value = 'link';
+ modalDataStructure.filename.value = {
+ text: 'link',
+ path: '/file/path',
+ };
let wrapper;
@@ -43,9 +46,9 @@ describe('Grouped Test Reports Modal', () => {
it('renders link', () => {
const link = wrapper.findComponent(GlLink);
- expect(link.attributes().href).toEqual(modalDataStructure.class.value);
+ expect(link.attributes().href).toEqual(modalDataStructure.filename.value.path);
- expect(link.text()).toEqual(modalDataStructure.class.value);
+ expect(link.text()).toEqual(modalDataStructure.filename.value.text);
});
it('renders seconds', () => {
diff --git a/spec/frontend/reports/grouped_test_report/components/test_issue_body_spec.js b/spec/frontend/reports/grouped_test_report/components/test_issue_body_spec.js
index e03a52aad8d..2f6f62ca1d3 100644
--- a/spec/frontend/reports/grouped_test_report/components/test_issue_body_spec.js
+++ b/spec/frontend/reports/grouped_test_report/components/test_issue_body_spec.js
@@ -52,7 +52,7 @@ describe('Test issue body', () => {
});
it('renders issue name', () => {
- expect(findDescription().text()).toBe(failedIssue.name);
+ expect(findDescription().text()).toContain(failedIssue.name);
});
it('renders failed status icon', () => {
diff --git a/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js b/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js
index 49332157691..55bb7dbe5c0 100644
--- a/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js
+++ b/spec/frontend/reports/grouped_test_report/grouped_test_reports_app_spec.js
@@ -1,6 +1,6 @@
import { mount, createLocalVue } from '@vue/test-utils';
import Vuex from 'vuex';
-import { mockTracking } from 'helpers/tracking_helper';
+import Api from '~/api';
import GroupedTestReportsApp from '~/reports/grouped_test_report/grouped_test_reports_app.vue';
import { getStoreConfig } from '~/reports/grouped_test_report/store';
@@ -12,24 +12,31 @@ import successTestReports from '../mock_data/no_failures_report.json';
import recentFailuresTestReports from '../mock_data/recent_failures_report.json';
import resolvedFailures from '../mock_data/resolved_failures.json';
+jest.mock('~/api.js');
+
const localVue = createLocalVue();
localVue.use(Vuex);
describe('Grouped test reports app', () => {
const endpoint = 'endpoint.json';
+ const headBlobPath = '/blob/path';
const pipelinePath = '/path/to/pipeline';
let wrapper;
let mockStore;
- const mountComponent = ({ props = { pipelinePath } } = {}) => {
+ const mountComponent = ({ props = { pipelinePath }, glFeatures = {} } = {}) => {
wrapper = mount(GroupedTestReportsApp, {
store: mockStore,
localVue,
propsData: {
endpoint,
+ headBlobPath,
pipelinePath,
...props,
},
+ provide: {
+ glFeatures,
+ },
});
};
@@ -56,7 +63,7 @@ describe('Grouped test reports app', () => {
...getStoreConfig(),
actions: {
fetchReports: () => {},
- setEndpoint: () => {},
+ setPaths: () => {},
},
});
mountComponent();
@@ -103,31 +110,33 @@ describe('Grouped test reports app', () => {
});
describe('`Expand` button', () => {
- let trackingSpy;
-
beforeEach(() => {
setReports(newFailedTestReports);
- mountComponent();
- document.body.dataset.page = 'projects:merge_requests:show';
- trackingSpy = mockTracking('_category_', wrapper.element, jest.spyOn);
});
- it('tracks an event on click', () => {
+ it('tracks usage ping metric when enabled', () => {
+ mountComponent({ glFeatures: { usageDataITestingSummaryWidgetTotal: true } });
findExpandButton().trigger('click');
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'expand_test_report_widget', {});
+ expect(Api.trackRedisHllUserEvent).toHaveBeenCalledTimes(1);
+ expect(Api.trackRedisHllUserEvent).toHaveBeenCalledWith(wrapper.vm.$options.expandEvent);
});
it('only tracks the first expansion', () => {
- expect(trackingSpy).not.toHaveBeenCalled();
+ mountComponent({ glFeatures: { usageDataITestingSummaryWidgetTotal: true } });
+ const expandButton = findExpandButton();
+ expandButton.trigger('click');
+ expandButton.trigger('click');
+ expandButton.trigger('click');
- const button = findExpandButton();
+ expect(Api.trackRedisHllUserEvent).toHaveBeenCalledTimes(1);
+ });
- button.trigger('click');
- button.trigger('click');
- button.trigger('click');
+ it('does not track usage ping metric when disabled', () => {
+ mountComponent({ glFeatures: { usageDataITestingSummaryWidgetTotal: false } });
+ findExpandButton().trigger('click');
- expect(trackingSpy).toHaveBeenCalledTimes(1);
+ expect(Api.trackRedisHllUserEvent).not.toHaveBeenCalled();
});
});
diff --git a/spec/frontend/reports/grouped_test_report/store/actions_spec.js b/spec/frontend/reports/grouped_test_report/store/actions_spec.js
index 28633f7ba16..bbc3a5dbba5 100644
--- a/spec/frontend/reports/grouped_test_report/store/actions_spec.js
+++ b/spec/frontend/reports/grouped_test_report/store/actions_spec.js
@@ -3,7 +3,7 @@ import { TEST_HOST } from 'helpers/test_constants';
import testAction from 'helpers/vuex_action_helper';
import axios from '~/lib/utils/axios_utils';
import {
- setEndpoint,
+ setPaths,
requestReports,
fetchReports,
stopPolling,
@@ -23,13 +23,18 @@ describe('Reports Store Actions', () => {
mockedState = state();
});
- describe('setEndpoint', () => {
- it('should commit SET_ENDPOINT mutation', (done) => {
+ describe('setPaths', () => {
+ it('should commit SET_PATHS mutation', (done) => {
testAction(
- setEndpoint,
- 'endpoint.json',
+ setPaths,
+ { endpoint: 'endpoint.json', headBlobPath: '/blob/path' },
mockedState,
- [{ type: types.SET_ENDPOINT, payload: 'endpoint.json' }],
+ [
+ {
+ type: types.SET_PATHS,
+ payload: { endpoint: 'endpoint.json', headBlobPath: '/blob/path' },
+ },
+ ],
[],
done,
);
diff --git a/spec/frontend/reports/grouped_test_report/store/mutations_spec.js b/spec/frontend/reports/grouped_test_report/store/mutations_spec.js
index 60d5016a11b..d8642a9b440 100644
--- a/spec/frontend/reports/grouped_test_report/store/mutations_spec.js
+++ b/spec/frontend/reports/grouped_test_report/store/mutations_spec.js
@@ -10,11 +10,15 @@ describe('Reports Store Mutations', () => {
stateCopy = state();
});
- describe('SET_ENDPOINT', () => {
+ describe('SET_PATHS', () => {
it('should set endpoint', () => {
- mutations[types.SET_ENDPOINT](stateCopy, 'endpoint.json');
+ mutations[types.SET_PATHS](stateCopy, {
+ endpoint: 'endpoint.json',
+ headBlobPath: '/blob/path',
+ });
expect(stateCopy.endpoint).toEqual('endpoint.json');
+ expect(stateCopy.headBlobPath).toEqual('/blob/path');
});
});
diff --git a/spec/frontend/reports/grouped_test_report/store/utils_spec.js b/spec/frontend/reports/grouped_test_report/store/utils_spec.js
index 63320744796..760afe1c11a 100644
--- a/spec/frontend/reports/grouped_test_report/store/utils_spec.js
+++ b/spec/frontend/reports/grouped_test_report/store/utils_spec.js
@@ -238,4 +238,18 @@ describe('Reports store utils', () => {
});
});
});
+
+ describe('formatFilePath', () => {
+ it.each`
+ file | expected
+ ${'./test.js'} | ${'test.js'}
+ ${'/test.js'} | ${'test.js'}
+ ${'.//////////////test.js'} | ${'test.js'}
+ ${'test.js'} | ${'test.js'}
+ ${'mock/path./test.js'} | ${'mock/path./test.js'}
+ ${'./mock/path./test.js'} | ${'mock/path./test.js'}
+ `('should format $file to be $expected', ({ file, expected }) => {
+ expect(utils.formatFilePath(file)).toBe(expected);
+ });
+ });
});
diff --git a/spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap b/spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap
index 6968fb3e153..836ae5c22e6 100644
--- a/spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap
+++ b/spec/frontend/repository/components/__snapshots__/directory_download_links_spec.js.snap
@@ -16,22 +16,30 @@ exports[`Repository directory download links component renders downloads links f
<div
class="btn-group ml-0 w-100"
>
- <gl-link-stub
- class="btn btn-xs btn-primary"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
href="http://test.com/?path=app"
+ icon=""
+ size="small"
+ variant="confirm"
>
zip
- </gl-link-stub>
- <gl-link-stub
- class="btn btn-xs"
+ </gl-button-stub>
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
href="http://test.com/?path=app"
+ icon=""
+ size="small"
+ variant="default"
>
tar
- </gl-link-stub>
+ </gl-button-stub>
</div>
</div>
</section>
@@ -53,22 +61,30 @@ exports[`Repository directory download links component renders downloads links f
<div
class="btn-group ml-0 w-100"
>
- <gl-link-stub
- class="btn btn-xs btn-primary"
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
href="http://test.com/?path=app/assets"
+ icon=""
+ size="small"
+ variant="confirm"
>
zip
- </gl-link-stub>
- <gl-link-stub
- class="btn btn-xs"
+ </gl-button-stub>
+ <gl-button-stub
+ buttontextclasses=""
+ category="primary"
href="http://test.com/?path=app/assets"
+ icon=""
+ size="small"
+ variant="default"
>
tar
- </gl-link-stub>
+ </gl-button-stub>
</div>
</div>
</section>
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
new file mode 100644
index 00000000000..b662a1d20a9
--- /dev/null
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -0,0 +1,86 @@
+import { GlLoadingIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import BlobContent from '~/blob/components/blob_content.vue';
+import BlobHeader from '~/blob/components/blob_header.vue';
+import BlobContentViewer from '~/repository/components/blob_content_viewer.vue';
+
+let wrapper;
+const mockData = {
+ name: 'some_file.js',
+ size: 123,
+ rawBlob: 'raw content',
+ type: 'text',
+ fileType: 'text',
+ tooLarge: false,
+ path: 'some_file.js',
+ editBlobPath: 'some_file.js/edit',
+ ideEditPath: 'some_file.js/ide/edit',
+ storedExternally: false,
+ rawPath: 'some_file.js',
+ externalStorageUrl: 'some_file.js',
+ replacePath: 'some_file.js/replace',
+ deletePath: 'some_file.js/delete',
+ canLock: true,
+ isLocked: false,
+ lockLink: 'some_file.js/lock',
+ canModifyBlob: true,
+ forkPath: 'some_file.js/fork',
+ simpleViewer: {},
+ richViewer: {},
+};
+
+function factory(path, loading = false) {
+ wrapper = shallowMount(BlobContentViewer, {
+ propsData: {
+ path,
+ },
+ mocks: {
+ $apollo: {
+ queries: {
+ blobInfo: {
+ loading,
+ },
+ },
+ },
+ },
+ });
+
+ wrapper.setData({ blobInfo: mockData });
+}
+
+describe('Blob content viewer component', () => {
+ const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ const findBlobHeader = () => wrapper.find(BlobHeader);
+ const findBlobContent = () => wrapper.find(BlobContent);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ beforeEach(() => {
+ factory('some_file.js');
+ });
+
+ it('renders a GlLoadingIcon component', () => {
+ factory('some_file.js', true);
+
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+
+ it('renders a BlobHeader component', () => {
+ expect(findBlobHeader().exists()).toBe(true);
+ });
+
+ it('renders a BlobContent component', () => {
+ expect(findBlobContent().exists()).toBe(true);
+
+ expect(findBlobContent().props('loading')).toEqual(false);
+ expect(findBlobContent().props('content')).toEqual('raw content');
+ expect(findBlobContent().props('isRawContent')).toBe(true);
+ expect(findBlobContent().props('activeViewer')).toEqual({
+ fileType: 'text',
+ tooLarge: false,
+ type: 'text',
+ });
+ });
+});
diff --git a/spec/frontend/repository/components/breadcrumbs_spec.js b/spec/frontend/repository/components/breadcrumbs_spec.js
index 2ac2069a177..93bfd3d9d32 100644
--- a/spec/frontend/repository/components/breadcrumbs_spec.js
+++ b/spec/frontend/repository/components/breadcrumbs_spec.js
@@ -1,24 +1,36 @@
import { GlDropdown } from '@gitlab/ui';
import { shallowMount, RouterLinkStub } from '@vue/test-utils';
import Breadcrumbs from '~/repository/components/breadcrumbs.vue';
-
-let vm;
-
-function factory(currentPath, extraProps = {}) {
- vm = shallowMount(Breadcrumbs, {
- propsData: {
- currentPath,
- ...extraProps,
- },
- stubs: {
- RouterLink: RouterLinkStub,
- },
- });
-}
+import UploadBlobModal from '~/repository/components/upload_blob_modal.vue';
describe('Repository breadcrumbs component', () => {
+ let wrapper;
+
+ const factory = (currentPath, extraProps = {}) => {
+ const $apollo = {
+ queries: {
+ userPermissions: {
+ loading: true,
+ },
+ },
+ };
+
+ wrapper = shallowMount(Breadcrumbs, {
+ propsData: {
+ currentPath,
+ ...extraProps,
+ },
+ stubs: {
+ RouterLink: RouterLinkStub,
+ },
+ mocks: { $apollo },
+ });
+ };
+
+ const findUploadBlobModal = () => wrapper.find(UploadBlobModal);
+
afterEach(() => {
- vm.destroy();
+ wrapper.destroy();
});
it.each`
@@ -30,13 +42,13 @@ describe('Repository breadcrumbs component', () => {
`('renders $linkCount links for path $path', ({ path, linkCount }) => {
factory(path);
- expect(vm.findAll(RouterLinkStub).length).toEqual(linkCount);
+ expect(wrapper.findAll(RouterLinkStub).length).toEqual(linkCount);
});
it('escapes hash in directory path', () => {
factory('app/assets/javascripts#');
- expect(vm.findAll(RouterLinkStub).at(3).props('to')).toEqual(
+ expect(wrapper.findAll(RouterLinkStub).at(3).props('to')).toEqual(
'/-/tree/app/assets/javascripts%23',
);
});
@@ -44,26 +56,44 @@ describe('Repository breadcrumbs component', () => {
it('renders last link as active', () => {
factory('app/assets');
- expect(vm.findAll(RouterLinkStub).at(2).attributes('aria-current')).toEqual('page');
+ expect(wrapper.findAll(RouterLinkStub).at(2).attributes('aria-current')).toEqual('page');
});
- it('does not render add to tree dropdown when permissions are false', () => {
+ it('does not render add to tree dropdown when permissions are false', async () => {
factory('/', { canCollaborate: false });
- vm.setData({ userPermissions: { forkProject: false, createMergeRequestIn: false } });
+ wrapper.setData({ userPermissions: { forkProject: false, createMergeRequestIn: false } });
- return vm.vm.$nextTick(() => {
- expect(vm.find(GlDropdown).exists()).toBe(false);
- });
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(GlDropdown).exists()).toBe(false);
});
- it('renders add to tree dropdown when permissions are true', () => {
+ it('renders add to tree dropdown when permissions are true', async () => {
factory('/', { canCollaborate: true });
- vm.setData({ userPermissions: { forkProject: true, createMergeRequestIn: true } });
+ wrapper.setData({ userPermissions: { forkProject: true, createMergeRequestIn: true } });
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(GlDropdown).exists()).toBe(true);
+ });
+
+ describe('renders the upload blob modal', () => {
+ beforeEach(() => {
+ factory('/', { canEditTree: true });
+ });
+
+ it('does not render the modal while loading', () => {
+ expect(findUploadBlobModal().exists()).toBe(false);
+ });
+
+ it('renders the modal once loaded', async () => {
+ wrapper.setData({ $apollo: { queries: { userPermissions: { loading: false } } } });
+
+ await wrapper.vm.$nextTick();
- return vm.vm.$nextTick(() => {
- expect(vm.find(GlDropdown).exists()).toBe(true);
+ expect(findUploadBlobModal().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/repository/components/table/row_spec.js b/spec/frontend/repository/components/table/row_spec.js
index 69cb69de5df..3ebffbedcdb 100644
--- a/spec/frontend/repository/components/table/row_spec.js
+++ b/spec/frontend/repository/components/table/row_spec.js
@@ -19,6 +19,9 @@ function factory(propsData = {}) {
projectPath: 'gitlab-org/gitlab-ce',
url: `https://test.com`,
},
+ provide: {
+ glFeatures: { refactorBlobViewer: true },
+ },
mocks: {
$router,
},
@@ -81,7 +84,7 @@ describe('Repository table row component', () => {
it.each`
type | component | componentName
${'tree'} | ${RouterLinkStub} | ${'RouterLink'}
- ${'file'} | ${'a'} | ${'hyperlink'}
+ ${'blob'} | ${RouterLinkStub} | ${'RouterLink'}
${'commit'} | ${'a'} | ${'hyperlink'}
`('renders a $componentName for type $type', ({ type, component }) => {
factory({
diff --git a/spec/frontend/repository/pages/blob_spec.js b/spec/frontend/repository/pages/blob_spec.js
new file mode 100644
index 00000000000..3e7ead4ad00
--- /dev/null
+++ b/spec/frontend/repository/pages/blob_spec.js
@@ -0,0 +1,25 @@
+import { shallowMount } from '@vue/test-utils';
+import BlobContentViewer from '~/repository/components/blob_content_viewer.vue';
+import BlobPage from '~/repository/pages/blob.vue';
+
+jest.mock('~/repository/utils/dom');
+
+describe('Repository blob page component', () => {
+ let wrapper;
+
+ const findBlobContentViewer = () => wrapper.find(BlobContentViewer);
+ const path = 'file.js';
+
+ beforeEach(() => {
+ wrapper = shallowMount(BlobPage, { propsData: { path } });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('has a Blob Content Viewer component', () => {
+ expect(findBlobContentViewer().exists()).toBe(true);
+ expect(findBlobContentViewer().props('path')).toBe(path);
+ });
+});
diff --git a/spec/frontend/repository/router_spec.js b/spec/frontend/repository/router_spec.js
index 3c7dda05ca3..3354b2315fc 100644
--- a/spec/frontend/repository/router_spec.js
+++ b/spec/frontend/repository/router_spec.js
@@ -1,3 +1,4 @@
+import BlobPage from '~/repository/pages/blob.vue';
import IndexPage from '~/repository/pages/index.vue';
import TreePage from '~/repository/pages/tree.vue';
import createRouter from '~/repository/router';
@@ -11,6 +12,7 @@ describe('Repository router spec', () => {
${'/-/tree/master'} | ${'master'} | ${TreePage} | ${'TreePage'}
${'/-/tree/master/app/assets'} | ${'master'} | ${TreePage} | ${'TreePage'}
${'/-/tree/123/app/assets'} | ${'master'} | ${null} | ${'null'}
+ ${'/-/blob/master/file.md'} | ${'master'} | ${BlobPage} | ${'BlobPage'}
`('sets component as $componentName for path "$path"', ({ path, component, branch }) => {
const router = createRouter('', branch);
diff --git a/spec/frontend/runner/runner_detail/runner_detail_app_spec.js b/spec/frontend/runner/runner_detail/runner_detail_app_spec.js
new file mode 100644
index 00000000000..5caa37c8cb3
--- /dev/null
+++ b/spec/frontend/runner/runner_detail/runner_detail_app_spec.js
@@ -0,0 +1,29 @@
+import { shallowMount } from '@vue/test-utils';
+import RunnerDetailsApp from '~/runner/runner_details/runner_details_app.vue';
+
+const mockRunnerId = '55';
+
+describe('RunnerDetailsApp', () => {
+ let wrapper;
+
+ const createComponent = (props) => {
+ wrapper = shallowMount(RunnerDetailsApp, {
+ propsData: {
+ runnerId: mockRunnerId,
+ ...props,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('displays the runner id', () => {
+ expect(wrapper.text()).toContain('Runner #55');
+ });
+});
diff --git a/spec/frontend/security_configuration/configuration_table_spec.js b/spec/frontend/security_configuration/configuration_table_spec.js
index b8a574dc4e0..a1789052c92 100644
--- a/spec/frontend/security_configuration/configuration_table_spec.js
+++ b/spec/frontend/security_configuration/configuration_table_spec.js
@@ -30,7 +30,7 @@ describe('Configuration Table Component', () => {
expect(wrapper.text()).toContain(scanner.name);
expect(wrapper.text()).toContain(scanner.description);
if (scanner.type === REPORT_TYPE_SAST) {
- expect(wrapper.findByTestId(scanner.type).text()).toBe('Configure via Merge Request');
+ expect(wrapper.findByTestId(scanner.type).text()).toBe('Configure via merge request');
} else if (scanner.type !== REPORT_TYPE_SECRET_DETECTION) {
expect(wrapper.findByTestId(scanner.type).text()).toMatchInterpolatedText(UPGRADE_CTA);
}
diff --git a/spec/frontend/security_configuration/manage_sast_spec.js b/spec/frontend/security_configuration/manage_sast_spec.js
index 7c76f19ddb4..15a57210246 100644
--- a/spec/frontend/security_configuration/manage_sast_spec.js
+++ b/spec/frontend/security_configuration/manage_sast_spec.js
@@ -79,7 +79,7 @@ describe('Manage Sast Component', () => {
it('should render Button with correct text', () => {
createComponent();
- expect(findButton().text()).toContain('Configure via Merge Request');
+ expect(findButton().text()).toContain('Configure via merge request');
});
describe('given a successful response', () => {
diff --git a/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap b/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
index 4b827301943..33df3a66fcd 100644
--- a/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
+++ b/spec/frontend/serverless/components/__snapshots__/empty_state_spec.js.snap
@@ -10,8 +10,8 @@ exports[`EmptyStateComponent should render content 1`] = `
<h1 class=\\"h4\\">Getting started with serverless</h1>
<p>In order to start using functions as a service, you must first install Knative on your Kubernetes cluster. <gl-link-stub href=\\"/help\\">More information</gl-link-stub>
</p>
- <div>
- <gl-button-stub category=\\"primary\\" variant=\\"success\\" size=\\"medium\\" icon=\\"\\" buttontextclasses=\\"\\" href=\\"/clusters\\">Install Knative</gl-button-stub>
+ <div class=\\"gl-display-flex gl-flex-wrap gl-justify-content-center\\">
+ <gl-button-stub category=\\"primary\\" variant=\\"confirm\\" size=\\"medium\\" icon=\\"\\" buttontextclasses=\\"\\" href=\\"/clusters\\" class=\\"gl-mb-3 gl-mx-2\\">Install Knative</gl-button-stub>
<!---->
</div>
</div>
diff --git a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
index 21b9721438d..403f9509f84 100644
--- a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
+++ b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
@@ -44,6 +44,7 @@ describe('SetStatusModalWrapper', () => {
const findNoEmojiPlaceholder = () => wrapper.find('.js-no-emoji-placeholder');
const findToggleEmojiButton = () => wrapper.find('.js-toggle-emoji-menu');
const findAvailabilityCheckbox = () => wrapper.find(GlFormCheckbox);
+ const findClearStatusAtMessage = () => wrapper.find('[data-testid="clear-status-at-message"]');
const initModal = ({ mockOnUpdateSuccess = true, mockOnUpdateFailure = true } = {}) => {
const modal = findModal();
@@ -57,18 +58,18 @@ describe('SetStatusModalWrapper', () => {
return wrapper.vm.$nextTick();
};
- beforeEach(async () => {
- mockEmoji = await initEmojiMock();
- wrapper = createComponent();
- return initModal();
- });
-
afterEach(() => {
wrapper.destroy();
mockEmoji.restore();
});
describe('with minimum props', () => {
+ beforeEach(async () => {
+ mockEmoji = await initEmojiMock();
+ wrapper = createComponent();
+ return initModal();
+ });
+
it('sets the hidden status emoji field', () => {
const field = findFormField('emoji');
expect(field.exists()).toBe(true);
@@ -96,6 +97,14 @@ describe('SetStatusModalWrapper', () => {
findToggleEmojiButton().trigger('click');
expect(wrapper.vm.showEmojiMenu).toHaveBeenCalled();
});
+
+ it('displays the clear status at dropdown', () => {
+ expect(wrapper.find('[data-testid="clear-status-at-dropdown"]').exists()).toBe(true);
+ });
+
+ it('does not display the clear status at message', () => {
+ expect(findClearStatusAtMessage().exists()).toBe(false);
+ });
});
describe('with no currentMessage set', () => {
@@ -146,9 +155,28 @@ describe('SetStatusModalWrapper', () => {
});
});
+ describe('with currentClearStatusAfter set', () => {
+ beforeEach(async () => {
+ mockEmoji = await initEmojiMock();
+ wrapper = createComponent({ currentClearStatusAfter: '2021-01-01 00:00:00 UTC' });
+ return initModal();
+ });
+
+ it('displays the clear status at message', () => {
+ const clearStatusAtMessage = findClearStatusAtMessage();
+
+ expect(clearStatusAtMessage.exists()).toBe(true);
+ expect(clearStatusAtMessage.text()).toBe('Your status resets on 2021-01-01 00:00:00 UTC.');
+ });
+ });
+
describe('update status', () => {
describe('succeeds', () => {
- beforeEach(() => {
+ beforeEach(async () => {
+ mockEmoji = await initEmojiMock();
+ wrapper = createComponent();
+ await initModal();
+
jest.spyOn(UserApi, 'updateUserStatus').mockResolvedValue();
});
@@ -167,18 +195,26 @@ describe('SetStatusModalWrapper', () => {
// set the availability status
findAvailabilityCheckbox().vm.$emit('input', true);
+ // set the currentClearStatusAfter to 30 minutes
+ wrapper.find('[data-testid="thirtyMinutes"]').vm.$emit('click');
+
findModal().vm.$emit('ok');
await wrapper.vm.$nextTick();
- const commonParams = { emoji: defaultEmoji, message: defaultMessage };
+ const commonParams = {
+ emoji: defaultEmoji,
+ message: defaultMessage,
+ };
expect(UserApi.updateUserStatus).toHaveBeenCalledTimes(2);
expect(UserApi.updateUserStatus).toHaveBeenNthCalledWith(1, {
availability: AVAILABILITY_STATUS.NOT_SET,
+ clearStatusAfter: null,
...commonParams,
});
expect(UserApi.updateUserStatus).toHaveBeenNthCalledWith(2, {
availability: AVAILABILITY_STATUS.BUSY,
+ clearStatusAfter: '30_minutes',
...commonParams,
});
});
@@ -208,7 +244,11 @@ describe('SetStatusModalWrapper', () => {
});
describe('with errors', () => {
- beforeEach(() => {
+ beforeEach(async () => {
+ mockEmoji = await initEmojiMock();
+ wrapper = createComponent();
+ await initModal();
+
jest.spyOn(UserApi, 'updateUserStatus').mockRejectedValue();
});
diff --git a/spec/frontend/sidebar/assignees_realtime_spec.js b/spec/frontend/sidebar/assignees_realtime_spec.js
index 0fab6a29f71..f0a6fa40d67 100644
--- a/spec/frontend/sidebar/assignees_realtime_spec.js
+++ b/spec/frontend/sidebar/assignees_realtime_spec.js
@@ -1,7 +1,7 @@
import ActionCable from '@rails/actioncable';
import { shallowMount } from '@vue/test-utils';
-import query from '~/issuable_sidebar/queries/issue_sidebar.query.graphql';
import AssigneesRealtime from '~/sidebar/components/assignees/assignees_realtime.vue';
+import { assigneesQueries } from '~/sidebar/constants';
import SidebarMediator from '~/sidebar/sidebar_mediator';
import Mock from './mock_data';
@@ -18,18 +18,19 @@ describe('Assignees Realtime', () => {
let wrapper;
let mediator;
- const createComponent = () => {
+ const createComponent = (issuableType = 'issue') => {
wrapper = shallowMount(AssigneesRealtime, {
propsData: {
issuableIid: '1',
mediator,
projectPath: 'path/to/project',
+ issuableType,
},
mocks: {
$apollo: {
- query,
+ query: assigneesQueries[issuableType].query,
queries: {
- project: {
+ workspace: {
refetch: jest.fn(),
},
},
@@ -51,8 +52,8 @@ describe('Assignees Realtime', () => {
describe('when handleFetchResult is called from smart query', () => {
it('sets assignees to the store', () => {
const data = {
- project: {
- issue: {
+ workspace: {
+ issuable: {
assignees: {
nodes: [{ id: 'gid://gitlab/Environments/123', avatarUrl: 'url' }],
},
@@ -95,7 +96,7 @@ describe('Assignees Realtime', () => {
wrapper.vm.received({ event: 'updated' });
return wrapper.vm.$nextTick().then(() => {
- expect(wrapper.vm.$apollo.queries.project.refetch).toHaveBeenCalledTimes(1);
+ expect(wrapper.vm.$apollo.queries.workspace.refetch).toHaveBeenCalledTimes(1);
});
});
});
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
new file mode 100644
index 00000000000..824f6d49c65
--- /dev/null
+++ b/spec/frontend/sidebar/components/assignees/sidebar_assignees_widget_spec.js
@@ -0,0 +1,558 @@
+import { GlSearchBoxByType, GlDropdown } from '@gitlab/ui';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { cloneDeep } from 'lodash';
+import { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import searchUsersQuery from '~/graphql_shared/queries/users_search.query.graphql';
+import { IssuableType } from '~/issue_show/constants';
+import SidebarAssigneesRealtime from '~/sidebar/components/assignees/assignees_realtime.vue';
+import IssuableAssignees from '~/sidebar/components/assignees/issuable_assignees.vue';
+import SidebarAssigneesWidget from '~/sidebar/components/assignees/sidebar_assignees_widget.vue';
+import SidebarInviteMembers from '~/sidebar/components/assignees/sidebar_invite_members.vue';
+import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
+import { ASSIGNEES_DEBOUNCE_DELAY } from '~/sidebar/constants';
+import MultiSelectDropdown from '~/vue_shared/components/sidebar/multiselect_dropdown.vue';
+import getIssueParticipantsQuery from '~/vue_shared/components/sidebar/queries/get_issue_participants.query.graphql';
+import updateIssueAssigneesMutation from '~/vue_shared/components/sidebar/queries/update_issue_assignees.mutation.graphql';
+import {
+ issuableQueryResponse,
+ searchQueryResponse,
+ updateIssueAssigneesMutationResponse,
+} from '../../mock_data';
+
+jest.mock('~/flash');
+
+const updateIssueAssigneesMutationSuccess = jest
+ .fn()
+ .mockResolvedValue(updateIssueAssigneesMutationResponse);
+const mockError = jest.fn().mockRejectedValue('Error!');
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+const initialAssignees = [
+ {
+ id: 'some-user',
+ avatarUrl: 'some-user-avatar',
+ name: 'test',
+ username: 'test',
+ webUrl: '/test',
+ },
+];
+
+describe('Sidebar assignees widget', () => {
+ let wrapper;
+ let fakeApollo;
+
+ const findAssignees = () => wrapper.findComponent(IssuableAssignees);
+ const findRealtimeAssignees = () => wrapper.findComponent(SidebarAssigneesRealtime);
+ const findEditableItem = () => wrapper.findComponent(SidebarEditableItem);
+ const findDropdown = () => wrapper.findComponent(MultiSelectDropdown);
+ const findInviteMembersLink = () => wrapper.findComponent(SidebarInviteMembers);
+ const findSearchField = () => wrapper.findComponent(GlSearchBoxByType);
+
+ const findParticipantsLoading = () => wrapper.find('[data-testid="loading-participants"]');
+ const findSelectedParticipants = () => wrapper.findAll('[data-testid="selected-participant"]');
+ const findUnselectedParticipants = () =>
+ wrapper.findAll('[data-testid="unselected-participant"]');
+ const findCurrentUser = () => wrapper.findAll('[data-testid="current-user"]');
+ const findUnassignLink = () => wrapper.find('[data-testid="unassign"]');
+ const findEmptySearchResults = () => wrapper.find('[data-testid="empty-results"]');
+
+ const expandDropdown = () => wrapper.vm.$refs.toggle.expand();
+
+ const createComponent = ({
+ search = '',
+ issuableQueryHandler = jest.fn().mockResolvedValue(issuableQueryResponse),
+ searchQueryHandler = jest.fn().mockResolvedValue(searchQueryResponse),
+ updateIssueAssigneesMutationHandler = updateIssueAssigneesMutationSuccess,
+ props = {},
+ provide = {},
+ } = {}) => {
+ fakeApollo = createMockApollo([
+ [getIssueParticipantsQuery, issuableQueryHandler],
+ [searchUsersQuery, searchQueryHandler],
+ [updateIssueAssigneesMutation, updateIssueAssigneesMutationHandler],
+ ]);
+ wrapper = shallowMount(SidebarAssigneesWidget, {
+ localVue,
+ apolloProvider: fakeApollo,
+ propsData: {
+ iid: '1',
+ fullPath: '/mygroup/myProject',
+ ...props,
+ },
+ data() {
+ return {
+ search,
+ selected: [],
+ };
+ },
+ provide: {
+ canUpdate: true,
+ rootPath: '/',
+ ...provide,
+ },
+ stubs: {
+ SidebarEditableItem,
+ MultiSelectDropdown,
+ GlSearchBoxByType,
+ GlDropdown,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ gon.current_username = 'root';
+ gon.current_user_fullname = 'Administrator';
+ gon.current_user_avatar_url = '/root';
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ wrapper = null;
+ fakeApollo = null;
+ delete gon.current_username;
+ });
+
+ describe('with passed initial assignees', () => {
+ it('passes `initialLoading` as false to editable item', () => {
+ createComponent({
+ props: {
+ initialAssignees,
+ },
+ });
+
+ expect(findEditableItem().props('initialLoading')).toBe(false);
+ });
+
+ it('renders an initial assignees list with initialAssignees prop', () => {
+ createComponent({
+ props: {
+ initialAssignees,
+ },
+ });
+
+ expect(findAssignees().props('users')).toEqual(initialAssignees);
+ });
+
+    it('renders a collapsible item title based on the number of initial assignees', () => {
+ createComponent({
+ props: {
+ initialAssignees,
+ },
+ });
+
+ expect(findEditableItem().props('title')).toBe('Assignee');
+ });
+
+ describe('when expanded', () => {
+ it('renders a loading spinner if participants are loading', () => {
+ createComponent({
+ props: {
+ initialAssignees,
+ },
+ });
+ expandDropdown();
+
+ expect(findParticipantsLoading().exists()).toBe(true);
+ });
+ });
+ });
+
+ describe('without passed initial assignees', () => {
+ it('passes `initialLoading` as true to editable item', () => {
+ createComponent();
+
+ expect(findEditableItem().props('initialLoading')).toBe(true);
+ });
+
+ it('renders assignees list from API response when resolved', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(findAssignees().props('users')).toEqual(
+ issuableQueryResponse.data.workspace.issuable.assignees.nodes,
+ );
+ });
+
+ it('renders an error when issuable query is rejected', async () => {
+ createComponent({
+ issuableQueryHandler: mockError,
+ });
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'An error occurred while fetching participants.',
+ });
+ });
+
+ it('assigns current user when clicking `Assign self`', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ findAssignees().vm.$emit('assign-self');
+
+ expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledWith({
+ assigneeUsernames: 'root',
+ fullPath: '/mygroup/myProject',
+ iid: '1',
+ });
+
+ await waitForPromises();
+
+ expect(
+ findAssignees()
+ .props('users')
+ .some((user) => user.username === 'root'),
+ ).toBe(true);
+ });
+
+ it('emits an event with assignees list on successful mutation', async () => {
+ createComponent();
+
+ await waitForPromises();
+
+ findAssignees().vm.$emit('assign-self');
+
+ expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledWith({
+ assigneeUsernames: 'root',
+ fullPath: '/mygroup/myProject',
+ iid: '1',
+ });
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('assignees-updated')).toEqual([
+ [
+ [
+ {
+ __typename: 'User',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ id: 'gid://gitlab/User/1',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: '/root',
+ status: null,
+ },
+ ],
+ ],
+ ]);
+ });
+
+ it('renders current user if they are not in participants or assignees', async () => {
+ gon.current_username = 'random';
+ gon.current_user_fullname = 'Mr Random';
+ gon.current_user_avatar_url = '/random';
+
+ createComponent();
+ await waitForPromises();
+ expandDropdown();
+
+ expect(findCurrentUser().exists()).toBe(true);
+ });
+
+ describe('when expanded', () => {
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
+ expandDropdown();
+ });
+
+ it('collapses the widget on multiselect dropdown toggle event', async () => {
+ findDropdown().vm.$emit('toggle');
+ await nextTick();
+ expect(findDropdown().isVisible()).toBe(false);
+ });
+
+      it('renders a participants list with the correct number of selected and unselected users', async () => {
+ expect(findSelectedParticipants()).toHaveLength(1);
+ expect(findUnselectedParticipants()).toHaveLength(2);
+ });
+
+ it('does not render current user if they are in participants', () => {
+ expect(findCurrentUser().exists()).toBe(false);
+ });
+
+ it('unassigns all participants when clicking on `Unassign`', () => {
+ findUnassignLink().vm.$emit('click');
+ findEditableItem().vm.$emit('close');
+
+ expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledWith({
+ assigneeUsernames: [],
+ fullPath: '/mygroup/myProject',
+ iid: '1',
+ });
+ });
+ });
+
+ describe('when multiselect is disabled', () => {
+ beforeEach(async () => {
+ createComponent({ props: { multipleAssignees: false } });
+ await waitForPromises();
+ expandDropdown();
+ });
+
+      it('adds a single assignee when clicking on an unselected user', async () => {
+ findUnselectedParticipants().at(0).vm.$emit('click');
+
+ expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledWith({
+ assigneeUsernames: ['root'],
+ fullPath: '/mygroup/myProject',
+ iid: '1',
+ });
+ });
+
+      it('removes an assignee when clicking on a selected user', () => {
+ findSelectedParticipants().at(0).vm.$emit('click', new Event('click'));
+
+ expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledWith({
+ assigneeUsernames: [],
+ fullPath: '/mygroup/myProject',
+ iid: '1',
+ });
+ });
+ });
+
+ describe('when multiselect is enabled', () => {
+ beforeEach(async () => {
+ createComponent({ props: { multipleAssignees: true } });
+ await waitForPromises();
+ expandDropdown();
+ });
+
+      it('adds multiple assignees after clicking on unselected users and closing the dropdown', () => {
+ findUnselectedParticipants().at(0).vm.$emit('click');
+ findUnselectedParticipants().at(1).vm.$emit('click');
+ findEditableItem().vm.$emit('close');
+
+ expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledWith({
+ assigneeUsernames: ['francina.skiles', 'root', 'johndoe'],
+ fullPath: '/mygroup/myProject',
+ iid: '1',
+ });
+ });
+
+      it('removes an assignee when clicking on a selected user and then closing the dropdown', () => {
+ findSelectedParticipants().at(0).vm.$emit('click', new Event('click'));
+
+ findEditableItem().vm.$emit('close');
+
+ expect(updateIssueAssigneesMutationSuccess).toHaveBeenCalledWith({
+ assigneeUsernames: [],
+ fullPath: '/mygroup/myProject',
+ iid: '1',
+ });
+ });
+
+      it('does not call the mutation when clicking on participants until the dropdown is closed', () => {
+ findUnselectedParticipants().at(0).vm.$emit('click');
+ findSelectedParticipants().at(0).vm.$emit('click', new Event('click'));
+
+ expect(updateIssueAssigneesMutationSuccess).not.toHaveBeenCalled();
+ });
+ });
+
+ it('shows an error if update assignees mutation is rejected', async () => {
+ createComponent({ updateIssueAssigneesMutationHandler: mockError });
+ await waitForPromises();
+ expandDropdown();
+
+ findUnassignLink().vm.$emit('click');
+ findEditableItem().vm.$emit('close');
+
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'An error occurred while updating assignees.',
+ });
+ });
+
+ describe('when searching', () => {
+ it('does not show loading spinner when debounce timer is still running', async () => {
+ createComponent({ search: 'roo' });
+ await waitForPromises();
+ expandDropdown();
+
+ expect(findParticipantsLoading().exists()).toBe(false);
+ });
+
+ it('shows loading spinner when searching for users', async () => {
+ createComponent({ search: 'roo' });
+ await waitForPromises();
+ expandDropdown();
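+      // Advance the fake timers past the debounce delay so the debounced user search query fires.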
+ jest.advanceTimersByTime(ASSIGNEES_DEBOUNCE_DELAY);
+ await nextTick();
+
+ expect(findParticipantsLoading().exists()).toBe(true);
+ });
+
+ it('renders a list of found users and external participants matching search term', async () => {
+ const responseCopy = cloneDeep(issuableQueryResponse);
+ responseCopy.data.workspace.issuable.participants.nodes.push({
+ id: 'gid://gitlab/User/5',
+ avatarUrl: '/someavatar',
+ name: 'Roodie',
+ username: 'roodie',
+ webUrl: '/roodie',
+ status: null,
+ });
+
+ const issuableQueryHandler = jest.fn().mockResolvedValue(responseCopy);
+
+ createComponent({ issuableQueryHandler });
+ await waitForPromises();
+ expandDropdown();
+
+ findSearchField().vm.$emit('input', 'roo');
+ await nextTick();
+
+ jest.advanceTimersByTime(ASSIGNEES_DEBOUNCE_DELAY);
+ await nextTick();
+ await waitForPromises();
+
+ expect(findUnselectedParticipants()).toHaveLength(3);
+ });
+
+ it('renders a list of found users only if no external participants match search term', async () => {
+ createComponent({ search: 'roo' });
+ await waitForPromises();
+ expandDropdown();
+      jest.advanceTimersByTime(ASSIGNEES_DEBOUNCE_DELAY);
+ await nextTick();
+ await waitForPromises();
+
+ expect(findUnselectedParticipants()).toHaveLength(2);
+ });
+
+ it('shows a message about no matches if search returned an empty list', async () => {
+ const responseCopy = cloneDeep(searchQueryResponse);
+ responseCopy.data.workspace.users.nodes = [];
+
+ createComponent({
+ search: 'roo',
+ searchQueryHandler: jest.fn().mockResolvedValue(responseCopy),
+ });
+ await waitForPromises();
+ expandDropdown();
+ jest.advanceTimersByTime(ASSIGNEES_DEBOUNCE_DELAY);
+ await nextTick();
+ await waitForPromises();
+
+ expect(findUnselectedParticipants()).toHaveLength(0);
+ expect(findEmptySearchResults().exists()).toBe(true);
+ });
+
+ it('shows an error if search query was rejected', async () => {
+ createComponent({ search: 'roo', searchQueryHandler: mockError });
+ await waitForPromises();
+ expandDropdown();
+      jest.advanceTimersByTime(ASSIGNEES_DEBOUNCE_DELAY);
+ await nextTick();
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'An error occurred while searching users.',
+ });
+ });
+ });
+ });
+
+ describe('when user is not signed in', () => {
+ beforeEach(() => {
+ gon.current_username = undefined;
+ createComponent();
+ });
+
+ it('does not show current user in the dropdown', () => {
+ expandDropdown();
+ expect(findCurrentUser().exists()).toBe(false);
+ });
+
+ it('passes signedIn prop as false to IssuableAssignees', () => {
+ expect(findAssignees().props('signedIn')).toBe(false);
+ });
+ });
+
+  it('does not render SidebarAssigneesRealtime when the realtime feature flag is disabled', async () => {
+ createComponent();
+ await waitForPromises();
+ expect(findRealtimeAssignees().exists()).toBe(false);
+ });
+
+  it('renders SidebarAssigneesRealtime when the realtime feature flag is enabled', async () => {
+ createComponent({
+ provide: {
+ glFeatures: {
+ realTimeIssueSidebar: true,
+ },
+ },
+ });
+ await waitForPromises();
+ expect(findRealtimeAssignees().exists()).toBe(true);
+ });
+
+ describe('when making changes to participants list', () => {
+ beforeEach(async () => {
+ createComponent();
+ });
+
+ it('passes falsy `isDirty` prop to editable item if no changes to selected users were made', () => {
+ expandDropdown();
+ expect(findEditableItem().props('isDirty')).toBe(false);
+ });
+
+ it('passes truthy `isDirty` prop if selected users list was changed', async () => {
+ expandDropdown();
+ expect(findEditableItem().props('isDirty')).toBe(false);
+ findUnselectedParticipants().at(0).vm.$emit('click');
+ await nextTick();
+ expect(findEditableItem().props('isDirty')).toBe(true);
+ });
+
+ it('passes falsy `isDirty` prop after dropdown is closed', async () => {
+ expandDropdown();
+ findUnselectedParticipants().at(0).vm.$emit('click');
+ findEditableItem().vm.$emit('close');
+ await waitForPromises();
+ expect(findEditableItem().props('isDirty')).toBe(false);
+ });
+ });
+
+ it('does not render invite members link on non-issue sidebar', async () => {
+ createComponent({ props: { issuableType: IssuableType.MergeRequest } });
+ await waitForPromises();
+ expect(findInviteMembersLink().exists()).toBe(false);
+ });
+
+ it('does not render invite members link if `directlyInviteMembers` and `indirectlyInviteMembers` were not passed', async () => {
+ createComponent();
+ await waitForPromises();
+ expect(findInviteMembersLink().exists()).toBe(false);
+ });
+
+ it('renders invite members link if `directlyInviteMembers` is true', async () => {
+ createComponent({
+ provide: {
+ directlyInviteMembers: true,
+ },
+ });
+ await waitForPromises();
+ expect(findInviteMembersLink().exists()).toBe(true);
+ });
+
+ it('renders invite members link if `indirectlyInviteMembers` is true', async () => {
+ createComponent({
+ provide: {
+ indirectlyInviteMembers: true,
+ },
+ });
+ await waitForPromises();
+ expect(findInviteMembersLink().exists()).toBe(true);
+ });
+});
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_editable_item_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_editable_item_spec.js
index 4ee12838491..84b192aaf41 100644
--- a/spec/frontend/sidebar/components/assignees/sidebar_editable_item_spec.js
+++ b/spec/frontend/sidebar/components/assignees/sidebar_editable_item_spec.js
@@ -5,7 +5,7 @@ import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue'
describe('boards sidebar remove issue', () => {
let wrapper;
- const findLoader = () => wrapper.find(GlLoadingIcon);
+ const findLoader = () => wrapper.findComponent(GlLoadingIcon);
const findEditButton = () => wrapper.find('[data-testid="edit-button"]');
const findTitle = () => wrapper.find('[data-testid="title"]');
const findCollapsed = () => wrapper.find('[data-testid="collapsed-content"]');
@@ -117,4 +117,35 @@ describe('boards sidebar remove issue', () => {
expect(wrapper.emitted().close).toBeUndefined();
});
+
+  it('renders `Edit` text when the passed `isDirty` prop is false', () => {
+ createComponent({ props: { isDirty: false }, canUpdate: true });
+
+ expect(findEditButton().text()).toBe('Edit');
+ });
+
+  it('renders `Apply` text when the passed `isDirty` prop is true', () => {
+ createComponent({ props: { isDirty: true }, canUpdate: true });
+
+ expect(findEditButton().text()).toBe('Apply');
+ });
+
+ describe('when initial loading is true', () => {
+ beforeEach(() => {
+ createComponent({ props: { initialLoading: true } });
+ });
+
+ it('renders loading icon', () => {
+ expect(findLoader().exists()).toBe(true);
+ });
+
+ it('does not render edit button', () => {
+ expect(findEditButton().exists()).toBe(false);
+ });
+
+ it('does not render collapsed and expanded content', () => {
+ expect(findCollapsed().exists()).toBe(false);
+ expect(findExpanded().exists()).toBe(false);
+ });
+ });
});
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_invite_members_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_invite_members_spec.js
new file mode 100644
index 00000000000..06f7da3d1ab
--- /dev/null
+++ b/spec/frontend/sidebar/components/assignees/sidebar_invite_members_spec.js
@@ -0,0 +1,59 @@
+import { shallowMount } from '@vue/test-utils';
+import InviteMemberModal from '~/invite_member/components/invite_member_modal.vue';
+import InviteMemberTrigger from '~/invite_member/components/invite_member_trigger.vue';
+import InviteMembersTrigger from '~/invite_members/components/invite_members_trigger.vue';
+import SidebarInviteMembers from '~/sidebar/components/assignees/sidebar_invite_members.vue';
+
+const testProjectMembersPath = 'test-path';
+
+describe('Sidebar invite members component', () => {
+ let wrapper;
+
+ const findDirectInviteLink = () => wrapper.findComponent(InviteMembersTrigger);
+ const findIndirectInviteLink = () => wrapper.findComponent(InviteMemberTrigger);
+ const findInviteModal = () => wrapper.findComponent(InviteMemberModal);
+
+ const createComponent = ({ directlyInviteMembers = false } = {}) => {
+ wrapper = shallowMount(SidebarInviteMembers, {
+ provide: {
+ directlyInviteMembers,
+ projectMembersPath: testProjectMembersPath,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when directly inviting members', () => {
+ beforeEach(() => {
+ createComponent({ directlyInviteMembers: true });
+ });
+
+ it('renders a direct link to project members path', () => {
+ expect(findDirectInviteLink().exists()).toBe(true);
+ });
+
+ it('does not render invite members trigger and modal components', () => {
+ expect(findIndirectInviteLink().exists()).toBe(false);
+ expect(findInviteModal().exists()).toBe(false);
+ });
+ });
+
+ describe('when indirectly inviting members', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('does not render a direct link to project members path', () => {
+ expect(findDirectInviteLink().exists()).toBe(false);
+ });
+
+    it('renders invite member trigger and modal components', () => {
+ expect(findIndirectInviteLink().exists()).toBe(true);
+ expect(findInviteModal().exists()).toBe(true);
+ expect(findInviteModal().props('membersPath')).toBe(testProjectMembersPath);
+ });
+ });
+});
diff --git a/spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js b/spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js
new file mode 100644
index 00000000000..88a5f4ea8b7
--- /dev/null
+++ b/spec/frontend/sidebar/components/assignees/sidebar_participant_spec.js
@@ -0,0 +1,43 @@
+import { GlAvatarLabeled } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import SidebarParticipant from '~/sidebar/components/assignees/sidebar_participant.vue';
+
+const user = {
+ name: 'John Doe',
+ username: 'johndoe',
+ webUrl: '/link',
+ avatarUrl: '/avatar',
+};
+
+describe('Sidebar participant component', () => {
+ let wrapper;
+
+ const findAvatar = () => wrapper.findComponent(GlAvatarLabeled);
+
+ const createComponent = (status = null) => {
+ wrapper = shallowMount(SidebarParticipant, {
+ propsData: {
+ user: {
+ ...user,
+ status,
+ },
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+  it('sets the avatar label to the user name when the user is not busy', () => {
+ createComponent();
+
+ expect(findAvatar().props('label')).toBe(user.name);
+ });
+
+  it('appends "(Busy)" to the avatar label when the user is busy', () => {
+ createComponent({ availability: 'BUSY' });
+
+ expect(findAvatar().props('label')).toBe(`${user.name} (Busy)`);
+ });
+});
diff --git a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js
index d5e6310ed38..28a19fb9df6 100644
--- a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js
+++ b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_form_spec.js
@@ -20,11 +20,9 @@ describe('Sidebar Confidentiality Form', () => {
mutate = jest.fn().mockResolvedValue('Success'),
} = {}) => {
wrapper = shallowMount(SidebarConfidentialityForm, {
- provide: {
+ propsData: {
fullPath: 'group/project',
iid: '1',
- },
- propsData: {
confidential: false,
issuableType: 'issue',
...props,
diff --git a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js
index 20a5be9b518..707215d0739 100644
--- a/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js
+++ b/spec/frontend/sidebar/components/confidential/sidebar_confidentiality_widget_spec.js
@@ -35,11 +35,11 @@ describe('Sidebar Confidentiality Widget', () => {
localVue,
apolloProvider: fakeApollo,
provide: {
- fullPath: 'group/project',
- iid: '1',
canUpdate: true,
},
propsData: {
+ fullPath: 'group/project',
+ iid: '1',
issuableType: 'issue',
},
stubs: {
diff --git a/spec/frontend/sidebar/components/copy_email_to_clipboard_spec.js b/spec/frontend/sidebar/components/copy_email_to_clipboard_spec.js
index 704847f65bf..699b2bbd0b1 100644
--- a/spec/frontend/sidebar/components/copy_email_to_clipboard_spec.js
+++ b/spec/frontend/sidebar/components/copy_email_to_clipboard_spec.js
@@ -1,22 +1,17 @@
-import { getByText } from '@testing-library/dom';
-import { mount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import CopyEmailToClipboard from '~/sidebar/components/copy_email_to_clipboard.vue';
-import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+import CopyableField from '~/vue_shared/components/sidebar/copyable_field.vue';
describe('CopyEmailToClipboard component', () => {
- const sampleEmail = 'sample+email@test.com';
+ const mockIssueEmailAddress = 'sample+email@test.com';
- const wrapper = mount(CopyEmailToClipboard, {
+ const wrapper = shallowMount(CopyEmailToClipboard, {
propsData: {
- copyText: sampleEmail,
+ issueEmailAddress: mockIssueEmailAddress,
},
});
- it('renders the Issue email text with the forwardable email', () => {
- expect(getByText(wrapper.element, `Issue email: ${sampleEmail}`)).not.toBeNull();
- });
-
- it('finds ClipboardButton with the correct props', () => {
- expect(wrapper.find(ClipboardButton).props('text')).toBe(sampleEmail);
+ it('sets CopyableField `value` prop to issueEmailAddress', () => {
+ expect(wrapper.find(CopyableField).props('value')).toBe(mockIssueEmailAddress);
});
});
diff --git a/spec/frontend/sidebar/components/due_date/sidebar_due_date_widget_spec.js b/spec/frontend/sidebar/components/due_date/sidebar_due_date_widget_spec.js
new file mode 100644
index 00000000000..f58ceb0f1be
--- /dev/null
+++ b/spec/frontend/sidebar/components/due_date/sidebar_due_date_widget_spec.js
@@ -0,0 +1,106 @@
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import SidebarDueDateWidget from '~/sidebar/components/due_date/sidebar_due_date_widget.vue';
+import SidebarEditableItem from '~/sidebar/components/sidebar_editable_item.vue';
+import issueDueDateQuery from '~/sidebar/queries/issue_due_date.query.graphql';
+import { issueDueDateResponse } from '../../mock_data';
+
+jest.mock('~/flash');
+
+Vue.use(VueApollo);
+
+describe('Sidebar Due date Widget', () => {
+ let wrapper;
+ let fakeApollo;
+ const date = '2021-04-15';
+
+ const findEditableItem = () => wrapper.findComponent(SidebarEditableItem);
+ const findFormattedDueDate = () => wrapper.find("[data-testid='sidebar-duedate-value']");
+
+ const createComponent = ({
+ dueDateQueryHandler = jest.fn().mockResolvedValue(issueDueDateResponse()),
+ } = {}) => {
+ fakeApollo = createMockApollo([[issueDueDateQuery, dueDateQueryHandler]]);
+
+ wrapper = shallowMount(SidebarDueDateWidget, {
+ apolloProvider: fakeApollo,
+ provide: {
+ fullPath: 'group/project',
+ iid: '1',
+ canUpdate: true,
+ },
+ propsData: {
+ issuableType: 'issue',
+ },
+ stubs: {
+ SidebarEditableItem,
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ fakeApollo = null;
+ });
+
+ it('passes a `loading` prop as true to editable item when query is loading', () => {
+ createComponent();
+
+ expect(findEditableItem().props('loading')).toBe(true);
+ });
+
+ describe('when issue has no due date', () => {
+ beforeEach(async () => {
+ createComponent({
+ dueDateQueryHandler: jest.fn().mockResolvedValue(issueDueDateResponse(null)),
+ });
+ await waitForPromises();
+ });
+
+ it('passes a `loading` prop as false to editable item', () => {
+ expect(findEditableItem().props('loading')).toBe(false);
+ });
+
+    it('renders "None" when the due date is null', () => {
+ expect(findFormattedDueDate().text()).toBe('None');
+ });
+
+ it('emits `dueDateUpdated` event with a `null` payload', () => {
+ expect(wrapper.emitted('dueDateUpdated')).toEqual([[null]]);
+ });
+ });
+
+ describe('when issue has due date', () => {
+ beforeEach(async () => {
+ createComponent({
+ dueDateQueryHandler: jest.fn().mockResolvedValue(issueDueDateResponse(date)),
+ });
+ await waitForPromises();
+ });
+
+ it('passes a `loading` prop as false to editable item', () => {
+ expect(findEditableItem().props('loading')).toBe(false);
+ });
+
+    it('renders the formatted due date', () => {
+ expect(findFormattedDueDate().text()).toBe('Apr 15, 2021');
+ });
+
+ it('emits `dueDateUpdated` event with the date payload', () => {
+ expect(wrapper.emitted('dueDateUpdated')).toEqual([[date]]);
+ });
+ });
+
+ it('displays a flash message when query is rejected', async () => {
+ createComponent({
+ dueDateQueryHandler: jest.fn().mockRejectedValue('Houston, we have a problem'),
+ });
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/sidebar/components/reference/sidebar_reference_widget_spec.js b/spec/frontend/sidebar/components/reference/sidebar_reference_widget_spec.js
index 1dbb7702a15..cc428693930 100644
--- a/spec/frontend/sidebar/components/reference/sidebar_reference_widget_spec.js
+++ b/spec/frontend/sidebar/components/reference/sidebar_reference_widget_spec.js
@@ -1,4 +1,3 @@
-import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import VueApollo from 'vue-apollo';
@@ -8,18 +7,21 @@ import { IssuableType } from '~/issue_show/constants';
import SidebarReferenceWidget from '~/sidebar/components/reference/sidebar_reference_widget.vue';
import issueReferenceQuery from '~/sidebar/queries/issue_reference.query.graphql';
import mergeRequestReferenceQuery from '~/sidebar/queries/merge_request_reference.query.graphql';
-import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+import CopyableField from '~/vue_shared/components/sidebar/copyable_field.vue';
import { issueReferenceResponse } from '../../mock_data';
describe('Sidebar Reference Widget', () => {
let wrapper;
let fakeApollo;
- const referenceText = 'reference';
+
+ const mockReferenceValue = 'reference-1234';
+
+ const findCopyableField = () => wrapper.findComponent(CopyableField);
const createComponent = ({
- issuableType,
+ issuableType = IssuableType.Issue,
referenceQuery = issueReferenceQuery,
- referenceQueryHandler = jest.fn().mockResolvedValue(issueReferenceResponse(referenceText)),
+ referenceQueryHandler = jest.fn().mockResolvedValue(issueReferenceResponse(mockReferenceValue)),
} = {}) => {
Vue.use(VueApollo);
@@ -39,14 +41,20 @@ describe('Sidebar Reference Widget', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
+ });
+
+ describe('when reference is loading', () => {
+ it('sets CopyableField `is-loading` prop to `true`', () => {
+ createComponent({ referenceQueryHandler: jest.fn().mockReturnValue(new Promise(() => {})) });
+ expect(findCopyableField().props('isLoading')).toBe(true);
+ });
});
describe.each([
[IssuableType.Issue, issueReferenceQuery],
[IssuableType.MergeRequest, mergeRequestReferenceQuery],
])('when issuableType is %s', (issuableType, referenceQuery) => {
- it('displays the reference text', async () => {
+ it('sets CopyableField `value` prop to reference value', async () => {
createComponent({
issuableType,
referenceQuery,
@@ -54,40 +62,32 @@ describe('Sidebar Reference Widget', () => {
await waitForPromises();
- expect(wrapper.text()).toContain(referenceText);
+ expect(findCopyableField().props('value')).toBe(mockReferenceValue);
});
- it('displays loading icon while fetching and hides clipboard icon', async () => {
- createComponent({
- issuableType,
- referenceQuery,
- });
+ describe('when error occurs', () => {
+ it('calls createFlash with correct parameters', async () => {
+ const mockError = new Error('mayday');
- expect(wrapper.find(GlLoadingIcon).exists()).toBe(true);
- expect(wrapper.find(ClipboardButton).exists()).toBe(false);
- });
+ createComponent({
+ issuableType,
+ referenceQuery,
+ referenceQueryHandler: jest.fn().mockRejectedValue(mockError),
+ });
- it('calls createFlash with correct parameters', async () => {
- const mockError = new Error('mayday');
+ await waitForPromises();
- createComponent({
- issuableType,
- referenceQuery,
- referenceQueryHandler: jest.fn().mockRejectedValue(mockError),
+ const [
+ [
+ {
+ message,
+ error: { networkError },
+ },
+ ],
+ ] = wrapper.emitted('fetch-error');
+ expect(message).toBe('An error occurred while fetching reference');
+ expect(networkError).toEqual(mockError);
});
-
- await waitForPromises();
-
- const [
- [
- {
- message,
- error: { networkError },
- },
- ],
- ] = wrapper.emitted('fetch-error');
- expect(message).toBe('An error occurred while fetching reference');
- expect(networkError).toEqual(mockError);
});
});
});
diff --git a/spec/frontend/sidebar/issuable_assignees_spec.js b/spec/frontend/sidebar/issuable_assignees_spec.js
index af4dc315aad..3563d478f3f 100644
--- a/spec/frontend/sidebar/issuable_assignees_spec.js
+++ b/spec/frontend/sidebar/issuable_assignees_spec.js
@@ -5,12 +5,15 @@ import UncollapsedAssigneeList from '~/sidebar/components/assignees/uncollapsed_
describe('IssuableAssignees', () => {
let wrapper;
- const createComponent = (props = { users: [] }) => {
+ const createComponent = (props = {}) => {
wrapper = shallowMount(IssuableAssignees, {
provide: {
rootPath: '',
},
- propsData: { ...props },
+ propsData: {
+ users: [],
+ ...props,
+ },
});
};
const findUncollapsedAssigneeList = () => wrapper.find(UncollapsedAssigneeList);
@@ -22,12 +25,14 @@ describe('IssuableAssignees', () => {
});
describe('when no assignees are present', () => {
- beforeEach(() => {
- createComponent();
+ it('renders "None - assign yourself" when user is logged in', () => {
+ createComponent({ signedIn: true });
+ expect(findEmptyAssignee().text()).toBe('None - assign yourself');
});
- it('renders "None - assign yourself"', () => {
- expect(findEmptyAssignee().text()).toBe('None - assign yourself');
+ it('renders "None" when user is not logged in', () => {
+ createComponent();
+ expect(findEmptyAssignee().text()).toBe('None');
});
});
@@ -41,7 +46,7 @@ describe('IssuableAssignees', () => {
describe('when clicking "assign yourself"', () => {
it('emits "assign-self"', () => {
- createComponent();
+ createComponent({ signedIn: true });
wrapper.find('[data-testid="assign-yourself"]').vm.$emit('click');
expect(wrapper.emitted('assign-self')).toHaveLength(1);
});
diff --git a/spec/frontend/sidebar/mock_data.js b/spec/frontend/sidebar/mock_data.js
index e751f1239c8..2a4858a6320 100644
--- a/spec/frontend/sidebar/mock_data.js
+++ b/spec/frontend/sidebar/mock_data.js
@@ -233,6 +233,19 @@ export const issueConfidentialityResponse = (confidential = false) => ({
},
});
+export const issueDueDateResponse = (dueDate = null) => ({
+ data: {
+ workspace: {
+ __typename: 'Project',
+ issuable: {
+ __typename: 'Issue',
+ id: 'gid://gitlab/Issue/4',
+ dueDate,
+ },
+ },
+ },
+});
+
export const issueReferenceResponse = (reference) => ({
data: {
workspace: {
@@ -245,4 +258,147 @@ export const issueReferenceResponse = (reference) => ({
},
},
});
+
+export const issuableQueryResponse = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ issuable: {
+ __typename: 'Issue',
+ id: 'gid://gitlab/Issue/1',
+ iid: '1',
+ participants: {
+ nodes: [
+ {
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: '/root',
+ status: null,
+ },
+ {
+ id: 'gid://gitlab/User/2',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/a95e5b71488f4b9d69ce5ff58bfd28d6?s=80\u0026d=identicon',
+ name: 'Jacki Kub',
+ username: 'francina.skiles',
+ webUrl: '/franc',
+ status: {
+ availability: 'BUSY',
+ },
+ },
+ {
+ id: 'gid://gitlab/User/3',
+ avatarUrl: '/avatar',
+ name: 'John Doe',
+ username: 'johndoe',
+ webUrl: '/john',
+ status: null,
+ },
+ ],
+ },
+ assignees: {
+ nodes: [
+ {
+ id: 'gid://gitlab/User/2',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/a95e5b71488f4b9d69ce5ff58bfd28d6?s=80\u0026d=identicon',
+ name: 'Jacki Kub',
+ username: 'francina.skiles',
+ webUrl: '/franc',
+ status: null,
+ },
+ ],
+ },
+ },
+ },
+ },
+};
+
+export const searchQueryResponse = {
+ data: {
+ workspace: {
+ __typename: 'Project',
+ users: {
+ nodes: [
+ {
+ user: {
+ id: '1',
+ avatarUrl: '/avatar',
+ name: 'root',
+ username: 'root',
+ webUrl: 'root',
+ status: null,
+ },
+ },
+ {
+ user: {
+ id: '2',
+ avatarUrl: '/avatar2',
+ name: 'rookie',
+ username: 'rookie',
+ webUrl: 'rookie',
+ status: null,
+ },
+ },
+ ],
+ },
+ },
+ },
+};
+
+export const updateIssueAssigneesMutationResponse = {
+ data: {
+ issuableSetAssignees: {
+ issuable: {
+ id: 'gid://gitlab/Issue/1',
+ iid: '1',
+ assignees: {
+ nodes: [
+ {
+ __typename: 'User',
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: '/root',
+ status: null,
+ },
+ ],
+ __typename: 'UserConnection',
+ },
+ participants: {
+ nodes: [
+ {
+ __typename: 'User',
+ id: 'gid://gitlab/User/1',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80\u0026d=identicon',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: '/root',
+ status: null,
+ },
+ {
+ __typename: 'User',
+ id: 'gid://gitlab/User/2',
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/a95e5b71488f4b9d69ce5ff58bfd28d6?s=80\u0026d=identicon',
+ name: 'Jacki Kub',
+ username: 'francina.skiles',
+ webUrl: '/franc',
+ status: null,
+ },
+ ],
+ __typename: 'UserConnection',
+ },
+ __typename: 'Issue',
+ },
+ },
+ },
+};
+
export default mockData;
diff --git a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
index cef5f8cc528..22e206bb483 100644
--- a/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
+++ b/spec/frontend/snippets/components/__snapshots__/snippet_description_edit_spec.js.snap
@@ -25,9 +25,11 @@ exports[`Snippet Description Edit component rendering matches the snapshot 1`] =
<div
class="js-vue-markdown-field md-area position-relative gfm-form js-expanded"
+ data-uploads-path=""
>
<markdown-header-stub
linecontent=""
+ suggestionstartindex="0"
/>
<div
diff --git a/spec/frontend/snippets/components/edit_spec.js b/spec/frontend/snippets/components/edit_spec.js
index 2b6d3ca8c2a..efdb52cfcd9 100644
--- a/spec/frontend/snippets/components/edit_spec.js
+++ b/spec/frontend/snippets/components/edit_spec.js
@@ -5,10 +5,9 @@ import { nextTick } from 'vue';
import VueApollo, { ApolloMutation } from 'vue-apollo';
import { useFakeDate } from 'helpers/fake_date';
import createMockApollo from 'helpers/mock_apollo_helper';
-import { stubComponent } from 'helpers/stub_component';
import waitForPromises from 'helpers/wait_for_promises';
import GetSnippetQuery from 'shared_queries/snippet/snippet.query.graphql';
-import CaptchaModal from '~/captcha/captcha_modal.vue';
+import UnsolvedCaptchaError from '~/captcha/unsolved_captcha_error';
import { deprecatedCreateFlash as Flash } from '~/flash';
import * as urlUtils from '~/lib/utils/url_utility';
import SnippetEditApp from '~/snippets/components/edit.vue';
@@ -30,9 +29,8 @@ jest.mock('~/flash');
const TEST_UPLOADED_FILES = ['foo/bar.txt', 'alpha/beta.js'];
const TEST_API_ERROR = new Error('TEST_API_ERROR');
+const TEST_CAPTCHA_ERROR = new UnsolvedCaptchaError();
const TEST_MUTATION_ERROR = 'Test mutation error';
-const TEST_CAPTCHA_RESPONSE = 'i-got-a-captcha';
-const TEST_CAPTCHA_SITE_KEY = 'abc123';
const TEST_ACTIONS = {
NO_CONTENT: merge({}, testEntries.created.diff, { content: '' }),
NO_PATH: merge({}, testEntries.created.diff, { filePath: '' }),
@@ -59,9 +57,6 @@ const createMutationResponse = (key, obj = {}) => ({
__typename: 'Snippet',
webUrl: TEST_WEB_URL,
},
- spamLogId: null,
- needsCaptchaResponse: false,
- captchaSiteKey: null,
},
obj,
),
@@ -71,13 +66,6 @@ const createMutationResponse = (key, obj = {}) => ({
const createMutationResponseWithErrors = (key) =>
createMutationResponse(key, { errors: [TEST_MUTATION_ERROR] });
-const createMutationResponseWithRecaptcha = (key) =>
- createMutationResponse(key, {
- errors: ['ignored captcha error message'],
- needsCaptchaResponse: true,
- captchaSiteKey: TEST_CAPTCHA_SITE_KEY,
- });
-
const getApiData = ({
id,
title = '',
@@ -126,7 +114,6 @@ describe('Snippet Edit app', () => {
});
const findBlobActions = () => wrapper.find(SnippetBlobActionsEdit);
- const findCaptchaModal = () => wrapper.find(CaptchaModal);
const findSubmitButton = () => wrapper.find('[data-testid="snippet-submit-btn"]');
const findCancelButton = () => wrapper.find('[data-testid="snippet-cancel-btn"]');
const hasDisabledSubmit = () => Boolean(findSubmitButton().attributes('disabled'));
@@ -159,7 +146,6 @@ describe('Snippet Edit app', () => {
stubs: {
ApolloMutation,
FormFooterActions,
- CaptchaModal: stubComponent(CaptchaModal),
},
provide: {
selectedLevel,
@@ -209,7 +195,6 @@ describe('Snippet Edit app', () => {
});
it('should render components', () => {
- expect(wrapper.find(CaptchaModal).exists()).toBe(true);
expect(wrapper.find(TitleField).exists()).toBe(true);
expect(wrapper.find(SnippetDescriptionEdit).exists()).toBe(true);
expect(wrapper.find(SnippetVisibilityEdit).exists()).toBe(true);
@@ -338,10 +323,10 @@ describe('Snippet Edit app', () => {
},
);
- describe('with apollo network error', () => {
+ describe.each([TEST_API_ERROR, TEST_CAPTCHA_ERROR])('with apollo network error', (error) => {
beforeEach(async () => {
jest.spyOn(console, 'error').mockImplementation();
- mutateSpy.mockRejectedValue(TEST_API_ERROR);
+ mutateSpy.mockRejectedValue(error);
await createComponentAndSubmit();
});
@@ -353,7 +338,7 @@ describe('Snippet Edit app', () => {
it('should flash', () => {
// Apollo automatically wraps the resolver's error in a NetworkError
expect(Flash).toHaveBeenCalledWith(
- `Can't update snippet: Network error: ${TEST_API_ERROR.message}`,
+ `Can't update snippet: Network error: ${error.message}`,
);
});
@@ -363,54 +348,10 @@ describe('Snippet Edit app', () => {
// eslint-disable-next-line no-console
expect(console.error).toHaveBeenCalledWith(
'[gitlab] unexpected error while updating snippet',
- expect.objectContaining({ message: `Network error: ${TEST_API_ERROR.message}` }),
+ expect.objectContaining({ message: `Network error: ${error.message}` }),
);
});
});
-
- describe('when needsCaptchaResponse is true', () => {
- let modal;
-
- beforeEach(async () => {
- mutateSpy
- .mockResolvedValueOnce(createMutationResponseWithRecaptcha('updateSnippet'))
- .mockResolvedValueOnce(createMutationResponseWithErrors('updateSnippet'));
-
- await createComponentAndSubmit();
-
- modal = findCaptchaModal();
-
- mutateSpy.mockClear();
- });
-
- it('should display captcha modal', () => {
- expect(urlUtils.redirectTo).not.toHaveBeenCalled();
- expect(modal.props()).toEqual({
- needsCaptchaResponse: true,
- captchaSiteKey: TEST_CAPTCHA_SITE_KEY,
- });
- });
-
- describe.each`
- response | expectedCalls
- ${null} | ${[]}
- ${TEST_CAPTCHA_RESPONSE} | ${[['updateSnippet', { input: { ...getApiData(createSnippet()), captchaResponse: TEST_CAPTCHA_RESPONSE } }]]}
- `('when captcha response is $response', ({ response, expectedCalls }) => {
- beforeEach(async () => {
- modal.vm.$emit('receivedCaptchaResponse', response);
-
- await nextTick();
- });
-
- it('sets needsCaptchaResponse to false', () => {
- expect(modal.props('needsCaptchaResponse')).toEqual(false);
- });
-
- it(`expected to call times = ${expectedCalls.length}`, () => {
- expect(mutateSpy.mock.calls).toEqual(expectedCalls);
- });
- });
- });
});
});
diff --git a/spec/frontend/tags/components/sort_dropdown_spec.js b/spec/frontend/tags/components/sort_dropdown_spec.js
new file mode 100644
index 00000000000..b0fd98ec68e
--- /dev/null
+++ b/spec/frontend/tags/components/sort_dropdown_spec.js
@@ -0,0 +1,81 @@
+import { GlDropdownItem, GlSearchBoxByClick } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import * as urlUtils from '~/lib/utils/url_utility';
+import SortDropdown from '~/tags/components/sort_dropdown.vue';
+
+describe('Tags sort dropdown', () => {
+ let wrapper;
+
+ const createWrapper = (props = {}) => {
+ return extendedWrapper(
+ mount(SortDropdown, {
+ provide: {
+ filterTagsPath: '/root/ci-cd-project-demo/-/tags',
+ sortOptions: {
+ name_asc: 'Name',
+ updated_asc: 'Oldest updated',
+ updated_desc: 'Last updated',
+ },
+ ...props,
+ },
+ }),
+ );
+ };
+
+ const findSearchBox = () => wrapper.findComponent(GlSearchBoxByClick);
+ const findTagsDropdown = () => wrapper.findByTestId('tags-dropdown');
+
+ afterEach(() => {
+ if (wrapper) {
+ wrapper.destroy();
+ }
+ });
+
+ describe('default state', () => {
+ beforeEach(() => {
+ wrapper = createWrapper();
+ });
+
+ it('should have a search box with a placeholder', () => {
+ const searchBox = findSearchBox();
+
+ expect(searchBox.exists()).toBe(true);
+ expect(searchBox.find('input').attributes('placeholder')).toBe('Filter by tag name');
+ });
+
+ it('should have a sort order dropdown', () => {
+      const tagsDropdown = findTagsDropdown();
+
+      expect(tagsDropdown.exists()).toBe(true);
+ });
+ });
+
+ describe('when submitting a search term', () => {
+ beforeEach(() => {
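+      // Stub visitUrl so the test can assert the navigation URL without actually changing the location.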
+ urlUtils.visitUrl = jest.fn();
+
+ wrapper = createWrapper();
+ });
+
+ it('should call visitUrl', () => {
+ const searchBox = findSearchBox();
+
+ searchBox.vm.$emit('submit');
+
+ expect(urlUtils.visitUrl).toHaveBeenCalledWith(
+ '/root/ci-cd-project-demo/-/tags?sort=updated_desc',
+ );
+ });
+
+ it('should send a sort parameter', () => {
+ const sortDropdownItems = findTagsDropdown().findAllComponents(GlDropdownItem).at(0);
+
+ sortDropdownItems.vm.$emit('click');
+
+ expect(urlUtils.visitUrl).toHaveBeenCalledWith(
+ '/root/ci-cd-project-demo/-/tags?sort=name_asc',
+ );
+ });
+ });
+});
diff --git a/spec/frontend/tracking_spec.js b/spec/frontend/tracking_spec.js
index 6a22de3be5c..2c7bcaa98b0 100644
--- a/spec/frontend/tracking_spec.js
+++ b/spec/frontend/tracking_spec.js
@@ -176,25 +176,29 @@ describe('Tracking', () => {
});
});
- describe('tracking interface events', () => {
+ describe.each`
+ term
+ ${'event'}
+ ${'action'}
+ `('tracking interface events with data-track-$term', ({ term }) => {
let eventSpy;
beforeEach(() => {
eventSpy = jest.spyOn(Tracking, 'event');
Tracking.bindDocument('_category_'); // only happens once
setHTMLFixture(`
- <input data-track-event="click_input1" data-track-label="_label_" value="_value_"/>
- <input data-track-event="click_input2" data-track-value="_value_override_" value="_value_"/>
- <input type="checkbox" data-track-event="toggle_checkbox" value="_value_" checked/>
- <input class="dropdown" data-track-event="toggle_dropdown"/>
- <div data-track-event="nested_event"><span class="nested"></span></div>
- <input data-track-eventbogus="click_bogusinput" data-track-label="_label_" value="_value_"/>
- <input data-track-event="click_input3" data-track-experiment="example" value="_value_"/>
+ <input data-track-${term}="click_input1" data-track-label="_label_" value="_value_"/>
+ <input data-track-${term}="click_input2" data-track-value="_value_override_" value="_value_"/>
+ <input type="checkbox" data-track-${term}="toggle_checkbox" value="_value_" checked/>
+ <input class="dropdown" data-track-${term}="toggle_dropdown"/>
+ <div data-track-${term}="nested_event"><span class="nested"></span></div>
+ <input data-track-bogus="click_bogusinput" data-track-label="_label_" value="_value_"/>
+ <input data-track-${term}="click_input3" data-track-experiment="example" value="_value_"/>
`);
});
- it('binds to clicks on elements matching [data-track-event]', () => {
- document.querySelector('[data-track-event="click_input1"]').click();
+ it(`binds to clicks on elements matching [data-track-${term}]`, () => {
+ document.querySelector(`[data-track-${term}="click_input1"]`).click();
expect(eventSpy).toHaveBeenCalledWith('_category_', 'click_input1', {
label: '_label_',
@@ -202,14 +206,14 @@ describe('Tracking', () => {
});
});
- it('does not bind to clicks on elements without [data-track-event]', () => {
- document.querySelector('[data-track-eventbogus="click_bogusinput"]').click();
+ it(`does not bind to clicks on elements without [data-track-${term}]`, () => {
+ document.querySelector('[data-track-bogus="click_bogusinput"]').click();
expect(eventSpy).not.toHaveBeenCalled();
});
it('allows value override with the data-track-value attribute', () => {
- document.querySelector('[data-track-event="click_input2"]').click();
+ document.querySelector(`[data-track-${term}="click_input2"]`).click();
expect(eventSpy).toHaveBeenCalledWith('_category_', 'click_input2', {
value: '_value_override_',
@@ -217,7 +221,7 @@ describe('Tracking', () => {
});
it('handles checkbox values correctly', () => {
- const checkbox = document.querySelector('[data-track-event="toggle_checkbox"]');
+ const checkbox = document.querySelector(`[data-track-${term}="toggle_checkbox"]`);
checkbox.click(); // unchecking
@@ -233,7 +237,7 @@ describe('Tracking', () => {
});
it('handles bootstrap dropdowns', () => {
- const dropdown = document.querySelector('[data-track-event="toggle_dropdown"]');
+ const dropdown = document.querySelector(`[data-track-${term}="toggle_dropdown"]`);
dropdown.dispatchEvent(new Event('show.bs.dropdown', { bubbles: true }));
@@ -250,7 +254,7 @@ describe('Tracking', () => {
expect(eventSpy).toHaveBeenCalledWith('_category_', 'nested_event', {});
});
- it('brings in experiment data if linked to an experiment', () => {
+ it('includes experiment data if linked to an experiment', () => {
const mockExperimentData = {
variant: 'candidate',
experiment: 'repo_integrations_link',
@@ -258,7 +262,7 @@ describe('Tracking', () => {
};
getExperimentData.mockReturnValue(mockExperimentData);
- document.querySelector('[data-track-event="click_input3"]').click();
+ document.querySelector(`[data-track-${term}="click_input3"]`).click();
expect(eventSpy).toHaveBeenCalledWith('_category_', 'click_input3', {
value: '_value_',
@@ -267,22 +271,26 @@ describe('Tracking', () => {
});
});
- describe('tracking page loaded events', () => {
+ describe.each`
+ term
+ ${'event'}
+ ${'action'}
+  `('tracking page loaded events with data-track-$term', ({ term }) => {
let eventSpy;
beforeEach(() => {
eventSpy = jest.spyOn(Tracking, 'event');
setHTMLFixture(`
- <input data-track-event="render" data-track-label="label1" value="_value_" data-track-property="_property_"/>
- <span data-track-event="render" data-track-label="label2" data-track-value="_value_">
+ <input data-track-${term}="render" data-track-label="label1" value="_value_" data-track-property="_property_"/>
+ <span data-track-${term}="render" data-track-label="label2" data-track-value="_value_">
Something
</span>
- <input data-track-event="_render_bogus_" data-track-label="label3" value="_value_" data-track-property="_property_"/>
+ <input data-track-${term}="_render_bogus_" data-track-label="label3" value="_value_" data-track-property="_property_"/>
`);
Tracking.trackLoadEvents('_category_'); // only happens once
});
- it('sends tracking events when [data-track-event="render"] is on an element', () => {
+ it(`sends tracking events when [data-track-${term}="render"] is on an element`, () => {
expect(eventSpy.mock.calls).toEqual([
[
'_category_',
@@ -318,6 +326,30 @@ describe('Tracking', () => {
mixin.computed.tracking = { foo: 'baz', baz: 'bar' };
expect(mixin.computed.trackingOptions()).toEqual({ foo: 'baz', baz: 'bar' });
});
+
+ it('includes experiment data if linked to an experiment', () => {
+ const mockExperimentData = {
+ variant: 'candidate',
+ experiment: 'darkMode',
+ };
+ getExperimentData.mockReturnValue(mockExperimentData);
+
+ const mixin = Tracking.mixin({ foo: 'bar', experiment: 'darkMode' });
+ expect(mixin.computed.trackingOptions()).toEqual({
+ foo: 'bar',
+ context: {
+ schema: TRACKING_CONTEXT_SCHEMA,
+ data: mockExperimentData,
+ },
+ });
+ });
+
+ it('does not include experiment data if experiment data does not exist', () => {
+ const mixin = Tracking.mixin({ foo: 'bar', experiment: 'lightMode' });
+ expect(mixin.computed.trackingOptions()).toEqual({
+ foo: 'bar',
+ });
+ });
});
describe('trackingCategory', () => {
diff --git a/spec/frontend/users_select/index_spec.js b/spec/frontend/users_select/index_spec.js
new file mode 100644
index 00000000000..5b07087b76c
--- /dev/null
+++ b/spec/frontend/users_select/index_spec.js
@@ -0,0 +1,223 @@
+import { waitFor } from '@testing-library/dom';
+import MockAdapter from 'axios-mock-adapter';
+import { cloneDeep } from 'lodash';
+import { getJSONFixture } from 'helpers/fixtures';
+import axios from '~/lib/utils/axios_utils';
+import UsersSelect from '~/users_select';
+
+// TODO: generate this from a fixture that guarantees the same output in CE and EE [(see issue)][1].
+// Hardcoding this HTML temporarily fixes a FOSS ~"master::broken" [(see issue)][2].
+// [1]: https://gitlab.com/gitlab-org/gitlab/-/issues/327809
+// [2]: https://gitlab.com/gitlab-org/gitlab/-/issues/327805
+const getUserSearchHTML = () => `
+<div class="js-sidebar-assignee-data selectbox hide-collapsed">
+<input type="hidden" name="merge_request[assignee_ids][]" value="0">
+<div class="dropdown js-sidebar-assignee-dropdown">
+<button class="dropdown-menu-toggle js-user-search js-author-search js-multiselect js-save-user-data js-invite-members-track" type="button" data-first-user="frontend-fixtures" data-current-user="true" data-iid="1" data-issuable-type="merge_request" data-project-id="1" data-author-id="1" data-field-name="merge_request[assignee_ids][]" data-issue-update="http://test.host/frontend-fixtures/merge-requests-project/-/merge_requests/1.json" data-ability-name="merge_request" data-null-user="true" data-display="static" data-multi-select="true" data-dropdown-title="Select assignee(s)" data-dropdown-header="Assignee(s)" data-track-event="show_invite_members" data-toggle="dropdown"><span class="dropdown-toggle-text ">Select assignee(s)</span><svg class="s16 dropdown-menu-toggle-icon gl-top-3" data-testid="chevron-down-icon"><use xlink:href="http://test.host/assets/icons-16c30bec0d8a57f0a33e6f6215c6aff7a6ec5e4a7e6b7de733a6b648541a336a.svg#chevron-down"></use></svg></button><div class="dropdown-menu dropdown-select dropdown-menu-user dropdown-menu-selectable dropdown-menu-author dropdown-extended-height">
+<div class="dropdown-title gl-display-flex">
+<span class="gl-ml-auto">Assign to</span><button class="dropdown-title-button dropdown-menu-close gl-ml-auto" aria-label="Close" type="button"><svg class="s16 dropdown-menu-close-icon" data-testid="close-icon"><use xlink:href="http://test.host/assets/icons-16c30bec0d8a57f0a33e6f6215c6aff7a6ec5e4a7e6b7de733a6b648541a336a.svg#close"></use></svg></button>
+</div>
+<div class="dropdown-input">
+<input type="search" id="" data-qa-selector="dropdown_input_field" class="dropdown-input-field" placeholder="Search users" autocomplete="off"><svg class="s16 dropdown-input-search" data-testid="search-icon"><use xlink:href="http://test.host/assets/icons-16c30bec0d8a57f0a33e6f6215c6aff7a6ec5e4a7e6b7de733a6b648541a336a.svg#search"></use></svg><svg class="s16 dropdown-input-clear js-dropdown-input-clear" data-testid="close-icon"><use xlink:href="http://test.host/assets/icons-16c30bec0d8a57f0a33e6f6215c6aff7a6ec5e4a7e6b7de733a6b648541a336a.svg#close"></use></svg>
+</div>
+<div data-qa-selector="dropdown_list_content" class="dropdown-content "></div>
+<div class="dropdown-footer">
+<ul class="dropdown-footer-list">
+<li>
+<div class="js-invite-members-trigger" data-display-text="Invite Members" data-event="click_invite_members" data-label="edit_assignee" data-trigger-element="anchor"></div>
+</li>
+</ul>
+</div>
+<div class="dropdown-loading"><div class="gl-spinner-container"><span class="gl-spinner gl-spinner-orange gl-spinner-md gl-mt-7" aria-label="Loading"></span></div></div>
+</div>
+</div>
+</div>
+`;
+
+const USER_SEARCH_HTML = getUserSearchHTML();
+const AUTOCOMPLETE_USERS = getJSONFixture('autocomplete/users.json');
+
+describe('~/users_select/index', () => {
+ let subject;
+ let mock;
+
+ const createSubject = (currentUser = null) => {
+ if (subject) {
+ throw new Error('test subject is already created');
+ }
+
+ subject = new UsersSelect(currentUser);
+ };
+
+ // finders -------------------------------------------------------------------
+ const findAssigneesInputs = () =>
+    document.querySelectorAll('input[name="merge_request[assignee_ids][]"]');
+ const findAssigneesInputsModel = () =>
+ Array.from(findAssigneesInputs()).map((input) => ({
+ value: input.value,
+ dataset: { ...input.dataset },
+ }));
+ const findUserSearchButton = () => document.querySelector('.js-user-search');
+ const findDropdownItem = ({ id }) => document.querySelector(`li[data-user-id="${id}"] a`);
+ const findDropdownItemsModel = () =>
+ Array.from(document.querySelectorAll('.dropdown-content li')).map((el) => {
+ if (el.classList.contains('divider')) {
+ return {
+ type: 'divider',
+ };
+ } else if (el.classList.contains('dropdown-header')) {
+ return {
+ type: 'dropdown-header',
+ text: el.textContent,
+ };
+ }
+
+ return {
+ type: 'user',
+ userId: el.dataset.userId,
+ };
+ });
+
+ // arrange/act helpers -------------------------------------------------------
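+  // Replaces the hidden assignee inputs to simulate assignees that were already selected before UsersSelect is initialized.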
+ const setAssignees = (...users) => {
+ findAssigneesInputs().forEach((x) => x.remove());
+
+ const container = document.querySelector('.js-sidebar-assignee-data');
+
+ container.prepend(
+ ...users.map((user) => {
+ const input = document.createElement('input');
+ input.name = 'merge_request[assignee_ids][]';
+ input.value = user.id.toString();
+ input.setAttribute('data-avatar-url', user.avatar_url);
+ input.setAttribute('data-name', user.name);
+ input.setAttribute('data-username', user.username);
+ input.setAttribute('data-can-merge', user.can_merge);
+ return input;
+ }),
+ );
+ };
+ const toggleDropdown = () => findUserSearchButton().click();
+ const waitForDropdownItems = () =>
+ waitFor(() => expect(findDropdownItem(AUTOCOMPLETE_USERS[0])).not.toBeNull());
+
+ // assertion helpers ---------------------------------------------------------
+ const createUnassignedExpectation = () => {
+ return [
+ { type: 'user', userId: '0' },
+ { type: 'divider' },
+ ...AUTOCOMPLETE_USERS.map((x) => ({ type: 'user', userId: x.id.toString() })),
+ ];
+ };
+ const createAssignedExpectation = (...selectedUsers) => {
+ const selectedIds = new Set(selectedUsers.map((x) => x.id));
+ const unselectedUsers = AUTOCOMPLETE_USERS.filter((x) => !selectedIds.has(x.id));
+
+ return [
+ { type: 'user', userId: '0' },
+ { type: 'divider' },
+ { type: 'dropdown-header', text: 'Assignee(s)' },
+ ...selectedUsers.map((x) => ({ type: 'user', userId: x.id.toString() })),
+ { type: 'divider' },
+ ...unselectedUsers.map((x) => ({ type: 'user', userId: x.id.toString() })),
+ ];
+ };
+
+ beforeEach(() => {
+ const rootEl = document.createElement('div');
+ rootEl.innerHTML = USER_SEARCH_HTML;
+ document.body.appendChild(rootEl);
+
+ mock = new MockAdapter(axios);
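+    // Serve a deep copy of the fixture so the shared AUTOCOMPLETE_USERS data cannot be mutated by the dropdown code.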
+ mock.onGet('/-/autocomplete/users.json').reply(200, cloneDeep(AUTOCOMPLETE_USERS));
+ });
+
+ afterEach(() => {
+ document.body.innerHTML = '';
+ subject = null;
+ });
+
+ describe('when opened', () => {
+ beforeEach(async () => {
+ createSubject();
+
+ toggleDropdown();
+ await waitForDropdownItems();
+ });
+
+ it('shows users', () => {
+ expect(findDropdownItemsModel()).toEqual(createUnassignedExpectation());
+ });
+
+ describe('when users are selected', () => {
+ const selectedUsers = [AUTOCOMPLETE_USERS[2], AUTOCOMPLETE_USERS[4]];
+ const expectation = createAssignedExpectation(...selectedUsers);
+
+ beforeEach(() => {
+ selectedUsers.forEach((user) => {
+ findDropdownItem(user).click();
+ });
+ });
+
+ it('shows assignee', () => {
+ expect(findDropdownItemsModel()).toEqual(expectation);
+ });
+
+ it('shows assignee even after close and open', () => {
+ toggleDropdown();
+ toggleDropdown();
+
+ expect(findDropdownItemsModel()).toEqual(expectation);
+ });
+
+ it('updates field', () => {
+ expect(findAssigneesInputsModel()).toEqual(
+ selectedUsers.map((user) => ({
+ value: user.id.toString(),
+ dataset: {
+ approved: user.approved.toString(),
+ avatar_url: user.avatar_url,
+ can_merge: user.can_merge.toString(),
+ can_update_merge_request: user.can_update_merge_request.toString(),
+ id: user.id.toString(),
+ name: user.name,
+ show_status: user.show_status.toString(),
+ state: user.state,
+ username: user.username,
+ web_url: user.web_url,
+ },
+ })),
+ );
+ });
+ });
+ });
+
+ describe('with preselected user and opened', () => {
+ const expectation = createAssignedExpectation(AUTOCOMPLETE_USERS[0]);
+
+ beforeEach(async () => {
+ setAssignees(AUTOCOMPLETE_USERS[0]);
+
+ createSubject();
+
+ toggleDropdown();
+ await waitForDropdownItems();
+ });
+
+ it('shows users', () => {
+ expect(findDropdownItemsModel()).toEqual(expectation);
+ });
+
+ // Regression test for https://gitlab.com/gitlab-org/gitlab/-/issues/325991
+ describe('when closed and reopened', () => {
+ beforeEach(() => {
+ toggleDropdown();
+ toggleDropdown();
+ });
+
+ it('shows users', () => {
+ expect(findDropdownItemsModel()).toEqual(expectation);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_alerts_spec.js b/spec/frontend/vue_alerts_spec.js
index 16eb2d44e4d..05b73415544 100644
--- a/spec/frontend/vue_alerts_spec.js
+++ b/spec/frontend/vue_alerts_spec.js
@@ -42,15 +42,17 @@ describe('VueAlerts', () => {
const findJsHooks = () => document.querySelectorAll('.js-vue-alert');
const findAlerts = () => document.querySelectorAll('.gl-alert');
- const findAlertDismiss = (alert) => alert.querySelector('.gl-alert-dismiss');
+ const findAlertDismiss = (alert) => alert.querySelector('.gl-dismiss-btn');
const serializeAlert = (alert) => ({
title: alert.querySelector('.gl-alert-title').textContent.trim(),
html: alert.querySelector('.gl-alert-body div').innerHTML,
- dismissible: Boolean(alert.querySelector('.gl-alert-dismiss')),
+ dismissible: Boolean(alert.querySelector('.gl-dismiss-btn')),
primaryButtonText: alert.querySelector('.gl-alert-action').textContent.trim(),
primaryButtonLink: alert.querySelector('.gl-alert-action').href,
- variant: [...alert.classList].find((x) => x.match('gl-alert-')).replace('gl-alert-', ''),
+ variant: [...alert.classList]
+ .find((x) => x.match(/gl-alert-(?!not-dismissible)/))
+ .replace('gl-alert-', ''),
});
it('starts with only JsHooks', () => {
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_author_time_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_author_time_spec.js
index 78efcb6e695..8fd93809e01 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_author_time_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_author_time_spec.js
@@ -1,42 +1,43 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
+import { shallowMount } from '@vue/test-utils';
+import MrWidgetAuthor from '~/vue_merge_request_widget/components/mr_widget_author.vue';
import MrWidgetAuthorTime from '~/vue_merge_request_widget/components/mr_widget_author_time.vue';
describe('MrWidgetAuthorTime', () => {
- let vm;
+ let wrapper;
+
+ const defaultProps = {
+ actionText: 'Merged by',
+ author: {
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'http://localhost:3000/root',
+ avatarUrl: 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ },
+ dateTitle: '2017-03-23T23:02:00.807Z',
+ dateReadable: '12 hours ago',
+ };
beforeEach(() => {
- const Component = Vue.extend(MrWidgetAuthorTime);
-
- vm = mountComponent(Component, {
- actionText: 'Merged by',
- author: {
- name: 'Administrator',
- username: 'root',
- webUrl: 'http://localhost:3000/root',
- avatarUrl:
- 'http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
- },
- dateTitle: '2017-03-23T23:02:00.807Z',
- dateReadable: '12 hours ago',
+ wrapper = shallowMount(MrWidgetAuthorTime, {
+ propsData: defaultProps,
});
});
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
});
it('renders provided action text', () => {
- expect(vm.$el.textContent).toContain('Merged by');
+ expect(wrapper.text()).toContain('Merged by');
});
it('renders author', () => {
- expect(vm.$el.textContent).toContain('Administrator');
+ expect(wrapper.find(MrWidgetAuthor).props('author')).toStrictEqual(defaultProps.author);
});
it('renders provided time', () => {
- expect(vm.$el.querySelector('time').getAttribute('title')).toEqual('2017-03-23T23:02:00.807Z');
+ expect(wrapper.find('time').attributes('title')).toBe('2017-03-23T23:02:00.807Z');
- expect(vm.$el.querySelector('time').textContent.trim()).toEqual('12 hours ago');
+ expect(wrapper.find('time').text().trim()).toBe('12 hours ago');
});
});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
index db884dfe015..eadf07e54fb 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_header_spec.js
@@ -1,38 +1,35 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import headerComponent from '~/vue_merge_request_widget/components/mr_widget_header.vue';
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import Header from '~/vue_merge_request_widget/components/mr_widget_header.vue';
describe('MRWidgetHeader', () => {
- let vm;
- let Component;
+ let wrapper;
- beforeEach(() => {
- Component = Vue.extend(headerComponent);
- });
+ const createComponent = (propsData = {}) => {
+ wrapper = shallowMount(Header, {
+ propsData,
+ });
+ };
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
gon.relative_url_root = '';
});
const expectDownloadDropdownItems = () => {
- const downloadEmailPatchesEl = vm.$el.querySelector('.js-download-email-patches');
- const downloadPlainDiffEl = vm.$el.querySelector('.js-download-plain-diff');
-
- expect(downloadEmailPatchesEl.innerText.trim()).toEqual('Email patches');
- expect(downloadEmailPatchesEl.querySelector('a').getAttribute('href')).toEqual(
- '/mr/email-patches',
- );
- expect(downloadPlainDiffEl.innerText.trim()).toEqual('Plain diff');
- expect(downloadPlainDiffEl.querySelector('a').getAttribute('href')).toEqual(
- '/mr/plainDiffPath',
- );
+ const downloadEmailPatchesEl = wrapper.find('.js-download-email-patches');
+ const downloadPlainDiffEl = wrapper.find('.js-download-plain-diff');
+
+ expect(downloadEmailPatchesEl.text().trim()).toBe('Email patches');
+ expect(downloadEmailPatchesEl.attributes('href')).toBe('/mr/email-patches');
+ expect(downloadPlainDiffEl.text().trim()).toBe('Plain diff');
+ expect(downloadPlainDiffEl.attributes('href')).toBe('/mr/plainDiffPath');
};
describe('computed', () => {
describe('shouldShowCommitsBehindText', () => {
it('return true when there are divergedCommitsCount', () => {
- vm = mountComponent(Component, {
+ createComponent({
mr: {
divergedCommitsCount: 12,
sourceBranch: 'mr-widget-refactor',
@@ -42,11 +39,11 @@ describe('MRWidgetHeader', () => {
},
});
- expect(vm.shouldShowCommitsBehindText).toEqual(true);
+ expect(wrapper.vm.shouldShowCommitsBehindText).toBe(true);
});
it('returns false when there is no divergedCommitsCount', () => {
- vm = mountComponent(Component, {
+ createComponent({
mr: {
divergedCommitsCount: 0,
sourceBranch: 'mr-widget-refactor',
@@ -56,13 +53,13 @@ describe('MRWidgetHeader', () => {
},
});
- expect(vm.shouldShowCommitsBehindText).toEqual(false);
+ expect(wrapper.vm.shouldShowCommitsBehindText).toBe(false);
});
});
describe('commitsBehindText', () => {
it('returns singular when there is one commit', () => {
- vm = mountComponent(Component, {
+ createComponent({
mr: {
divergedCommitsCount: 1,
sourceBranch: 'mr-widget-refactor',
@@ -73,13 +70,13 @@ describe('MRWidgetHeader', () => {
},
});
- expect(vm.commitsBehindText).toEqual(
+ expect(wrapper.vm.commitsBehindText).toBe(
'The source branch is <a href="/foo/bar/master">1 commit behind</a> the target branch',
);
});
it('returns plural when there is more than one commit', () => {
- vm = mountComponent(Component, {
+ createComponent({
mr: {
divergedCommitsCount: 2,
sourceBranch: 'mr-widget-refactor',
@@ -90,7 +87,7 @@ describe('MRWidgetHeader', () => {
},
});
- expect(vm.commitsBehindText).toEqual(
+ expect(wrapper.vm.commitsBehindText).toBe(
'The source branch is <a href="/foo/bar/master">2 commits behind</a> the target branch',
);
});
@@ -100,7 +97,7 @@ describe('MRWidgetHeader', () => {
describe('template', () => {
describe('common elements', () => {
beforeEach(() => {
- vm = mountComponent(Component, {
+ createComponent({
mr: {
divergedCommitsCount: 12,
sourceBranch: 'mr-widget-refactor',
@@ -118,17 +115,17 @@ describe('MRWidgetHeader', () => {
});
it('renders source branch link', () => {
- expect(vm.$el.querySelector('.js-source-branch').innerHTML).toEqual(
+ expect(wrapper.find('.js-source-branch').html()).toContain(
'<a href="/foo/bar/mr-widget-refactor">mr-widget-refactor</a>',
);
});
it('renders clipboard button', () => {
- expect(vm.$el.querySelector('[data-testid="mr-widget-copy-clipboard"]')).not.toEqual(null);
+ expect(wrapper.find('[data-testid="mr-widget-copy-clipboard"]')).not.toBe(null);
});
it('renders target branch', () => {
- expect(vm.$el.querySelector('.js-target-branch').textContent.trim()).toEqual('master');
+ expect(wrapper.find('.js-target-branch').text().trim()).toBe('master');
});
});
@@ -151,71 +148,68 @@ describe('MRWidgetHeader', () => {
targetProjectFullPath: 'gitlab-org/gitlab-ce',
};
- afterEach(() => {
- vm.$destroy();
- });
-
beforeEach(() => {
- vm = mountComponent(Component, {
+ createComponent({
mr: { ...mrDefaultOptions },
});
});
it('renders checkout branch button with modal trigger', () => {
- const button = vm.$el.querySelector('.js-check-out-branch');
+ const button = wrapper.find('.js-check-out-branch');
- expect(button.textContent.trim()).toBe('Check out branch');
+ expect(button.text().trim()).toBe('Check out branch');
});
- it('renders web ide button', () => {
- const button = vm.$el.querySelector('.js-web-ide');
+ it('renders web ide button', async () => {
+ const button = wrapper.find('.js-web-ide');
- expect(button.textContent.trim()).toEqual('Open in Web IDE');
- expect(button.classList.contains('disabled')).toBe(false);
- expect(button.getAttribute('href')).toEqual(
+ await nextTick();
+
+ expect(button.text().trim()).toBe('Open in Web IDE');
+ expect(button.classes('disabled')).toBe(false);
+ expect(button.attributes('href')).toBe(
'/-/ide/project/root/gitlab-ce/merge_requests/1?target_project=gitlab-org%2Fgitlab-ce',
);
});
- it('renders web ide button in disabled state with no href', () => {
+ it('renders web ide button in disabled state with no href', async () => {
const mr = { ...mrDefaultOptions, canPushToSourceBranch: false };
- vm = mountComponent(Component, { mr });
+ createComponent({ mr });
+
+ await nextTick();
- const link = vm.$el.querySelector('.js-web-ide');
+ const link = wrapper.find('.js-web-ide');
- expect(link.classList.contains('disabled')).toBe(true);
- expect(link.getAttribute('href')).toBeNull();
+ expect(link.attributes('disabled')).toBe('true');
+ expect(link.attributes('href')).toBeUndefined();
});
- it('renders web ide button with blank query string if target & source project branch', (done) => {
- vm.mr.targetProjectFullPath = 'root/gitlab-ce';
+ it('renders web ide button with blank query string if target & source project branch', async () => {
+ createComponent({ mr: { ...mrDefaultOptions, targetProjectFullPath: 'root/gitlab-ce' } });
- vm.$nextTick(() => {
- const button = vm.$el.querySelector('.js-web-ide');
+ await nextTick();
- expect(button.textContent.trim()).toEqual('Open in Web IDE');
- expect(button.getAttribute('href')).toEqual(
- '/-/ide/project/root/gitlab-ce/merge_requests/1?target_project=',
- );
+ const button = wrapper.find('.js-web-ide');
- done();
- });
+ expect(button.text().trim()).toBe('Open in Web IDE');
+ expect(button.attributes('href')).toBe(
+ '/-/ide/project/root/gitlab-ce/merge_requests/1?target_project=',
+ );
});
- it('renders web ide button with relative URL', (done) => {
+ it('renders web ide button with relative URL', async () => {
gon.relative_url_root = '/gitlab';
- vm.mr.iid = 2;
- vm.$nextTick(() => {
- const button = vm.$el.querySelector('.js-web-ide');
+ createComponent({ mr: { ...mrDefaultOptions, iid: 2 } });
- expect(button.textContent.trim()).toEqual('Open in Web IDE');
- expect(button.getAttribute('href')).toEqual(
- '/gitlab/-/ide/project/root/gitlab-ce/merge_requests/2?target_project=gitlab-org%2Fgitlab-ce',
- );
+ await nextTick();
- done();
- });
+ const button = wrapper.find('.js-web-ide');
+
+ expect(button.text().trim()).toBe('Open in Web IDE');
+ expect(button.attributes('href')).toBe(
+ '/gitlab/-/ide/project/root/gitlab-ce/merge_requests/2?target_project=gitlab-org%2Fgitlab-ce',
+ );
});
it('renders download dropdown with links', () => {
@@ -225,7 +219,7 @@ describe('MRWidgetHeader', () => {
describe('with a closed merge request', () => {
beforeEach(() => {
- vm = mountComponent(Component, {
+ createComponent({
mr: {
divergedCommitsCount: 12,
sourceBranch: 'mr-widget-refactor',
@@ -243,9 +237,9 @@ describe('MRWidgetHeader', () => {
});
it('does not render checkout branch button with modal trigger', () => {
- const button = vm.$el.querySelector('.js-check-out-branch');
+ const button = wrapper.find('.js-check-out-branch');
- expect(button).toEqual(null);
+ expect(button.exists()).toBe(false);
});
it('renders download dropdown with links', () => {
@@ -255,7 +249,7 @@ describe('MRWidgetHeader', () => {
describe('without diverged commits', () => {
beforeEach(() => {
- vm = mountComponent(Component, {
+ createComponent({
mr: {
divergedCommitsCount: 0,
sourceBranch: 'mr-widget-refactor',
@@ -273,13 +267,13 @@ describe('MRWidgetHeader', () => {
});
it('does not render diverged commits info', () => {
- expect(vm.$el.querySelector('.diverged-commits-count')).toEqual(null);
+ expect(wrapper.find('.diverged-commits-count').exists()).toBe(false);
});
});
describe('with diverged commits', () => {
beforeEach(() => {
- vm = mountComponent(Component, {
+ createComponent({
mr: {
divergedCommitsCount: 12,
sourceBranch: 'mr-widget-refactor',
@@ -297,17 +291,13 @@ describe('MRWidgetHeader', () => {
});
it('renders diverged commits info', () => {
- expect(vm.$el.querySelector('.diverged-commits-count').textContent).toEqual(
+ expect(wrapper.find('.diverged-commits-count').text().trim()).toBe(
'The source branch is 12 commits behind the target branch',
);
- expect(vm.$el.querySelector('.diverged-commits-count a').textContent).toEqual(
- '12 commits behind',
- );
-
- expect(vm.$el.querySelector('.diverged-commits-count a')).toHaveAttr(
- 'href',
- vm.mr.targetBranchPath,
+ expect(wrapper.find('.diverged-commits-count a').text().trim()).toBe('12 commits behind');
+ expect(wrapper.find('.diverged-commits-count a').attributes('href')).toBe(
+ wrapper.vm.mr.targetBranchPath,
);
});
});
diff --git a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_spec.js b/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_spec.js
index 28492018600..924dc37aab9 100644
--- a/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_spec.js
+++ b/spec/frontend/vue_mr_widget/components/mr_widget_pipeline_spec.js
@@ -1,6 +1,7 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount, mount } from '@vue/test-utils';
import { trimText } from 'helpers/text_helper';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import PipelineMiniGraph from '~/pipelines/components/pipelines_list/pipeline_mini_graph.vue';
import PipelineStage from '~/pipelines/components/pipelines_list/pipeline_stage.vue';
import PipelineComponent from '~/vue_merge_request_widget/components/mr_widget_pipeline.vue';
@@ -22,27 +23,31 @@ describe('MRWidgetPipeline', () => {
'Could not retrieve the pipeline status. For troubleshooting steps, read the documentation.';
const monitoringMessage = 'Checking pipeline status.';
- const findCIErrorMessage = () => wrapper.find('[data-testid="ci-error-message"]');
- const findPipelineID = () => wrapper.find('[data-testid="pipeline-id"]');
- const findPipelineInfoContainer = () => wrapper.find('[data-testid="pipeline-info-container"]');
- const findCommitLink = () => wrapper.find('[data-testid="commit-link"]');
- const findPipelineMiniGraph = () => wrapper.find(PipelineMiniGraph);
- const findAllPipelineStages = () => wrapper.findAll(PipelineStage);
- const findPipelineCoverage = () => wrapper.find('[data-testid="pipeline-coverage"]');
- const findPipelineCoverageDelta = () => wrapper.find('[data-testid="pipeline-coverage-delta"]');
+ const findCIErrorMessage = () => wrapper.findByTestId('ci-error-message');
+ const findPipelineID = () => wrapper.findByTestId('pipeline-id');
+ const findPipelineInfoContainer = () => wrapper.findByTestId('pipeline-info-container');
+ const findCommitLink = () => wrapper.findByTestId('commit-link');
+ const findPipelineFinishedAt = () => wrapper.findByTestId('finished-at');
+ const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph);
+ const findAllPipelineStages = () => wrapper.findAllComponents(PipelineStage);
+ const findPipelineCoverage = () => wrapper.findByTestId('pipeline-coverage');
+ const findPipelineCoverageDelta = () => wrapper.findByTestId('pipeline-coverage-delta');
const findPipelineCoverageTooltipText = () =>
- wrapper.find('[data-testid="pipeline-coverage-tooltip"]').text();
- const findMonitoringPipelineMessage = () =>
- wrapper.find('[data-testid="monitoring-pipeline-message"]');
- const findLoadingIcon = () => wrapper.find(GlLoadingIcon);
+ wrapper.findByTestId('pipeline-coverage-tooltip').text();
+ const findPipelineCoverageDeltaTooltipText = () =>
+ wrapper.findByTestId('pipeline-coverage-delta-tooltip').text();
+ const findMonitoringPipelineMessage = () => wrapper.findByTestId('monitoring-pipeline-message');
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
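+ // extendedWrapper adds the findByTestId helpers used by the finders above.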
const createWrapper = (props = {}, mountFn = shallowMount) => {
- wrapper = mountFn(PipelineComponent, {
- propsData: {
- ...defaultProps,
- ...props,
- },
- });
+ wrapper = extendedWrapper(
+ mountFn(PipelineComponent, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ }),
+ );
};
afterEach(() => {
@@ -87,6 +92,13 @@ describe('MRWidgetPipeline', () => {
expect(findCommitLink().attributes('href')).toBe(mockData.pipeline.commit.commit_path);
});
+ it('should render pipeline finished timestamp', () => {
+ expect(findPipelineFinishedAt().attributes()).toMatchObject({
+ title: 'Apr 7, 2017 2:00pm GMT+0000',
+ datetime: mockData.pipeline.details.finished_at,
+ });
+ });
+
it('should render pipeline graph', () => {
expect(findPipelineMiniGraph().exists()).toBe(true);
expect(findAllPipelineStages()).toHaveLength(mockData.pipeline.details.stages.length);
@@ -94,7 +106,9 @@ describe('MRWidgetPipeline', () => {
describe('should render pipeline coverage information', () => {
it('should render coverage percentage', () => {
- expect(findPipelineCoverage().text()).toMatch(`Coverage ${mockData.pipeline.coverage}%`);
+ expect(findPipelineCoverage().text()).toMatch(
+ `Test coverage ${mockData.pipeline.coverage}%`,
+ );
});
it('should render coverage delta', () => {
@@ -102,24 +116,9 @@ describe('MRWidgetPipeline', () => {
expect(findPipelineCoverageDelta().text()).toBe(`(${mockData.pipelineCoverageDelta}%)`);
});
- it('coverage delta should have no special style if there is no coverage change', () => {
- createWrapper({ pipelineCoverageDelta: '0' });
- expect(findPipelineCoverageDelta().classes()).toEqual([]);
- });
-
- it('coverage delta should have text-success style if coverage increased', () => {
- createWrapper({ pipelineCoverageDelta: '10' });
- expect(findPipelineCoverageDelta().classes()).toEqual(['text-success']);
- });
-
- it('coverage delta should have text-danger style if coverage increased', () => {
- createWrapper({ pipelineCoverageDelta: '-10' });
- expect(findPipelineCoverageDelta().classes()).toEqual(['text-danger']);
- });
-
it('should render tooltip for jobs contributing to code coverage', () => {
const tooltipText = findPipelineCoverageTooltipText();
- const expectedDescription = `Coverage value for this pipeline was calculated by averaging the resulting coverage values of ${mockData.buildsWithCoverage.length} jobs.`;
+ const expectedDescription = `Test coverage value for this pipeline was calculated by averaging the resulting coverage values of ${mockData.buildsWithCoverage.length} jobs.`;
expect(tooltipText).toContain(expectedDescription);
});
@@ -132,6 +131,26 @@ describe('MRWidgetPipeline', () => {
expect(tooltipText).toContain(`${build.name} (${build.coverage}%)`);
},
);
+
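+ // Table-driven cases: the delta CSS class and the tooltip wording depend on whether coverage stayed the same, increased, or decreased.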
+ describe.each`
+ style | coverageState | coverageChangeText | styleClass | pipelineCoverageDelta
+ ${'no special'} | ${'the same'} | ${'not change'} | ${''} | ${'0'}
+ ${'success'} | ${'increased'} | ${'increase'} | ${'text-success'} | ${'10'}
+ ${'danger'} | ${'decreased'} | ${'decrease'} | ${'text-danger'} | ${'-10'}
+ `(
+ 'if test coverage is $coverageState',
+ ({ style, styleClass, coverageChangeText, pipelineCoverageDelta }) => {
+ it(`coverage delta should have ${style} style`, () => {
+ createWrapper({ pipelineCoverageDelta });
+ expect(findPipelineCoverageDelta().classes()).toEqual(styleClass ? [styleClass] : []);
+ });
+
+ it(`coverage delta tooltip should say that the coverage will ${coverageChangeText}`, () => {
+ createWrapper({ pipelineCoverageDelta });
+ expect(findPipelineCoverageDeltaTooltipText()).toContain(coverageChangeText);
+ });
+ },
+ );
});
});
@@ -163,7 +182,7 @@ describe('MRWidgetPipeline', () => {
});
it('should render coverage information', () => {
- expect(findPipelineCoverage().text()).toMatch(`Coverage ${mockData.pipeline.coverage}%`);
+ expect(findPipelineCoverage().text()).toMatch(`Test coverage ${mockData.pipeline.coverage}%`);
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
index 4dd1bd2aa9c..1af96717b56 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_auto_merge_enabled_spec.js
@@ -28,11 +28,11 @@ function convertPropsToGraphqlState(props) {
};
}
-function factory(propsData) {
+function factory(propsData, stateOverride = {}) {
let state = {};
if (mergeRequestWidgetGraphqlEnabled) {
- state = convertPropsToGraphqlState(propsData);
+ state = { ...convertPropsToGraphqlState(propsData), ...stateOverride };
}
wrapper = extendedWrapper(
@@ -125,7 +125,7 @@ describe('MRWidgetAutoMergeEnabled', () => {
},
);
- it('should return false when shouldRemoveSourceBranch set to false', () => {
+ it('should not find "Delete" button when shouldRemoveSourceBranch set to true', () => {
factory({
...defaultMrProps(),
shouldRemoveSourceBranch: true,
@@ -134,6 +134,29 @@ describe('MRWidgetAutoMergeEnabled', () => {
expect(wrapper.findByTestId('removeSourceBranchButton').exists()).toBe(false);
});
+ it('should find "Delete" button when shouldRemoveSourceBranch overrides state.forceRemoveSourceBranch', () => {
+ factory(
+ {
+ ...defaultMrProps(),
+ shouldRemoveSourceBranch: false,
+ },
+ {
+ forceRemoveSourceBranch: true,
+ },
+ );
+
+ expect(wrapper.findByTestId('removeSourceBranchButton').exists()).toBe(true);
+ });
+
+ it('should find "Delete" button when shouldRemoveSourceBranch set to false', () => {
+ factory({
+ ...defaultMrProps(),
+ shouldRemoveSourceBranch: false,
+ });
+
+ expect(wrapper.findByTestId('removeSourceBranchButton').exists()).toBe(true);
+ });
+
it('should return false if user is not able to remove the source branch', () => {
factory({
...defaultMrProps(),
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
index dc2f227b29c..fee78d3af94 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_conflicts_spec.js
@@ -1,4 +1,3 @@
-import { GlPopover } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { TEST_HOST } from 'helpers/test_constants';
import { removeBreakLine } from 'helpers/text_helper';
@@ -10,7 +9,6 @@ describe('MRWidgetConflicts', () => {
let mergeRequestWidgetGraphql = null;
const path = '/conflicts';
- const findPopover = () => wrapper.find(GlPopover);
const findResolveButton = () => wrapper.findByTestId('resolve-conflicts-button');
const findMergeLocalButton = () => wrapper.findByTestId('merge-locally-button');
@@ -219,12 +217,8 @@ describe('MRWidgetConflicts', () => {
});
});
- it('sets resolve button as disabled', () => {
- expect(findResolveButton().attributes('disabled')).toBe('true');
- });
-
- it('shows the popover', () => {
- expect(findPopover().exists()).toBe(true);
+ it('should not allow you to resolve the conflicts', () => {
+ expect(findResolveButton().exists()).toBe(false);
});
});
@@ -241,12 +235,9 @@ describe('MRWidgetConflicts', () => {
});
});
- it('sets resolve button as disabled', () => {
- expect(findResolveButton().attributes('disabled')).toBe(undefined);
- });
-
- it('does not show the popover', () => {
- expect(findPopover().exists()).toBe(false);
+ it('should allow you to resolve the conflicts', () => {
+ expect(findResolveButton().text()).toContain('Resolve conflicts');
+ expect(findResolveButton().attributes('href')).toEqual(TEST_HOST);
});
});
});
diff --git a/spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js b/spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js
index c1471314c4a..6d8e7056366 100644
--- a/spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js
+++ b/spec/frontend/vue_mr_widget/components/states/mr_widget_failed_to_merge_spec.js
@@ -1,69 +1,67 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import failedToMergeComponent from '~/vue_merge_request_widget/components/states/mr_widget_failed_to_merge.vue';
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import StatusIcon from '~/vue_merge_request_widget/components/mr_widget_status_icon.vue';
+import MrWidgetFailedToMerge from '~/vue_merge_request_widget/components/states/mr_widget_failed_to_merge.vue';
import eventHub from '~/vue_merge_request_widget/event_hub';
describe('MRWidgetFailedToMerge', () => {
const dummyIntervalId = 1337;
- let Component;
- let mr;
- let vm;
+ let wrapper;
+
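+ // Factory: shallow mounts the widget with a default mergeError prop and lets individual tests override props and data().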
+ const createComponent = (props = {}, data = {}) => {
+ wrapper = shallowMount(MrWidgetFailedToMerge, {
+ propsData: {
+ mr: {
+ mergeError: 'Merge error happened',
+ },
+ ...props,
+ },
+ data() {
+ return data;
+ },
+ });
+ };
beforeEach(() => {
- Component = Vue.extend(failedToMergeComponent);
jest.spyOn(eventHub, '$emit').mockImplementation(() => {});
jest.spyOn(window, 'setInterval').mockReturnValue(dummyIntervalId);
jest.spyOn(window, 'clearInterval').mockImplementation();
- mr = {
- mergeError: 'Merge error happened',
- };
- vm = mountComponent(Component, {
- mr,
- });
});
afterEach(() => {
- vm.$destroy();
- });
-
- it('sets interval to refresh', () => {
- expect(window.setInterval).toHaveBeenCalledWith(vm.updateTimer, 1000);
- expect(vm.intervalId).toBe(dummyIntervalId);
+ wrapper.destroy();
});
- it('clears interval when destroying ', () => {
- vm.$destroy();
+ describe('interval', () => {
+ it('sets interval to refresh', () => {
+ createComponent();
- expect(window.clearInterval).toHaveBeenCalledWith(dummyIntervalId);
- });
-
- describe('computed', () => {
- describe('timerText', () => {
- it('should return correct timer text', () => {
- expect(vm.timerText).toEqual('Refreshing in 10 seconds to show the updated status...');
+ expect(window.setInterval).toHaveBeenCalledWith(wrapper.vm.updateTimer, 1000);
+ expect(wrapper.vm.intervalId).toBe(dummyIntervalId);
+ });
- vm.timer = 1;
+ it('clears interval when destroying', () => {
+ createComponent();
+ wrapper.destroy();
- expect(vm.timerText).toEqual('Refreshing in a second to show the updated status...');
- });
+ expect(window.clearInterval).toHaveBeenCalledWith(dummyIntervalId);
});
+ });
- describe('mergeError', () => {
- it('removes forced line breaks', (done) => {
- mr.mergeError = 'contains<br />line breaks<br />';
+ describe('mergeError', () => {
+ it('removes forced line breaks', async () => {
+ createComponent({ mr: { mergeError: 'contains<br />line breaks<br />' } });
- Vue.nextTick()
- .then(() => {
- expect(vm.mergeError).toBe('contains line breaks.');
- })
- .then(done)
- .catch(done.fail);
- });
+ await nextTick();
+
+ expect(wrapper.vm.mergeError).toBe('contains line breaks.');
});
});
describe('created', () => {
it('should disable polling', () => {
+ createComponent();
+
expect(eventHub.$emit).toHaveBeenCalledWith('DisablePolling');
});
});
@@ -71,11 +69,13 @@ describe('MRWidgetFailedToMerge', () => {
describe('methods', () => {
describe('refresh', () => {
it('should emit event to request component refresh', () => {
- expect(vm.isRefreshing).toEqual(false);
+ createComponent();
+
+ expect(wrapper.vm.isRefreshing).toBe(false);
- vm.refresh();
+ wrapper.vm.refresh();
- expect(vm.isRefreshing).toEqual(true);
+ expect(wrapper.vm.isRefreshing).toBe(true);
expect(eventHub.$emit).toHaveBeenCalledWith('MRWidgetUpdateRequested');
expect(eventHub.$emit).toHaveBeenCalledWith('EnablePolling');
});
@@ -83,78 +83,76 @@ describe('MRWidgetFailedToMerge', () => {
describe('updateTimer', () => {
it('should update timer and emit event when timer end', () => {
- jest.spyOn(vm, 'refresh').mockImplementation(() => {});
+ createComponent();
+
+ jest.spyOn(wrapper.vm, 'refresh').mockImplementation(() => {});
- expect(vm.timer).toEqual(10);
+ expect(wrapper.vm.timer).toEqual(10);
for (let i = 0; i < 10; i += 1) {
- expect(vm.timer).toEqual(10 - i);
- vm.updateTimer();
+ expect(wrapper.vm.timer).toEqual(10 - i);
+ wrapper.vm.updateTimer();
}
- expect(vm.refresh).toHaveBeenCalled();
+ expect(wrapper.vm.refresh).toHaveBeenCalled();
});
});
});
describe('while it is refreshing', () => {
- it('renders Refresing now', (done) => {
- vm.isRefreshing = true;
-
- Vue.nextTick(() => {
- expect(vm.$el.querySelector('.js-refresh-label').textContent.trim()).toEqual(
- 'Refreshing now',
- );
- done();
- });
+ it('renders Refreshing now', async () => {
+ createComponent({}, { isRefreshing: true });
+
+ await nextTick();
+
+ expect(wrapper.find('.js-refresh-label').text().trim()).toBe('Refreshing now');
});
});
describe('while it is not refreshing', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
it('renders warning icon and disabled merge button', () => {
- expect(vm.$el.querySelector('.js-ci-status-icon-warning')).not.toBeNull();
- expect(
- vm.$el.querySelector('[data-testid="disabled-merge-button"]').getAttribute('disabled'),
- ).toEqual('disabled');
+ expect(wrapper.find('.js-ci-status-icon-warning')).not.toBeNull();
+ expect(wrapper.find(StatusIcon).props('showDisabledButton')).toBe(true);
});
it('renders given error', () => {
- expect(vm.$el.querySelector('.has-error-message').textContent.trim()).toEqual(
- 'Merge error happened.',
- );
+ expect(wrapper.find('.has-error-message').text().trim()).toBe('Merge error happened.');
});
it('renders refresh button', () => {
expect(
- vm.$el
- .querySelector('[data-testid="merge-request-failed-refresh-button"]')
- .textContent.trim(),
- ).toEqual('Refresh now');
+ wrapper.find('[data-testid="merge-request-failed-refresh-button"]').text().trim(),
+ ).toBe('Refresh now');
});
it('renders remaining time', () => {
- expect(vm.$el.querySelector('.has-custom-error').textContent.trim()).toEqual(
+ expect(wrapper.find('.has-custom-error').text().trim()).toBe(
'Refreshing in 10 seconds to show the updated status...',
);
});
});
- it('should just generic merge failed message if merge_error is not available', (done) => {
- vm.mr.mergeError = null;
+ it('should show just the generic merge failed message if merge_error is not available', async () => {
+ createComponent({ mr: { mergeError: null } });
- Vue.nextTick(() => {
- expect(vm.$el.innerText).toContain('Merge failed.');
- expect(vm.$el.innerText).not.toContain('Merge error happened.');
- done();
- });
+ await nextTick();
+
+ expect(wrapper.text().trim()).toContain('Merge failed.');
+ expect(wrapper.text().trim()).not.toContain('Merge error happened.');
});
- it('should show refresh label when refresh requested', (done) => {
- vm.refresh();
- Vue.nextTick(() => {
- expect(vm.$el.innerText).not.toContain('Merge failed. Refreshing');
- expect(vm.$el.innerText).toContain('Refreshing now');
- done();
- });
+ it('should show refresh label when refresh requested', async () => {
+ createComponent();
+
+ wrapper.vm.refresh();
+
+ await nextTick();
+
+ expect(wrapper.text().trim()).not.toContain('Merge failed. Refreshing');
+ expect(wrapper.text().trim()).toContain('Refreshing now');
});
});
diff --git a/spec/frontend/vue_shared/alert_details/alert_details_spec.js b/spec/frontend/vue_shared/alert_details/alert_details_spec.js
index 68bcf1dc491..1fc655f1ebc 100644
--- a/spec/frontend/vue_shared/alert_details/alert_details_spec.js
+++ b/spec/frontend/vue_shared/alert_details/alert_details_spec.js
@@ -8,7 +8,7 @@ import { joinPaths } from '~/lib/utils/url_utility';
import Tracking from '~/tracking';
import AlertDetails from '~/vue_shared/alert_details/components/alert_details.vue';
import AlertSummaryRow from '~/vue_shared/alert_details/components/alert_summary_row.vue';
-import { SEVERITY_LEVELS } from '~/vue_shared/alert_details/constants';
+import { PAGE_CONFIG, SEVERITY_LEVELS } from '~/vue_shared/alert_details/constants';
import createIssueMutation from '~/vue_shared/alert_details/graphql/mutations/alert_issue_create.mutation.graphql';
import AlertDetailsTable from '~/vue_shared/components/alert_details_table.vue';
import mockAlerts from './mocks/alerts.json';
@@ -271,7 +271,13 @@ describe('AlertDetails', () => {
});
it('should display a table of raw alert details data', () => {
- expect(findDetailsTable().exists()).toBe(true);
+ const details = findDetailsTable();
+ expect(details.exists()).toBe(true);
+ expect(details.props()).toStrictEqual({
+ alert: mockAlert,
+ statuses: PAGE_CONFIG.OPERATIONS.STATUSES,
+ loading: false,
+ });
});
});
diff --git a/spec/frontend/vue_shared/alert_details/alert_status_spec.js b/spec/frontend/vue_shared/alert_details/alert_status_spec.js
index a866fc13539..c532f688cbd 100644
--- a/spec/frontend/vue_shared/alert_details/alert_status_spec.js
+++ b/spec/frontend/vue_shared/alert_details/alert_status_spec.js
@@ -12,6 +12,7 @@ describe('AlertManagementStatus', () => {
let wrapper;
const findStatusDropdown = () => wrapper.find(GlDropdown);
const findFirstStatusOption = () => findStatusDropdown().find(GlDropdownItem);
+ const findAllStatusOptions = () => findStatusDropdown().findAll(GlDropdownItem);
const selectFirstStatusOption = () => {
findFirstStatusOption().vm.$emit('click');
@@ -131,6 +132,24 @@ describe('AlertManagementStatus', () => {
});
});
+ describe('Statuses', () => {
+ it('renders default translated statuses', () => {
+ mountComponent({});
+ expect(findAllStatusOptions().length).toBe(3);
+ expect(findFirstStatusOption().text()).toBe('Triggered');
+ });
+
+ it('renders translated statuses', () => {
+ const status = 'TEST';
+ const translatedStatus = 'Test';
+ mountComponent({
+ props: { alert: { ...mockAlert, status }, statuses: { [status]: translatedStatus } },
+ });
+ expect(findAllStatusOptions().length).toBe(1);
+ expect(findFirstStatusOption().text()).toBe(translatedStatus);
+ });
+ });
+
describe('Snowplow tracking', () => {
beforeEach(() => {
jest.spyOn(Tracking, 'event');
diff --git a/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_spec.js b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_spec.js
index 70cf2597963..ef75e038bff 100644
--- a/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_spec.js
+++ b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_spec.js
@@ -76,20 +76,4 @@ describe('Alert Details Sidebar', () => {
expect(wrapper.find(SidebarStatus).exists()).toBe(true);
});
});
-
- describe('the sidebar renders for threat monitoring', () => {
- beforeEach(() => {
- mock = new MockAdapter(axios);
- mountComponent();
- });
-
- it('should not render side bar status dropdown', () => {
- mountComponent({
- mountMethod: mount,
- alert: mockAlert,
- provide: { isThreatMonitoringPage: true },
- });
- expect(wrapper.find(SidebarStatus).exists()).toBe(false);
- });
- });
});
diff --git a/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_status_spec.js b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_status_spec.js
index f5b9efb4d98..0014957517f 100644
--- a/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_status_spec.js
+++ b/spec/frontend/vue_shared/alert_details/sidebar/alert_sidebar_status_spec.js
@@ -1,7 +1,9 @@
import { GlDropdown, GlDropdownItem, GlLoadingIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import updateAlertStatusMutation from '~/graphql_shared/mutations/alert_status_update.mutation.graphql';
+import AlertStatus from '~/vue_shared/alert_details/components/alert_status.vue';
import AlertSidebarStatus from '~/vue_shared/alert_details/components/sidebar/sidebar_status.vue';
+import { PAGE_CONFIG } from '~/vue_shared/alert_details/constants';
import mockAlerts from '../mocks/alerts.json';
const mockAlert = mockAlerts[0];
@@ -12,8 +14,16 @@ describe('Alert Details Sidebar Status', () => {
const findStatusDropdownItem = () => wrapper.find(GlDropdownItem);
const findStatusLoadingIcon = () => wrapper.find(GlLoadingIcon);
const findStatusDropdownHeader = () => wrapper.find('[data-testid="dropdown-header"]');
+ const findAlertStatus = () => wrapper.findComponent(AlertStatus);
+ const findStatus = () => wrapper.find('[data-testid="status"]');
- function mountComponent({ data, sidebarCollapsed = true, loading = false, stubs = {} } = {}) {
+ function mountComponent({
+ data,
+ sidebarCollapsed = true,
+ loading = false,
+ stubs = {},
+ provide = {},
+ } = {}) {
wrapper = mount(AlertSidebarStatus, {
propsData: {
alert: { ...mockAlert },
@@ -32,6 +42,7 @@ describe('Alert Details Sidebar Status', () => {
},
},
stubs,
+ provide,
});
}
@@ -96,8 +107,24 @@ describe('Alert Details Sidebar Status', () => {
jest.spyOn(wrapper.vm.$apollo, 'mutate').mockReturnValue(Promise.reject(new Error()));
findStatusDropdownItem().vm.$emit('click');
expect(findStatusLoadingIcon().exists()).toBe(false);
- expect(wrapper.find('[data-testid="status"]').text()).toBe('Triggered');
+ expect(findStatus().text()).toBe('Triggered');
});
});
});
+
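+ // Status options default to PAGE_CONFIG.OPERATIONS.STATUSES and can be overridden through provide.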
+ describe('Statuses', () => {
+ it('renders default translated statuses', () => {
+ mountComponent({});
+ expect(findAlertStatus().props('statuses')).toBe(PAGE_CONFIG.OPERATIONS.STATUSES);
+ expect(findStatus().text()).toBe('Triggered');
+ });
+
+ it('renders translated statuses', () => {
+ const status = 'TEST';
+ const statuses = { [status]: 'Test' };
+ mountComponent({ data: { alert: { ...mockAlert, status } }, provide: { statuses } });
+ expect(findAlertStatus().props('statuses')).toBe(statuses);
+ expect(findStatus().text()).toBe(statuses.TEST);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
index 1bf757ea312..bab928318ce 100644
--- a/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
+++ b/spec/frontend/vue_shared/components/__snapshots__/clone_dropdown_spec.js.snap
@@ -40,6 +40,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
tag="div"
>
<gl-button-stub
+ aria-label="Copy URL"
buttontextclasses=""
category="primary"
class="d-inline-flex"
@@ -82,6 +83,7 @@ exports[`Clone Dropdown Button rendering matches the snapshot 1`] = `
tag="div"
>
<gl-button-stub
+ aria-label="Copy URL"
buttontextclasses=""
category="primary"
class="d-inline-flex"
diff --git a/spec/frontend/vue_shared/components/alert_details_table_spec.js b/spec/frontend/vue_shared/components/alert_details_table_spec.js
index 49b82cb4d4e..03b04a92bdf 100644
--- a/spec/frontend/vue_shared/components/alert_details_table_spec.js
+++ b/spec/frontend/vue_shared/components/alert_details_table_spec.js
@@ -75,45 +75,62 @@ describe('AlertDetails', () => {
});
describe('with table data', () => {
- beforeEach(mountComponent);
-
- it('renders a table', () => {
- expect(findTableComponent().exists()).toBe(true);
- });
-
- it('renders a cell based on alert data', () => {
- expect(findTableComponent().text()).toContain('SyntaxError: Invalid or unexpected token');
- });
-
- it('should show allowed alert fields', () => {
- const fields = findTableKeys();
-
- expect(findTableField(fields, 'Iid').exists()).toBe(true);
- expect(findTableField(fields, 'Title').exists()).toBe(true);
- expect(findTableField(fields, 'Severity').exists()).toBe(true);
- expect(findTableField(fields, 'Status').exists()).toBe(true);
- expect(findTableField(fields, 'Hosts').exists()).toBe(true);
- expect(findTableField(fields, 'Environment').exists()).toBe(true);
+ describe('default', () => {
+ beforeEach(mountComponent);
+
+ it('renders a table', () => {
+ expect(findTableComponent().exists()).toBe(true);
+ });
+
+ it('renders a cell based on alert data', () => {
+ expect(findTableComponent().text()).toContain('SyntaxError: Invalid or unexpected token');
+ });
+
+ it('should show allowed alert fields', () => {
+ const fields = findTableKeys();
+ ['Iid', 'Title', 'Severity', 'Status', 'Hosts', 'Environment'].forEach((field) => {
+ expect(findTableField(fields, field).exists()).toBe(true);
+ });
+ });
+
+ it('should not show disallowed alert fields', () => {
+ const fields = findTableKeys();
+ ['Typename', 'Todos', 'Notes', 'Assignees'].forEach((field) => {
+ expect(findTableField(fields, field).exists()).toBe(false);
+ });
+ });
});
- it('should not show disallowed alert fields', () => {
- const fields = findTableKeys();
+ describe('environment', () => {
+ it('should display only the name for the environment', () => {
+ mountComponent();
+ expect(findTableFieldValueByKey('Environment').text()).toBe(environmentName);
+ });
- expect(findTableField(fields, 'Typename').exists()).toBe(false);
- expect(findTableField(fields, 'Todos').exists()).toBe(false);
- expect(findTableField(fields, 'Notes').exists()).toBe(false);
- expect(findTableField(fields, 'Assignees').exists()).toBe(false);
- });
+ it('should not display the environment row if there is no data', () => {
+ environmentData = { name: null, path: null };
+ mountComponent();
- it('should display only the name for the environment', () => {
- expect(findTableFieldValueByKey('Environment').text()).toBe(environmentName);
+ expect(findTableFieldValueByKey('Environment').text()).toBeFalsy();
+ });
});
- it('should not display the environment row if there is not data', () => {
- environmentData = { name: null, path: null };
- mountComponent();
-
- expect(findTableFieldValueByKey('Environment').text()).toBeFalsy();
+ describe('status', () => {
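+ // The status cell renders the translated label from the statuses prop and falls back to the raw status value when no translation is provided.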
+ it('should show the translated status for the default statuses', () => {
+ mountComponent();
+ expect(findTableFieldValueByKey('Status').text()).toBe('Triggered');
+ });
+
+ it('should show the translated status for provided statuses', () => {
+ const translatedStatus = 'Test';
+ mountComponent({ statuses: { TRIGGERED: translatedStatus } });
+ expect(findTableFieldValueByKey('Status').text()).toBe(translatedStatus);
+ });
+
+ it('should show the provided status if value is not defined in statuses', () => {
+ mountComponent({ statuses: {} });
+ expect(findTableFieldValueByKey('Status').text()).toBe('TRIGGERED');
+ });
});
});
});
diff --git a/spec/frontend/vue_shared/components/blob_viewers/__snapshots__/simple_viewer_spec.js.snap b/spec/frontend/vue_shared/components/blob_viewers/__snapshots__/simple_viewer_spec.js.snap
index 023895099b1..06753044e93 100644
--- a/spec/frontend/vue_shared/components/blob_viewers/__snapshots__/simple_viewer_spec.js.snap
+++ b/spec/frontend/vue_shared/components/blob_viewers/__snapshots__/simple_viewer_spec.js.snap
@@ -1,87 +1,88 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`Blob Simple Viewer component rendering matches the snapshot 1`] = `
-<div
- class="file-content code js-syntax-highlight"
- data-qa-selector="file_content"
->
+<div>
<div
- class="line-numbers"
+ class="file-content code js-syntax-highlight"
>
- <a
- class="diff-line-num js-line-number"
- data-line-number="1"
- href="#LC1"
- id="L1"
+ <div
+ class="line-numbers"
>
- <gl-icon-stub
- name="link"
- size="12"
- />
+ <a
+ class="diff-line-num js-line-number"
+ data-line-number="1"
+ href="#LC1"
+ id="L1"
+ >
+ <gl-icon-stub
+ name="link"
+ size="12"
+ />
+
+ 1
- 1
-
- </a>
- <a
- class="diff-line-num js-line-number"
- data-line-number="2"
- href="#LC2"
- id="L2"
- >
- <gl-icon-stub
- name="link"
- size="12"
- />
+ </a>
+ <a
+ class="diff-line-num js-line-number"
+ data-line-number="2"
+ href="#LC2"
+ id="L2"
+ >
+ <gl-icon-stub
+ name="link"
+ size="12"
+ />
+
+ 2
- 2
-
- </a>
- <a
- class="diff-line-num js-line-number"
- data-line-number="3"
- href="#LC3"
- id="L3"
- >
- <gl-icon-stub
- name="link"
- size="12"
- />
+ </a>
+ <a
+ class="diff-line-num js-line-number"
+ data-line-number="3"
+ href="#LC3"
+ id="L3"
+ >
+ <gl-icon-stub
+ name="link"
+ size="12"
+ />
+
+ 3
- 3
-
- </a>
- </div>
-
- <div
- class="blob-content"
- >
- <pre
- class="code highlight"
+ </a>
+ </div>
+
+ <div
+ class="blob-content"
>
- <code
- data-blob-hash="foo-bar"
+ <pre
+ class="code highlight"
>
- <span
- id="LC1"
+ <code
+ data-blob-hash="foo-bar"
>
- First
- </span>
-
+ <span
+ id="LC1"
+ >
+ First
+ </span>
+
- <span
- id="LC2"
- >
- Second
- </span>
-
+ <span
+ id="LC2"
+ >
+ Second
+ </span>
+
- <span
- id="LC3"
- >
- Third
- </span>
- </code>
- </pre>
+ <span
+ id="LC3"
+ >
+ Third
+ </span>
+ </code>
+ </pre>
+ </div>
</div>
</div>
`;
diff --git a/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js b/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js
index 9a0616343fe..46d4edad891 100644
--- a/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/blob_viewers/simple_viewer_spec.js
@@ -1,20 +1,31 @@
import { shallowMount } from '@vue/test-utils';
+import waitForPromises from 'helpers/wait_for_promises';
import { HIGHLIGHT_CLASS_NAME } from '~/vue_shared/components/blob_viewers/constants';
import SimpleViewer from '~/vue_shared/components/blob_viewers/simple_viewer.vue';
+import EditorLite from '~/vue_shared/components/editor_lite.vue';
describe('Blob Simple Viewer component', () => {
let wrapper;
const contentMock = `<span id="LC1">First</span>\n<span id="LC2">Second</span>\n<span id="LC3">Third</span>`;
const blobHash = 'foo-bar';
- function createComponent(content = contentMock) {
+ function createComponent(
+ content = contentMock,
+ isRawContent = false,
+ isRefactorFlagEnabled = false,
+ ) {
wrapper = shallowMount(SimpleViewer, {
provide: {
blobHash,
+ glFeatures: {
+ refactorBlobViewer: isRefactorFlagEnabled,
+ },
},
propsData: {
content,
type: 'text',
+ fileName: 'test.js',
+ isRawContent,
},
});
}
@@ -83,4 +94,32 @@ describe('Blob Simple Viewer component', () => {
});
});
});
+
+ describe('Vue refactoring to use Source Editor', () => {
+ const findEditorLite = () => wrapper.find(EditorLite);
+
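+ // Editor Lite is only rendered when both the refactorBlobViewer flag and isRawContent are enabled.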
+ it.each`
+ doesRender | condition | isRawContent | isRefactorFlagEnabled
+ ${'Does not'} | ${'rawContent is not specified'} | ${false} | ${true}
+ ${'Does not'} | ${'the feature flag is disabled'} | ${true} | ${false}
+ ${'Does not'} | ${'both, the FF and rawContent are not specified'} | ${false} | ${false}
+ ${'Does'} | ${'both, the FF and rawContent are specified'} | ${true} | ${true}
+ `(
+ '$doesRender render Editor Lite component in readonly mode when $condition',
+ async ({ isRawContent, isRefactorFlagEnabled } = {}) => {
+ createComponent('raw content', isRawContent, isRefactorFlagEnabled);
+ await waitForPromises();
+
+ if (isRawContent && isRefactorFlagEnabled) {
+ expect(findEditorLite().exists()).toBe(true);
+
+ expect(findEditorLite().props('value')).toBe('raw content');
+ expect(findEditorLite().props('fileName')).toBe('test.js');
+ expect(findEditorLite().props('editorOptions')).toEqual({ readOnly: true });
+ } else {
+ expect(findEditorLite().exists()).toBe(false);
+ }
+ },
+ );
+ });
});
diff --git a/spec/frontend/vue_shared/components/delete_label_modal_spec.js b/spec/frontend/vue_shared/components/delete_label_modal_spec.js
new file mode 100644
index 00000000000..3905690dab4
--- /dev/null
+++ b/spec/frontend/vue_shared/components/delete_label_modal_spec.js
@@ -0,0 +1,64 @@
+import { GlModal } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import { stubComponent } from 'helpers/stub_component';
+import { TEST_HOST } from 'helpers/test_constants';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import DeleteLabelModal from '~/vue_shared/components/delete_label_modal.vue';
+
+const MOCK_MODAL_DATA = {
+ labelName: 'label 1',
+ subjectName: 'GitLab Org',
+ destroyPath: `${TEST_HOST}/1`,
+};
+
+describe('vue_shared/components/delete_label_modal', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = extendedWrapper(
+ mount(DeleteLabelModal, {
+ propsData: {
+ selector: '.js-test-btn',
+ },
+ stubs: {
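+ // Stub GlModal so its title, body, and footer slots render inline and can be asserted on.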
+ GlModal: stubComponent(GlModal, {
+ template:
+ '<div><slot name="modal-title"></slot><slot></slot><slot name="modal-footer"></slot></div>',
+ }),
+ },
+ }),
+ );
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findModal = () => wrapper.find(GlModal);
+ const findPrimaryModalButton = () => wrapper.findByTestId('delete-button');
+
+ describe('template', () => {
+ describe('when modal data is set', () => {
+ beforeEach(() => {
+ createComponent();
+ wrapper.vm.labelName = MOCK_MODAL_DATA.labelName;
+ wrapper.vm.subjectName = MOCK_MODAL_DATA.subjectName;
+ wrapper.vm.destroyPath = MOCK_MODAL_DATA.destroyPath;
+ });
+
+ it('renders GlModal', () => {
+ expect(findModal().exists()).toBe(true);
+ });
+
+ it('displays the label name and subject name', () => {
+ expect(findModal().text()).toContain(
+ `${MOCK_MODAL_DATA.labelName} will be permanently deleted from ${MOCK_MODAL_DATA.subjectName}. This cannot be undone`,
+ );
+ });
+
+ it('passes the destroyPath to the button', () => {
+ expect(findPrimaryModalButton().attributes('href')).toBe(MOCK_MODAL_DATA.destroyPath);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/deprecated_modal_spec.js b/spec/frontend/vue_shared/components/deprecated_modal_spec.js
deleted file mode 100644
index b9793ce2d80..00000000000
--- a/spec/frontend/vue_shared/components/deprecated_modal_spec.js
+++ /dev/null
@@ -1,73 +0,0 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import DeprecatedModal from '~/vue_shared/components/deprecated_modal.vue';
-
-const modalComponent = Vue.extend(DeprecatedModal);
-
-describe('DeprecatedModal', () => {
- let vm;
-
- afterEach(() => {
- vm.$destroy();
- });
-
- describe('props', () => {
- describe('without primaryButtonLabel', () => {
- beforeEach(() => {
- vm = mountComponent(modalComponent, {
- primaryButtonLabel: null,
- });
- });
-
- it('does not render a primary button', () => {
- expect(vm.$el.querySelector('.js-primary-button')).toBeNull();
- });
- });
-
- describe('with id', () => {
- describe('does not render a primary button', () => {
- beforeEach(() => {
- vm = mountComponent(modalComponent, {
- id: 'my-modal',
- });
- });
-
- it('assigns the id to the modal', () => {
- expect(vm.$el.querySelector('#my-modal.modal')).not.toBeNull();
- });
-
- it('does not show the modal immediately', () => {
- expect(vm.$el.querySelector('#my-modal.modal')).not.toHaveClass('show');
- });
-
- it('does not show a backdrop', () => {
- expect(vm.$el.querySelector('modal-backdrop')).toBeNull();
- });
- });
- });
-
- it('works with data-toggle="modal"', () => {
- setFixtures(`
- <button id="modal-button" data-toggle="modal" data-target="#my-modal"></button>
- <div id="modal-container"></div>
- `);
-
- const modalContainer = document.getElementById('modal-container');
- const modalButton = document.getElementById('modal-button');
- vm = mountComponent(
- modalComponent,
- {
- id: 'my-modal',
- },
- modalContainer,
- );
- const modalElement = vm.$el.querySelector('#my-modal');
-
- expect(modalElement).not.toHaveClass('show');
-
- modalButton.click();
-
- expect(modalElement).toHaveClass('show');
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/ensure_data_spec.js b/spec/frontend/vue_shared/components/ensure_data_spec.js
new file mode 100644
index 00000000000..eef8b452f5f
--- /dev/null
+++ b/spec/frontend/vue_shared/components/ensure_data_spec.js
@@ -0,0 +1,145 @@
+import { GlEmptyState } from '@gitlab/ui';
+import * as Sentry from '@sentry/browser';
+import { mount } from '@vue/test-utils';
+import ensureData from '~/ensure_data';
+
+const mockData = { message: 'Hello there' };
+const defaultOptions = {
+ parseData: () => mockData,
+ data: mockData,
+};
+
+const MockChildComponent = {
+ inject: ['message'],
+ render(createElement) {
+ return createElement('h1', this.message);
+ },
+};
+
+const MockParentComponent = {
+ components: {
+ MockChildComponent,
+ },
+ props: {
+ message: {
+ type: String,
+ required: true,
+ },
+ otherProp: {
+ type: Boolean,
+ default: false,
+ required: false,
+ },
+ },
+ render(createElement) {
+ return createElement('div', [this.message, createElement(MockChildComponent)]);
+ },
+};
+
+describe('EnsureData', () => {
+ let wrapper;
+
+ function findEmptyState() {
+ return wrapper.findComponent(GlEmptyState);
+ }
+
+ function findChild() {
+ return wrapper.findComponent(MockChildComponent);
+ }
+ function findParent() {
+ return wrapper.findComponent(MockParentComponent);
+ }
+
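+ // ensureData wraps the parent component; the parsed data is exposed to it as props and to descendants via provide/inject (asserted below).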
+ function createComponent(options = defaultOptions) {
+ return mount(ensureData(MockParentComponent, options));
+ }
+
+ beforeEach(() => {
+ Sentry.captureException = jest.fn();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ Sentry.captureException.mockClear();
+ });
+
+ describe('when parseData throws', () => {
+ it('should render GlEmptyState', () => {
+ wrapper = createComponent({
+ parseData: () => {
+ throw new Error();
+ },
+ });
+
+ expect(findParent().exists()).toBe(false);
+ expect(findChild().exists()).toBe(false);
+ expect(findEmptyState().exists()).toBe(true);
+ });
+
+ it('should not log to Sentry when shouldLog=false (default)', () => {
+ wrapper = createComponent({
+ parseData: () => {
+ throw new Error();
+ },
+ });
+
+ expect(Sentry.captureException).not.toHaveBeenCalled();
+ });
+
+ it('should log to Sentry when shouldLog=true', () => {
+ const error = new Error('Error!');
+ wrapper = createComponent({
+ parseData: () => {
+ throw error;
+ },
+ shouldLog: true,
+ });
+
+ expect(Sentry.captureException).toHaveBeenCalledWith(error);
+ });
+ });
+
+ describe('when parseData succeeds', () => {
+ it('should render MockParentComponent and MockChildComponent', () => {
+ wrapper = createComponent();
+
+ expect(findEmptyState().exists()).toBe(false);
+ expect(findParent().exists()).toBe(true);
+ expect(findChild().exists()).toBe(true);
+ });
+
+ it('enables user to provide data to child components', () => {
+ wrapper = createComponent();
+
+ const childComponent = findChild();
+ expect(childComponent.text()).toBe(mockData.message);
+ });
+
+ it('enables user to override provide data', () => {
+ const message = 'Another message';
+ wrapper = createComponent({ ...defaultOptions, provide: { message } });
+
+ const childComponent = findChild();
+ expect(childComponent.text()).toBe(message);
+ });
+
+ it('enables user to pass props to parent component', () => {
+ wrapper = createComponent();
+
+ expect(findParent().props()).toMatchObject(mockData);
+ });
+
+ it('enables user to override props data', () => {
+ const props = { message: 'Another message', otherProp: true };
+ wrapper = createComponent({ ...defaultOptions, props });
+
+ expect(findParent().props()).toMatchObject(props);
+ });
+
+ it('should not log to Sentry when shouldLog=true', () => {
+ wrapper = createComponent({ ...defaultOptions, shouldLog: true });
+
+ expect(Sentry.captureException).not.toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
index 7606b3bd91c..c24528ba4d2 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/mock_data.js
@@ -3,6 +3,8 @@ import { mockLabels } from 'jest/vue_shared/components/sidebar/labels_select_vue
import Api from '~/api';
import AuthorToken from '~/vue_shared/components/filtered_search_bar/tokens/author_token.vue';
import BranchToken from '~/vue_shared/components/filtered_search_bar/tokens/branch_token.vue';
+import EmojiToken from '~/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue';
+import EpicToken from '~/vue_shared/components/filtered_search_bar/tokens/epic_token.vue';
import LabelToken from '~/vue_shared/components/filtered_search_bar/tokens/label_token.vue';
import MilestoneToken from '~/vue_shared/components/filtered_search_bar/tokens/milestone_token.vue';
@@ -59,6 +61,21 @@ export const mockMilestones = [
mockEscapedMilestone,
];
+export const mockEpics = [
+ { iid: 1, id: 1, title: 'Foo' },
+ { iid: 2, id: 2, title: 'Bar' },
+];
+
+export const mockEmoji1 = {
+ name: 'thumbsup',
+};
+
+export const mockEmoji2 = {
+ name: 'star',
+};
+
+export const mockEmojis = [mockEmoji1, mockEmoji2];
+
export const mockBranchToken = {
type: 'source_branch',
icon: 'branch',
@@ -103,6 +120,28 @@ export const mockMilestoneToken = {
fetchMilestones: () => Promise.resolve({ data: mockMilestones }),
};
+export const mockEpicToken = {
+ type: 'epic_iid',
+ icon: 'clock',
+ title: 'Epic',
+ unique: true,
+ symbol: '&',
+ token: EpicToken,
+ operators: [{ value: '=', description: 'is', default: 'true' }],
+ fetchEpics: () => Promise.resolve({ data: mockEpics }),
+ fetchSingleEpic: () => Promise.resolve({ data: mockEpics[0] }),
+};
+
+export const mockReactionEmojiToken = {
+ type: 'my_reaction_emoji',
+ icon: 'thumb-up',
+ title: 'My-Reaction',
+ unique: true,
+ token: EmojiToken,
+ operators: [{ value: '=', description: 'is', default: 'true' }],
+ fetchEmojis: () => Promise.resolve(mockEmojis),
+};
+
export const mockMembershipToken = {
type: 'with_inherited_permissions',
icon: 'group',
@@ -168,6 +207,14 @@ export const tokenValuePlain = {
value: { data: 'foo' },
};
+export const tokenValueEpic = {
+ type: 'epic_iid',
+ value: {
+ operator: '=',
+ data: '"foo"::&42',
+ },
+};
+
export const mockHistoryItems = [
[tokenValueAuthor, tokenValueLabel, tokenValueMilestone, 'duo'],
[tokenValueAuthor, 'si'],
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
new file mode 100644
index 00000000000..231f2f01428
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/emoji_token_spec.js
@@ -0,0 +1,217 @@
+import {
+ GlFilteredSearchToken,
+ GlFilteredSearchSuggestion,
+ GlFilteredSearchTokenSegment,
+ GlDropdownDivider,
+} from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import { deprecatedCreateFlash as createFlash } from '~/flash';
+import axios from '~/lib/utils/axios_utils';
+
+import {
+ DEFAULT_LABEL_NONE,
+ DEFAULT_LABEL_ANY,
+} from '~/vue_shared/components/filtered_search_bar/constants';
+import EmojiToken from '~/vue_shared/components/filtered_search_bar/tokens/emoji_token.vue';
+
+import { mockReactionEmojiToken, mockEmojis } from '../mock_data';
+
+jest.mock('~/flash');
+const GlEmoji = { template: '<img/>' };
+const defaultStubs = {
+ Portal: true,
+ GlFilteredSearchSuggestionList: {
+ template: '<div></div>',
+ methods: {
+ getValue: () => '=',
+ },
+ },
+ GlEmoji,
+};
+
+function createComponent(options = {}) {
+ const {
+ config = mockReactionEmojiToken,
+ value = { data: '' },
+ active = false,
+ stubs = defaultStubs,
+ } = options;
+ return mount(EmojiToken, {
+ propsData: {
+ config,
+ value,
+ active,
+ },
+ provide: {
+ portalName: 'fake target',
+ alignSuggestions: function fakeAlignSuggestions() {},
+ suggestionsListClass: 'custom-class',
+ },
+ stubs,
+ });
+}
+
+describe('EmojiToken', () => {
+ let mock;
+ let wrapper;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ });
+
+ afterEach(() => {
+ mock.restore();
+ wrapper.destroy();
+ });
+
+ describe('computed', () => {
+ beforeEach(async () => {
+ wrapper = createComponent({ value: { data: mockEmojis[0].name } });
+
+ wrapper.setData({
+ emojis: mockEmojis,
+ });
+
+ await wrapper.vm.$nextTick();
+ });
+
+ describe('currentValue', () => {
+ it('returns lowercase string for `value.data`', () => {
+ expect(wrapper.vm.currentValue).toBe(mockEmojis[0].name);
+ });
+ });
+
+ describe('activeEmoji', () => {
+ it('returns object for currently present `value.data`', () => {
+ expect(wrapper.vm.activeEmoji).toEqual(mockEmojis[0]);
+ });
+ });
+ });
+
+ describe('methods', () => {
+ beforeEach(() => {
+ wrapper = createComponent();
+ });
+
+ describe('fetchEmojiBySearchTerm', () => {
+ it('calls `config.fetchEmojis` with provided searchTerm param', () => {
+ jest.spyOn(wrapper.vm.config, 'fetchEmojis');
+
+ wrapper.vm.fetchEmojiBySearchTerm('foo');
+
+ expect(wrapper.vm.config.fetchEmojis).toHaveBeenCalledWith('foo');
+ });
+
+ it('sets response to `emojis` when request is successful', () => {
+ jest.spyOn(wrapper.vm.config, 'fetchEmojis').mockResolvedValue(mockEmojis);
+
+ wrapper.vm.fetchEmojiBySearchTerm('foo');
+
+ return waitForPromises().then(() => {
+ expect(wrapper.vm.emojis).toEqual(mockEmojis);
+ });
+ });
+
+ it('calls `createFlash` with flash error message when request fails', () => {
+ jest.spyOn(wrapper.vm.config, 'fetchEmojis').mockRejectedValue({});
+
+ wrapper.vm.fetchEmojiBySearchTerm('foo');
+
+ return waitForPromises().then(() => {
+ expect(createFlash).toHaveBeenCalledWith('There was a problem fetching emojis.');
+ });
+ });
+
+ it('sets `loading` to false when request completes', () => {
+ jest.spyOn(wrapper.vm.config, 'fetchEmojis').mockRejectedValue({});
+
+ wrapper.vm.fetchEmojiBySearchTerm('foo');
+
+ return waitForPromises().then(() => {
+ expect(wrapper.vm.loading).toBe(false);
+ });
+ });
+ });
+ });
+
+ describe('template', () => {
+ const defaultEmojis = [DEFAULT_LABEL_NONE, DEFAULT_LABEL_ANY];
+
+ beforeEach(async () => {
+ wrapper = createComponent({
+ value: { data: `"${mockEmojis[0].name}"` },
+ });
+
+ wrapper.setData({
+ emojis: mockEmojis,
+ });
+
+ await wrapper.vm.$nextTick();
+ });
+
+ it('renders gl-filtered-search-token component', () => {
+ expect(wrapper.find(GlFilteredSearchToken).exists()).toBe(true);
+ });
+
+ it('renders token item when value is selected', () => {
+ const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+
+ expect(tokenSegments).toHaveLength(3); // My Reaction, =, "thumbsup"
+ expect(tokenSegments.at(2).find(GlEmoji).attributes('data-name')).toEqual('thumbsup');
+ });
+
+ it('renders provided defaultEmojis as suggestions', async () => {
+ wrapper = createComponent({
+ active: true,
+ config: { ...mockReactionEmojiToken, defaultEmojis },
+ stubs: { Portal: true, GlEmoji },
+ });
+ const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const suggestionsSegment = tokenSegments.at(2);
+ suggestionsSegment.vm.$emit('activate');
+ await wrapper.vm.$nextTick();
+
+ const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+
+ expect(suggestions).toHaveLength(defaultEmojis.length);
+ defaultEmojis.forEach((emoji, index) => {
+ expect(suggestions.at(index).text()).toBe(emoji.text);
+ });
+ });
+
+ it('does not render divider when no defaultEmojis', async () => {
+ wrapper = createComponent({
+ active: true,
+ config: { ...mockReactionEmojiToken, defaultEmojis: [] },
+ stubs: { Portal: true, GlEmoji },
+ });
+ const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const suggestionsSegment = tokenSegments.at(2);
+ suggestionsSegment.vm.$emit('activate');
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.find(GlFilteredSearchSuggestion).exists()).toBe(false);
+ expect(wrapper.find(GlDropdownDivider).exists()).toBe(false);
+ });
+
+ it('renders `DEFAULT_LABEL_NONE` and `DEFAULT_LABEL_ANY` as default suggestions', async () => {
+ wrapper = createComponent({
+ active: true,
+ config: { ...mockReactionEmojiToken },
+ stubs: { Portal: true, GlEmoji },
+ });
+ const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+ const suggestionsSegment = tokenSegments.at(2);
+ suggestionsSegment.vm.$emit('activate');
+ await wrapper.vm.$nextTick();
+
+ const suggestions = wrapper.findAll(GlFilteredSearchSuggestion);
+
+ expect(suggestions).toHaveLength(2);
+ expect(suggestions.at(0).text()).toBe(DEFAULT_LABEL_NONE.text);
+ expect(suggestions.at(1).text()).toBe(DEFAULT_LABEL_ANY.text);
+ });
+ });
+});
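
For reference, the `fetchEmojiBySearchTerm` behaviour asserted above — delegate to `config.fetchEmojis`, store the result in `emojis`, flash an error on failure, and clear `loading` either way — can be sketched roughly as below. This is a hedged approximation written as a standalone helper, not the method body from `emoji_token.vue`; only `config.fetchEmojis`, `emojis`, `loading`, and the flash message come from the assertions, the rest is assumed.

// fetch_emojis_sketch.js — standalone approximation of the flow the spec asserts.
import { deprecatedCreateFlash as createFlash } from '~/flash';

export function fetchEmojiBySearchTerm(vm, searchTerm) {
  // `vm` stands in for the token component instance (config, emojis, loading).
  vm.loading = true;
  return vm.config
    .fetchEmojis(searchTerm)
    .then((emojis) => {
      vm.emojis = emojis;
    })
    .catch(() => createFlash('There was a problem fetching emojis.'))
    .finally(() => {
      vm.loading = false;
    });
}
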
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js
new file mode 100644
index 00000000000..0c3f9e1363f
--- /dev/null
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/epic_token_spec.js
@@ -0,0 +1,180 @@
+import { GlFilteredSearchToken, GlFilteredSearchTokenSegment } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import MockAdapter from 'axios-mock-adapter';
+import waitForPromises from 'helpers/wait_for_promises';
+import createFlash from '~/flash';
+import axios from '~/lib/utils/axios_utils';
+
+import EpicToken from '~/vue_shared/components/filtered_search_bar/tokens/epic_token.vue';
+
+import { mockEpicToken, mockEpics } from '../mock_data';
+
+jest.mock('~/flash');
+
+const defaultStubs = {
+ Portal: true,
+ GlFilteredSearchSuggestionList: {
+ template: '<div></div>',
+ methods: {
+ getValue: () => '=',
+ },
+ },
+};
+
+function createComponent(options = {}) {
+ const {
+ config = mockEpicToken,
+ value = { data: '' },
+ active = false,
+ stubs = defaultStubs,
+ } = options;
+ return mount(EpicToken, {
+ propsData: {
+ config,
+ value,
+ active,
+ },
+ provide: {
+ portalName: 'fake target',
+ alignSuggestions: function fakeAlignSuggestions() {},
+ suggestionsListClass: 'custom-class',
+ },
+ stubs,
+ });
+}
+
+describe('EpicToken', () => {
+ let mock;
+ let wrapper;
+
+ beforeEach(() => {
+ mock = new MockAdapter(axios);
+ wrapper = createComponent();
+ });
+
+ afterEach(() => {
+ mock.restore();
+ wrapper.destroy();
+ });
+
+ describe('computed', () => {
+ beforeEach(async () => {
+ wrapper = createComponent({
+ data: {
+ epics: mockEpics,
+ },
+ });
+
+ await wrapper.vm.$nextTick();
+ });
+
+ describe('currentValue', () => {
+ it.each`
+ data | id
+ ${`${mockEpics[0].title}::&${mockEpics[0].iid}`} | ${mockEpics[0].iid}
+ ${mockEpics[0].iid} | ${mockEpics[0].iid}
+ ${'foobar'} | ${'foobar'}
+ `('$data returns $id', async ({ data, id }) => {
+ wrapper.setProps({ value: { data } });
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.vm.currentValue).toBe(id);
+ });
+ });
+
+ describe('activeEpic', () => {
+ it('returns object for currently present `value.data`', async () => {
+ wrapper.setProps({
+ value: { data: `${mockEpics[0].iid}` },
+ });
+
+ await wrapper.vm.$nextTick();
+
+ expect(wrapper.vm.activeEpic).toEqual(mockEpics[0]);
+ });
+ });
+ });
+
+ describe('methods', () => {
+ describe('fetchEpicsBySearchTerm', () => {
+ it('calls `config.fetchEpics` with provided searchTerm param', () => {
+ jest.spyOn(wrapper.vm.config, 'fetchEpics');
+
+ wrapper.vm.fetchEpicsBySearchTerm('foo');
+
+ expect(wrapper.vm.config.fetchEpics).toHaveBeenCalledWith('foo');
+ });
+
+ it('sets response to `epics` when request is successful', async () => {
+ jest.spyOn(wrapper.vm.config, 'fetchEpics').mockResolvedValue({
+ data: mockEpics,
+ });
+
+ wrapper.vm.fetchEpicsBySearchTerm();
+
+ await waitForPromises();
+
+ expect(wrapper.vm.epics).toEqual(mockEpics);
+ });
+
+ it('calls `createFlash` with flash error message when request fails', async () => {
+ jest.spyOn(wrapper.vm.config, 'fetchEpics').mockRejectedValue({});
+
+ wrapper.vm.fetchEpicsBySearchTerm('foo');
+
+ await waitForPromises();
+
+ expect(createFlash).toHaveBeenCalledWith({
+ message: 'There was a problem fetching epics.',
+ });
+ });
+
+ it('sets `loading` to false when request completes', async () => {
+ jest.spyOn(wrapper.vm.config, 'fetchEpics').mockRejectedValue({});
+
+ wrapper.vm.fetchEpicsBySearchTerm('foo');
+
+ await waitForPromises();
+
+ expect(wrapper.vm.loading).toBe(false);
+ });
+ });
+
+ describe('fetchSingleEpic', () => {
+ it('calls `config.fetchSingleEpic` with provided iid param', async () => {
+ jest.spyOn(wrapper.vm.config, 'fetchSingleEpic');
+
+ wrapper.vm.fetchSingleEpic(1);
+
+ expect(wrapper.vm.config.fetchSingleEpic).toHaveBeenCalledWith(1);
+
+ await waitForPromises();
+
+ expect(wrapper.vm.epics).toEqual([mockEpics[0]]);
+ });
+ });
+ });
+
+ describe('template', () => {
+ beforeEach(async () => {
+ wrapper = createComponent({
+ value: { data: `${mockEpics[0].iid}` },
+ data: { epics: mockEpics },
+ });
+
+ await wrapper.vm.$nextTick();
+ });
+
+ it('renders gl-filtered-search-token component', () => {
+ expect(wrapper.find(GlFilteredSearchToken).exists()).toBe(true);
+ });
+
+ it('renders token item when value is selected', () => {
+ const tokenSegments = wrapper.findAll(GlFilteredSearchTokenSegment);
+
+ expect(tokenSegments).toHaveLength(3);
+ expect(tokenSegments.at(2).text()).toBe(`${mockEpics[0].title}::&${mockEpics[0].iid}`);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
index 7676ce10ce0..8528c062426 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/tokens/label_token_spec.js
@@ -118,6 +118,22 @@ describe('LabelToken', () => {
wrapper = createComponent();
});
+ describe('getLabelName', () => {
+ it('returns value of `name` or `title` property present in provided label param', () => {
+ let mockLabel = {
+ title: 'foo',
+ };
+
+ expect(wrapper.vm.getLabelName(mockLabel)).toBe(mockLabel.title);
+
+ mockLabel = {
+ name: 'foo',
+ };
+
+ expect(wrapper.vm.getLabelName(mockLabel)).toBe(mockLabel.name);
+ });
+ });
+
describe('fetchLabelBySearchTerm', () => {
it('calls `config.fetchLabels` with provided searchTerm param', () => {
jest.spyOn(wrapper.vm.config, 'fetchLabels');
diff --git a/spec/frontend/vue_shared/components/gl_toggle_vuex_spec.js b/spec/frontend/vue_shared/components/gl_toggle_vuex_spec.js
deleted file mode 100644
index ac670b622b1..00000000000
--- a/spec/frontend/vue_shared/components/gl_toggle_vuex_spec.js
+++ /dev/null
@@ -1,114 +0,0 @@
-import { GlToggle } from '@gitlab/ui';
-import { mount, createLocalVue } from '@vue/test-utils';
-import Vuex from 'vuex';
-import GlToggleVuex from '~/vue_shared/components/gl_toggle_vuex.vue';
-
-const localVue = createLocalVue();
-localVue.use(Vuex);
-
-describe('GlToggleVuex component', () => {
- let wrapper;
- let store;
-
- const findButton = () => wrapper.find('button');
-
- const createWrapper = (props = {}) => {
- wrapper = mount(GlToggleVuex, {
- localVue,
- store,
- propsData: {
- stateProperty: 'toggleState',
- ...props,
- },
- });
- };
-
- beforeEach(() => {
- store = new Vuex.Store({
- state: {
- toggleState: false,
- },
- actions: {
- setToggleState: ({ commit }, { key, value }) => commit('setToggleState', { key, value }),
- },
- mutations: {
- setToggleState: (state, { key, value }) => {
- state[key] = value;
- },
- },
- });
- createWrapper();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('renders gl-toggle', () => {
- expect(wrapper.find(GlToggle).exists()).toBe(true);
- });
-
- it('properly computes default value for setAction', () => {
- expect(wrapper.props('setAction')).toBe('setToggleState');
- });
-
- describe('without a store module', () => {
- it('calls action with new value when value changes', () => {
- jest.spyOn(store, 'dispatch');
-
- findButton().trigger('click');
- expect(store.dispatch).toHaveBeenCalledWith('setToggleState', {
- key: 'toggleState',
- value: true,
- });
- });
-
- it('updates store property when value changes', () => {
- findButton().trigger('click');
- expect(store.state.toggleState).toBe(true);
- });
- });
-
- describe('with a store module', () => {
- beforeEach(() => {
- store = new Vuex.Store({
- modules: {
- someModule: {
- namespaced: true,
- state: {
- toggleState: false,
- },
- actions: {
- setToggleState: ({ commit }, { key, value }) =>
- commit('setToggleState', { key, value }),
- },
- mutations: {
- setToggleState: (state, { key, value }) => {
- state[key] = value;
- },
- },
- },
- },
- });
-
- createWrapper({
- storeModule: 'someModule',
- });
- });
-
- it('calls action with new value when value changes', () => {
- jest.spyOn(store, 'dispatch');
-
- findButton().trigger('click');
- expect(store.dispatch).toHaveBeenCalledWith('someModule/setToggleState', {
- key: 'toggleState',
- value: true,
- });
- });
-
- it('updates store property when value changes', () => {
- findButton().trigger('click');
- expect(store.state.someModule.toggleState).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/vue_shared/components/help_popover_spec.js b/spec/frontend/vue_shared/components/help_popover_spec.js
index baf80a8a04e..30c6fa04032 100644
--- a/spec/frontend/vue_shared/components/help_popover_spec.js
+++ b/spec/frontend/vue_shared/components/help_popover_spec.js
@@ -27,7 +27,6 @@ describe('HelpPopover', () => {
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
it('renders a link button with an icon question', () => {
@@ -35,17 +34,12 @@ describe('HelpPopover', () => {
icon: 'question',
variant: 'link',
});
- expect(findQuestionButton().attributes().tabindex).toBe('0');
});
it('renders popover that uses the question button as target', () => {
expect(findPopover().props().target()).toBe(findQuestionButton().vm.$el);
});
- it('triggers popover on hover and focus', () => {
- expect(findPopover().props().triggers).toBe('hover focus');
- });
-
it('allows rendering title with HTML tags', () => {
expect(findPopover().find('strong').exists()).toBe(true);
});
@@ -54,6 +48,14 @@ describe('HelpPopover', () => {
expect(findPopover().find('b').exists()).toBe(true);
});
+ describe('without title', () => {
+ it('does not render title', () => {
+ buildWrapper({ title: null });
+
+ expect(findPopover().find('span').exists()).toBe(false);
+ });
+ });
+
it('binds other popover options to the popover instance', () => {
const placement = 'bottom';
diff --git a/spec/frontend/vue_shared/components/lib/utils/props_utils_spec.js b/spec/frontend/vue_shared/components/lib/utils/props_utils_spec.js
new file mode 100644
index 00000000000..f1c9fbb00c9
--- /dev/null
+++ b/spec/frontend/vue_shared/components/lib/utils/props_utils_spec.js
@@ -0,0 +1,91 @@
+import { propsUnion } from '~/vue_shared/components/lib/utils/props_utils';
+
+describe('propsUnion', () => {
+ const stringRequired = {
+ type: String,
+ required: true,
+ };
+
+ const stringOptional = {
+ type: String,
+ required: false,
+ };
+
+ const numberOptional = {
+ type: Number,
+ required: false,
+ };
+
+ const booleanRequired = {
+ type: Boolean,
+ required: true,
+ };
+
+ const FooComponent = {
+ props: { foo: stringRequired },
+ };
+
+ const BarComponent = {
+ props: { bar: numberOptional },
+ };
+
+ const FooBarComponent = {
+ props: {
+ foo: stringRequired,
+ bar: numberOptional,
+ },
+ };
+
+ const FooOptionalComponent = {
+ props: {
+ foo: stringOptional,
+ },
+ };
+
+ const QuxComponent = {
+ props: {
+ foo: booleanRequired,
+ qux: stringRequired,
+ },
+ };
+
+ it('returns an empty object given no components', () => {
+ expect(propsUnion([])).toEqual({});
+ });
+
+ it('merges non-overlapping props', () => {
+ expect(propsUnion([FooComponent, BarComponent])).toEqual({
+ ...FooComponent.props,
+ ...BarComponent.props,
+ });
+ });
+
+ it('merges overlapping props', () => {
+ expect(propsUnion([FooComponent, BarComponent, FooBarComponent])).toEqual({
+ ...FooComponent.props,
+ ...BarComponent.props,
+ ...FooBarComponent.props,
+ });
+ });
+
+ it.each`
+ components
+ ${[FooComponent, FooOptionalComponent]}
+ ${[FooOptionalComponent, FooComponent]}
+ `('prefers required props over non-required props', ({ components }) => {
+ expect(propsUnion(components)).toEqual(FooComponent.props);
+ });
+
+ it('throws if given props with conflicting types', () => {
+ expect(() => propsUnion([FooComponent, QuxComponent])).toThrow(/incompatible prop types/);
+ });
+
+ it.each`
+ components
+ ${[{ props: ['foo', 'bar'] }]}
+ ${[{ props: { foo: String, bar: Number } }]}
+ ${[{ props: { foo: {}, bar: {} } }]}
+  `('throws if given a non-verbose props object', ({ components }) => {
+ expect(() => propsUnion(components)).toThrow(/expected verbose prop/);
+ });
+});
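
The behaviour pinned down here — merge verbose prop definitions, keep the `required` variant when declarations overlap, reject conflicting types, and reject non-verbose props — could be implemented along the following lines. This sketch is only a reading of the spec above, not the code in `~/vue_shared/components/lib/utils/props_utils.js`.

// props_union_sketch.js — hypothetical re-implementation for illustration only.
const isVerbose = (prop) =>
  prop !== null && typeof prop === 'object' && typeof prop.type !== 'undefined';

export function propsUnion(components) {
  return components.reduce((union, { props = {} }) => {
    Object.entries(props).forEach(([name, prop]) => {
      if (!isVerbose(prop)) {
        throw new Error(`propsUnion: expected verbose prop definition for "${name}"`);
      }

      const existing = union[name];
      if (existing && existing.type !== prop.type) {
        throw new Error(`propsUnion: incompatible prop types for "${name}"`);
      }

      // Keep the required variant if either declaration requires the prop.
      union[name] = {
        type: prop.type,
        required: Boolean(existing?.required || prop.required),
      };
    });

    return union;
  }, {});
}
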
diff --git a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
index 5364e2d5f52..ba2450b56c9 100644
--- a/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/suggestion_diff_header_spec.js
@@ -1,5 +1,6 @@
import { GlLoadingIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import ApplySuggestion from '~/vue_shared/components/markdown/apply_suggestion.vue';
import SuggestionDiffHeader from '~/vue_shared/components/markdown/suggestion_diff_header.vue';
@@ -16,17 +17,14 @@ const DEFAULT_PROPS = {
describe('Suggestion Diff component', () => {
let wrapper;
- const createComponent = (props, glFeatures = {}) => {
+ const createComponent = (props) => {
wrapper = shallowMount(SuggestionDiffHeader, {
propsData: {
...DEFAULT_PROPS,
...props,
},
- provide: {
- glFeatures: {
- batchSuggestions: true,
- ...glFeatures,
- },
+ directives: {
+ GlTooltip: createMockDirective(),
},
});
};
@@ -211,18 +209,6 @@ describe('Suggestion Diff component', () => {
});
});
- describe('batchSuggestions feature flag is set to false', () => {
- beforeEach(() => {
- createComponent({}, { batchSuggestions: false });
- });
-
- it('disables add to batch buttons but keeps apply suggestion enabled', () => {
- expect(findApplyButton().exists()).toBe(true);
- expect(findAddToBatchButton().exists()).toBe(false);
- expect(findApplyButton().attributes('disabled')).not.toBe('true');
- });
- });
-
describe('canApply is set to false', () => {
beforeEach(() => {
createComponent({ canApply: false });
@@ -236,15 +222,23 @@ describe('Suggestion Diff component', () => {
});
describe('tooltip message for apply button', () => {
+ const findTooltip = () => getBinding(findApplyButton().element, 'gl-tooltip');
+
it('renders correct tooltip message when button is applicable', () => {
createComponent();
- expect(wrapper.vm.tooltipMessage).toBe('This also resolves this thread');
+ const tooltip = findTooltip();
+
+ expect(tooltip.modifiers.viewport).toBe(true);
+ expect(tooltip.value).toBe('This also resolves this thread');
});
it('renders the inapplicable reason in the tooltip when button is not applicable', () => {
const inapplicableReason = 'lorem';
createComponent({ canApply: false, inapplicableReason });
- expect(wrapper.vm.tooltipMessage).toBe(inapplicableReason);
+ const tooltip = findTooltip();
+
+ expect(tooltip.modifiers.viewport).toBe(true);
+ expect(tooltip.value).toBe(inapplicableReason);
});
});
});
diff --git a/spec/frontend/vue_shared/components/markdown/toolbar_spec.js b/spec/frontend/vue_shared/components/markdown/toolbar_spec.js
index e7c31014bfc..eddc4033a65 100644
--- a/spec/frontend/vue_shared/components/markdown/toolbar_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/toolbar_spec.js
@@ -1,35 +1,75 @@
-import Vue from 'vue';
-import mountComponent from 'helpers/vue_mount_component_helper';
-import toolbar from '~/vue_shared/components/markdown/toolbar.vue';
+import { mount } from '@vue/test-utils';
+import { isExperimentVariant } from '~/experimentation/utils';
+import InviteMembersTrigger from '~/invite_members/components/invite_members_trigger.vue';
+import { INVITE_MEMBERS_IN_COMMENT } from '~/invite_members/constants';
+import Toolbar from '~/vue_shared/components/markdown/toolbar.vue';
+
+jest.mock('~/experimentation/utils', () => ({ isExperimentVariant: jest.fn() }));
describe('toolbar', () => {
- let vm;
- const Toolbar = Vue.extend(toolbar);
- const props = {
- markdownDocsPath: '',
+ let wrapper;
+
+ const createMountedWrapper = (props = {}) => {
+ wrapper = mount(Toolbar, {
+ propsData: { markdownDocsPath: '', ...props },
+ stubs: { 'invite-members-trigger': true },
+ });
};
afterEach(() => {
- vm.$destroy();
+ wrapper.destroy();
+ isExperimentVariant.mockReset();
});
describe('user can attach file', () => {
beforeEach(() => {
- vm = mountComponent(Toolbar, props);
+ createMountedWrapper();
});
it('should render uploading-container', () => {
- expect(vm.$el.querySelector('.uploading-container')).not.toBeNull();
+ expect(wrapper.vm.$el.querySelector('.uploading-container')).not.toBeNull();
});
});
describe('user cannot attach file', () => {
beforeEach(() => {
- vm = mountComponent(Toolbar, { ...props, canAttachFile: false });
+ createMountedWrapper({ canAttachFile: false });
});
it('should not render uploading-container', () => {
- expect(vm.$el.querySelector('.uploading-container')).toBeNull();
+ expect(wrapper.vm.$el.querySelector('.uploading-container')).toBeNull();
+ });
+ });
+
+ describe('user can invite member', () => {
+ const findInviteLink = () => wrapper.find(InviteMembersTrigger);
+
+ beforeEach(() => {
+ isExperimentVariant.mockReturnValue(true);
+ createMountedWrapper();
+ });
+
+ it('should render the invite members trigger', () => {
+ expect(findInviteLink().exists()).toBe(true);
+ });
+
+ it('should have correct props', () => {
+ expect(findInviteLink().props().displayText).toBe('Invite Member');
+ expect(findInviteLink().props().trackExperiment).toBe(INVITE_MEMBERS_IN_COMMENT);
+ expect(findInviteLink().props().triggerSource).toBe(INVITE_MEMBERS_IN_COMMENT);
+ });
+ });
+
+  describe('user cannot invite member', () => {
+ const findInviteLink = () => wrapper.find(InviteMembersTrigger);
+
+ beforeEach(() => {
+ isExperimentVariant.mockReturnValue(false);
+ createMountedWrapper();
+ });
+
+    it('should not render the invite members trigger', () => {
+ expect(findInviteLink().exists()).toBe(false);
});
});
});
diff --git a/spec/frontend/vue_shared/components/recaptcha_eventhub_spec.js b/spec/frontend/vue_shared/components/recaptcha_eventhub_spec.js
deleted file mode 100644
index d86d627886f..00000000000
--- a/spec/frontend/vue_shared/components/recaptcha_eventhub_spec.js
+++ /dev/null
@@ -1,21 +0,0 @@
-import { eventHub, callbackName } from '~/vue_shared/components/recaptcha_eventhub';
-
-describe('reCAPTCHA event hub', () => {
- // the following test case currently crashes
- // see https://gitlab.com/gitlab-org/gitlab/issues/29192#note_217840035
- // eslint-disable-next-line jest/no-disabled-tests
- it.skip('throws an error for overriding the callback', () => {
- expect(() => {
- window[callbackName] = 'something';
- }).toThrow();
- });
-
- it('triggering callback emits a submit event', () => {
- const eventHandler = jest.fn();
- eventHub.$once('submit', eventHandler);
-
- window[callbackName]();
-
- expect(eventHandler).toHaveBeenCalled();
- });
-});
diff --git a/spec/frontend/vue_shared/components/recaptcha_modal_spec.js b/spec/frontend/vue_shared/components/recaptcha_modal_spec.js
deleted file mode 100644
index 8ab65efd388..00000000000
--- a/spec/frontend/vue_shared/components/recaptcha_modal_spec.js
+++ /dev/null
@@ -1,35 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-
-import { eventHub } from '~/vue_shared/components/recaptcha_eventhub';
-
-import RecaptchaModal from '~/vue_shared/components/recaptcha_modal.vue';
-
-describe('RecaptchaModal', () => {
- const recaptchaFormId = 'recaptcha-form';
- const recaptchaHtml = `<form id="${recaptchaFormId}"></form>`;
-
- let wrapper;
-
- const findRecaptchaForm = () => wrapper.find(`#${recaptchaFormId}`).element;
-
- beforeEach(() => {
- wrapper = shallowMount(RecaptchaModal, {
- propsData: {
- html: recaptchaHtml,
- },
- });
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('submits the form if event hub emits submit event', () => {
- const form = findRecaptchaForm();
- jest.spyOn(form, 'submit').mockImplementation();
-
- eventHub.$emit('submit');
-
- expect(form.submit).toHaveBeenCalled();
- });
-});
diff --git a/spec/frontend/vue_shared/components/registry/registry_search_spec.js b/spec/frontend/vue_shared/components/registry/registry_search_spec.js
index 28bdb275756..f5ef5b3d443 100644
--- a/spec/frontend/vue_shared/components/registry/registry_search_spec.js
+++ b/spec/frontend/vue_shared/components/registry/registry_search_spec.js
@@ -1,5 +1,6 @@
import { GlSorting, GlSortingItem, GlFilteredSearch } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import { FILTERED_SEARCH_TERM } from '~/packages_and_registries/shared/constants';
import component from '~/vue_shared/components/registry/registry_search.vue';
describe('Registry Search', () => {
@@ -12,8 +13,18 @@ describe('Registry Search', () => {
const defaultProps = {
filter: [],
sorting: { sort: 'asc', orderBy: 'name' },
- tokens: ['foo'],
- sortableFields: [{ label: 'name', orderBy: 'name' }, { label: 'baz' }],
+ tokens: [{ type: 'foo' }],
+ sortableFields: [
+ { label: 'name', orderBy: 'name' },
+ { label: 'baz', orderBy: 'bar' },
+ ],
+ };
+
+ const defaultQueryChangedPayload = {
+ foo: '',
+ orderBy: 'name',
+ search: [],
+ sort: 'asc',
};
const mountComponent = (propsData = defaultProps) => {
@@ -55,20 +66,22 @@ describe('Registry Search', () => {
expect(wrapper.emitted('filter:changed')).toEqual([['foo']]);
});
- it('emits filter:submit on submit event', () => {
+ it('emits filter:submit and query:changed on submit event', () => {
mountComponent();
findFilteredSearch().vm.$emit('submit');
expect(wrapper.emitted('filter:submit')).toEqual([[]]);
+ expect(wrapper.emitted('query:changed')).toEqual([[defaultQueryChangedPayload]]);
});
- it('emits filter:changed and filter:submit on clear event', () => {
+ it('emits filter:changed, filter:submit and query:changed on clear event', () => {
mountComponent();
findFilteredSearch().vm.$emit('clear');
expect(wrapper.emitted('filter:changed')).toEqual([[[]]]);
expect(wrapper.emitted('filter:submit')).toEqual([[]]);
+ expect(wrapper.emitted('query:changed')).toEqual([[defaultQueryChangedPayload]]);
});
it('binds tokens prop', () => {
@@ -90,15 +103,47 @@ describe('Registry Search', () => {
findPackageListSorting().vm.$emit('sortDirectionChange');
expect(wrapper.emitted('sorting:changed')).toEqual([[{ sort: 'desc' }]]);
+ expect(wrapper.emitted('query:changed')).toEqual([
+ [{ ...defaultQueryChangedPayload, sort: 'desc' }],
+ ]);
});
it('on sort item click emits sorting:changed event ', () => {
mountComponent();
- findSortingItems().at(0).vm.$emit('click');
+ findSortingItems().at(1).vm.$emit('click');
expect(wrapper.emitted('sorting:changed')).toEqual([
- [{ orderBy: defaultProps.sortableFields[0].orderBy }],
+ [{ orderBy: defaultProps.sortableFields[1].orderBy }],
+ ]);
+ expect(wrapper.emitted('query:changed')).toEqual([
+ [{ ...defaultQueryChangedPayload, orderBy: 'bar' }],
+ ]);
+ });
+ });
+
+ describe('query string calculation', () => {
+ const filter = [
+ { type: FILTERED_SEARCH_TERM, value: { data: 'one' } },
+ { type: FILTERED_SEARCH_TERM, value: { data: 'two' } },
+ { type: 'typeOne', value: { data: 'value_one' } },
+ { type: 'typeTwo', value: { data: 'value_two' } },
+ ];
+
+ it('aggregates the filter in the correct object', () => {
+ mountComponent({ ...defaultProps, filter });
+
+ findFilteredSearch().vm.$emit('submit');
+
+ expect(wrapper.emitted('query:changed')).toEqual([
+ [
+ {
+ ...defaultQueryChangedPayload,
+ search: ['one', 'two'],
+ typeOne: 'value_one',
+ typeTwo: 'value_two',
+ },
+ ],
]);
});
});
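
The `query:changed` payloads asserted above follow a simple aggregation: every configured token starts with an empty placeholder, free-text `FILTERED_SEARCH_TERM` entries are collected into `search`, typed tokens overwrite their placeholder, and the current sorting is merged in. Below is a hypothetical standalone version of that aggregation, using the same constant import as the spec; it is a sketch of the asserted behaviour, not the code in `registry_search.vue`.

// registry_query_sketch.js — illustrative aggregation matching the expected payloads.
import { FILTERED_SEARCH_TERM } from '~/packages_and_registries/shared/constants';

export const buildQueryPayload = ({ filter, tokens, sorting }) => {
  // Start with an empty entry per configured token plus the current sorting.
  const payload = tokens.reduce((acc, { type }) => ({ ...acc, [type]: '' }), {
    ...sorting,
    search: [],
  });

  return filter.reduce((acc, { type, value }) => {
    if (type === FILTERED_SEARCH_TERM) {
      // Free-text terms are collected into the `search` array.
      return { ...acc, search: [...acc.search, value.data] };
    }
    // Typed tokens overwrite their placeholder with the selected value.
    return { ...acc, [type]: value.data };
  }, payload);
};
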
diff --git a/spec/frontend/vue_shared/components/remove_member_modal_spec.js b/spec/frontend/vue_shared/components/remove_member_modal_spec.js
index 78fe6d53eee..ce9de28d53c 100644
--- a/spec/frontend/vue_shared/components/remove_member_modal_spec.js
+++ b/spec/frontend/vue_shared/components/remove_member_modal_spec.js
@@ -1,13 +1,25 @@
-import { GlFormCheckbox, GlModal } from '@gitlab/ui';
+import { GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
+import OncallSchedulesList from '~/vue_shared/components/oncall_schedules_list.vue';
import RemoveMemberModal from '~/vue_shared/components/remove_member_modal.vue';
+const mockSchedules = JSON.stringify({
+ schedules: [
+ {
+ id: 1,
+ name: 'Schedule 1',
+ },
+ ],
+ name: 'User1',
+});
+
describe('RemoveMemberModal', () => {
const memberPath = '/gitlab-org/gitlab-test/-/project_members/90';
let wrapper;
const findForm = () => wrapper.find({ ref: 'form' });
- const findGlModal = () => wrapper.find(GlModal);
+ const findGlModal = () => wrapper.findComponent(GlModal);
+ const findOnCallSchedulesList = () => wrapper.findComponent(OncallSchedulesList);
afterEach(() => {
wrapper.destroy();
@@ -15,26 +27,43 @@ describe('RemoveMemberModal', () => {
});
describe.each`
- state | isAccessRequest | actionText | checkboxTestDescription | checkboxExpected | message
- ${'removing a member'} | ${'false'} | ${'Remove member'} | ${'shows a checkbox to allow removal from related issues and MRs'} | ${true} | ${'Are you sure you want to remove Jane Doe from the Gitlab Org / Gitlab Test project?'}
- ${'denying an access request'} | ${'true'} | ${'Deny access request'} | ${'does not show a checkbox'} | ${false} | ${"Are you sure you want to deny Jane Doe's request to join the Gitlab Org / Gitlab Test project?"}
+ state | memberType | isAccessRequest | isInvite | actionText | removeSubMembershipsCheckboxExpected | unassignIssuablesCheckboxExpected | message | onCallSchedules
+ ${'removing a group member'} | ${'GroupMember'} | ${false} | ${'false'} | ${'Remove member'} | ${true} | ${true} | ${'Are you sure you want to remove Jane Doe from the Gitlab Org / Gitlab Test project?'} | ${`{}`}
+ ${'removing a project member'} | ${'ProjectMember'} | ${false} | ${'false'} | ${'Remove member'} | ${false} | ${true} | ${'Are you sure you want to remove Jane Doe from the Gitlab Org / Gitlab Test project?'} | ${mockSchedules}
+ ${'denying an access request'} | ${'ProjectMember'} | ${true} | ${'false'} | ${'Deny access request'} | ${false} | ${false} | ${"Are you sure you want to deny Jane Doe's request to join the Gitlab Org / Gitlab Test project?"} | ${`{}`}
+ ${'revoking invite'} | ${'ProjectMember'} | ${false} | ${'true'} | ${'Revoke invite'} | ${false} | ${false} | ${'Are you sure you want to revoke the invitation for foo@bar.com to join the Gitlab Org / Gitlab Test project?'} | ${mockSchedules}
`(
'when $state',
- ({ actionText, isAccessRequest, message, checkboxTestDescription, checkboxExpected }) => {
+ ({
+ actionText,
+ memberType,
+ isAccessRequest,
+ isInvite,
+ message,
+ removeSubMembershipsCheckboxExpected,
+ unassignIssuablesCheckboxExpected,
+ onCallSchedules,
+ }) => {
beforeEach(() => {
wrapper = shallowMount(RemoveMemberModal, {
data() {
return {
modalData: {
isAccessRequest,
+ isInvite,
message,
memberPath,
+ memberType,
+ onCallSchedules,
},
};
},
});
});
+ const parsedSchedules = JSON.parse(onCallSchedules);
+ const isPartOfOncallSchedules = Boolean(isAccessRequest && parsedSchedules.schedules?.length);
+
it(`has the title ${actionText}`, () => {
expect(findGlModal().attributes('title')).toBe(actionText);
});
@@ -47,8 +76,24 @@ describe('RemoveMemberModal', () => {
expect(wrapper.find('[data-testid=modal-message]').text()).toBe(message);
});
- it(`${checkboxTestDescription}`, () => {
- expect(wrapper.find(GlFormCheckbox).exists()).toBe(checkboxExpected);
+ it(`shows ${
+ removeSubMembershipsCheckboxExpected ? 'a' : 'no'
+ } checkbox to remove direct memberships of subgroups/projects`, () => {
+ expect(wrapper.find('[name=remove_sub_memberships]').exists()).toBe(
+ removeSubMembershipsCheckboxExpected,
+ );
+ });
+
+ it(`shows ${
+ unassignIssuablesCheckboxExpected ? 'a' : 'no'
+ } checkbox to allow removal from related issues and MRs`, () => {
+ expect(wrapper.find('[name=unassign_issuables]').exists()).toBe(
+ unassignIssuablesCheckboxExpected,
+ );
+ });
+
+ it(`shows ${isPartOfOncallSchedules ? 'all' : 'no'} related on-call schedules`, () => {
+ expect(findOnCallSchedulesList().exists()).toBe(isPartOfOncallSchedules);
});
it('submits the form when the modal is submitted', () => {
diff --git a/spec/frontend/vue_shared/components/runner_instructions/mock_data.js b/spec/frontend/vue_shared/components/runner_instructions/mock_data.js
index 01f7f3d49c7..bc1545014d7 100644
--- a/spec/frontend/vue_shared/components/runner_instructions/mock_data.js
+++ b/spec/frontend/vue_shared/components/runner_instructions/mock_data.js
@@ -98,9 +98,21 @@ export const mockGraphqlInstructions = {
data: {
runnerSetup: {
installInstructions:
- "# Download the binary for your system\nsudo curl -L --output /usr/local/bin/gitlab-runner https://gitlab-runner-downloads.s3.amazonaws.com/latest/binaries/gitlab-runner-linux-amd64\n\n# Give it permissions to execute\nsudo chmod +x /usr/local/bin/gitlab-runner\n\n# Create a GitLab CI user\nsudo useradd --comment 'GitLab Runner' --create-home gitlab-runner --shell /bin/bash\n\n# Install and run as service\nsudo gitlab-runner install --user=gitlab-runner --working-directory=/home/gitlab-runner\nsudo gitlab-runner start\n",
+ '# Install and run as service\nsudo gitlab-runner install --user=gitlab-runner --working-directory=/home/gitlab-runner\nsudo gitlab-runner start',
registerInstructions:
- 'sudo gitlab-runner register --url http://192.168.1.81:3000/ --registration-token GE5gsjeep_HAtBf9s3Yz',
+ 'sudo gitlab-runner register --url http://gdk.test:3000/ --registration-token $REGISTRATION_TOKEN',
+ __typename: 'RunnerSetup',
+ },
+ },
+};
+
+export const mockGraphqlInstructionsWindows = {
+ data: {
+ runnerSetup: {
+ installInstructions:
+ '# Windows runner, then run\n.gitlab-runner.exe install\n.gitlab-runner.exe start',
+ registerInstructions:
+ './gitlab-runner.exe register --url http://gdk.test:3000/ --registration-token $REGISTRATION_TOKEN',
__typename: 'RunnerSetup',
},
},
diff --git a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
new file mode 100644
index 00000000000..4033c943b82
--- /dev/null
+++ b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
@@ -0,0 +1,184 @@
+import { GlAlert, GlLoadingIcon, GlSkeletonLoader } from '@gitlab/ui';
+import { shallowMount, createLocalVue } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import getRunnerPlatformsQuery from '~/vue_shared/components/runner_instructions/graphql/queries/get_runner_platforms.query.graphql';
+import getRunnerSetupInstructionsQuery from '~/vue_shared/components/runner_instructions/graphql/queries/get_runner_setup.query.graphql';
+import RunnerInstructionsModal from '~/vue_shared/components/runner_instructions/runner_instructions_modal.vue';
+
+import {
+ mockGraphqlRunnerPlatforms,
+ mockGraphqlInstructions,
+ mockGraphqlInstructionsWindows,
+} from './mock_data';
+
+const localVue = createLocalVue();
+localVue.use(VueApollo);
+
+describe('RunnerInstructionsModal component', () => {
+ let wrapper;
+ let fakeApollo;
+ let runnerPlatformsHandler;
+ let runnerSetupInstructionsHandler;
+
+ const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
+ const findGlLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findPlatformButtons = () => wrapper.findAllByTestId('platform-button');
+ const findArchitectureDropdownItems = () => wrapper.findAllByTestId('architecture-dropdown-item');
+ const findBinaryInstructions = () => wrapper.findByTestId('binary-instructions');
+ const findRegisterCommand = () => wrapper.findByTestId('register-command');
+
+ const createComponent = () => {
+ const requestHandlers = [
+ [getRunnerPlatformsQuery, runnerPlatformsHandler],
+ [getRunnerSetupInstructionsQuery, runnerSetupInstructionsHandler],
+ ];
+
+ fakeApollo = createMockApollo(requestHandlers);
+
+ wrapper = extendedWrapper(
+ shallowMount(RunnerInstructionsModal, {
+ propsData: {
+ modalId: 'runner-instructions-modal',
+ },
+ localVue,
+ apolloProvider: fakeApollo,
+ }),
+ );
+ };
+
+ beforeEach(async () => {
+ runnerPlatformsHandler = jest.fn().mockResolvedValue(mockGraphqlRunnerPlatforms);
+ runnerSetupInstructionsHandler = jest.fn().mockResolvedValue(mockGraphqlInstructions);
+
+ createComponent();
+
+ await nextTick();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('should not show alert', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('should contain a number of platforms buttons', () => {
+ expect(runnerPlatformsHandler).toHaveBeenCalledWith({});
+
+ const buttons = findPlatformButtons();
+
+ expect(buttons).toHaveLength(mockGraphqlRunnerPlatforms.data.runnerPlatforms.nodes.length);
+ });
+
+ it('should contain a number of dropdown items for the architecture options', () => {
+ expect(findArchitectureDropdownItems()).toHaveLength(
+ mockGraphqlRunnerPlatforms.data.runnerPlatforms.nodes[0].architectures.nodes.length,
+ );
+ });
+
+ describe('should display default instructions', () => {
+ const { installInstructions, registerInstructions } = mockGraphqlInstructions.data.runnerSetup;
+
+ it('runner instructions are requested', () => {
+ expect(runnerSetupInstructionsHandler).toHaveBeenCalledWith({
+ platform: 'linux',
+ architecture: 'amd64',
+ });
+ });
+
+ it('binary instructions are shown', () => {
+ const instructions = findBinaryInstructions().text();
+
+ expect(instructions).toBe(installInstructions);
+ });
+
+ it('register command is shown', () => {
+ const instructions = findRegisterCommand().text();
+
+ expect(instructions).toBe(registerInstructions);
+ });
+ });
+
+ describe('after a platform and architecture are selected', () => {
+ const {
+ installInstructions,
+ registerInstructions,
+ } = mockGraphqlInstructionsWindows.data.runnerSetup;
+
+ beforeEach(async () => {
+ runnerSetupInstructionsHandler.mockResolvedValue(mockGraphqlInstructionsWindows);
+
+      findPlatformButtons().at(2).vm.$emit('click'); // another option; happens to be Windows
+ await nextTick();
+
+ findArchitectureDropdownItems().at(1).vm.$emit('click'); // another option
+ await nextTick();
+ });
+
+ it('runner instructions are requested', () => {
+ expect(runnerSetupInstructionsHandler).toHaveBeenCalledWith({
+ platform: 'windows',
+ architecture: '386',
+ });
+ });
+
+ it('other binary instructions are shown', () => {
+ const instructions = findBinaryInstructions().text();
+
+ expect(instructions).toBe(installInstructions);
+ });
+
+ it('register command is shown', () => {
+ const command = findRegisterCommand().text();
+
+ expect(command).toBe(registerInstructions);
+ });
+ });
+
+ describe('when apollo is loading', () => {
+ it('should show a skeleton loader', async () => {
+ createComponent();
+ expect(findSkeletonLoader().exists()).toBe(true);
+ expect(findGlLoadingIcon().exists()).toBe(false);
+
+ await nextTick(); // wait for platforms
+
+ expect(findGlLoadingIcon().exists()).toBe(true);
+ });
+
+ it('once loaded, should not show a loading state', async () => {
+ createComponent();
+
+ await nextTick(); // wait for platforms
+ await nextTick(); // wait for architectures
+
+ expect(findSkeletonLoader().exists()).toBe(false);
+ expect(findGlLoadingIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('when instructions cannot be loaded', () => {
+ beforeEach(async () => {
+ runnerSetupInstructionsHandler.mockRejectedValue();
+
+ createComponent();
+
+ await waitForPromises();
+ });
+
+ it('should show alert', () => {
+ expect(findAlert().exists()).toBe(true);
+ });
+
+ it('should not show instructions', () => {
+ expect(findBinaryInstructions().exists()).toBe(false);
+ expect(findRegisterCommand().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js
index 48db60bfd33..23f8d6afcb5 100644
--- a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js
+++ b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_spec.js
@@ -1,113 +1,41 @@
-import { shallowMount, createLocalVue } from '@vue/test-utils';
-import VueApollo from 'vue-apollo';
-import createMockApollo from 'helpers/mock_apollo_helper';
-import getRunnerPlatforms from '~/vue_shared/components/runner_instructions/graphql/queries/get_runner_platforms.query.graphql';
-import getRunnerSetupInstructions from '~/vue_shared/components/runner_instructions/graphql/queries/get_runner_setup.query.graphql';
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import RunnerInstructions from '~/vue_shared/components/runner_instructions/runner_instructions.vue';
-
-import { mockGraphqlRunnerPlatforms, mockGraphqlInstructions } from './mock_data';
-
-const projectPath = 'gitlab-org/gitlab';
-const localVue = createLocalVue();
-localVue.use(VueApollo);
+import RunnerInstructionsModal from '~/vue_shared/components/runner_instructions/runner_instructions_modal.vue';
describe('RunnerInstructions component', () => {
let wrapper;
- let fakeApollo;
-
- const findModalButton = () => wrapper.find('[data-testid="show-modal-button"]');
- const findPlatformButtons = () => wrapper.findAll('[data-testid="platform-button"]');
- const findArchitectureDropdownItems = () =>
- wrapper.findAll('[data-testid="architecture-dropdown-item"]');
- const findBinaryInstructionsSection = () => wrapper.find('[data-testid="binary-instructions"]');
- const findRunnerInstructionsSection = () => wrapper.find('[data-testid="runner-instructions"]');
- beforeEach(async () => {
- const requestHandlers = [
- [getRunnerPlatforms, jest.fn().mockResolvedValue(mockGraphqlRunnerPlatforms)],
- [getRunnerSetupInstructions, jest.fn().mockResolvedValue(mockGraphqlInstructions)],
- ];
+ const findModalButton = () => wrapper.findByTestId('show-modal-button');
+ const findModal = () => wrapper.findComponent(RunnerInstructionsModal);
- fakeApollo = createMockApollo(requestHandlers);
+ const createComponent = () => {
+ wrapper = extendedWrapper(shallowMount(RunnerInstructions));
+ };
- wrapper = shallowMount(RunnerInstructions, {
- provide: {
- projectPath,
- },
- localVue,
- apolloProvider: fakeApollo,
- });
-
- await wrapper.vm.$nextTick();
+ beforeEach(() => {
+ createComponent();
});
afterEach(() => {
wrapper.destroy();
- wrapper = null;
});
it('should show the "Show Runner installation instructions" button', () => {
- const button = findModalButton();
-
- expect(button.exists()).toBe(true);
- expect(button.text()).toBe('Show Runner installation instructions');
- });
-
- it('should contain a number of platforms buttons', () => {
- const buttons = findPlatformButtons();
-
- expect(buttons).toHaveLength(mockGraphqlRunnerPlatforms.data.runnerPlatforms.nodes.length);
- });
-
- it('should contain a number of dropdown items for the architecture options', () => {
- const platformButton = findPlatformButtons().at(0);
- platformButton.vm.$emit('click');
-
- return wrapper.vm.$nextTick(() => {
- const dropdownItems = findArchitectureDropdownItems();
-
- expect(dropdownItems).toHaveLength(
- mockGraphqlRunnerPlatforms.data.runnerPlatforms.nodes[0].architectures.nodes.length,
- );
- });
+ expect(findModalButton().exists()).toBe(true);
+ expect(findModalButton().text()).toBe('Show Runner installation instructions');
});
- it('should display the binary installation instructions for a selected architecture', async () => {
- const platformButton = findPlatformButtons().at(0);
- platformButton.vm.$emit('click');
-
- await wrapper.vm.$nextTick();
-
- const dropdownItem = findArchitectureDropdownItems().at(0);
- dropdownItem.vm.$emit('click');
-
- await wrapper.vm.$nextTick();
-
- const runner = findBinaryInstructionsSection();
-
- expect(runner.text()).toMatch('sudo chmod +x /usr/local/bin/gitlab-runner');
- expect(runner.text()).toMatch(
- `sudo useradd --comment 'GitLab Runner' --create-home gitlab-runner --shell /bin/bash`,
- );
- expect(runner.text()).toMatch(
- 'sudo gitlab-runner install --user=gitlab-runner --working-directory=/home/gitlab-runner',
- );
- expect(runner.text()).toMatch('sudo gitlab-runner start');
+ it('should not render the modal once mounted', () => {
+ expect(findModal().exists()).toBe(false);
});
- it('should display the runner register instructions for a selected architecture', async () => {
- const platformButton = findPlatformButtons().at(0);
- platformButton.vm.$emit('click');
-
- await wrapper.vm.$nextTick();
-
- const dropdownItem = findArchitectureDropdownItems().at(0);
- dropdownItem.vm.$emit('click');
-
- await wrapper.vm.$nextTick();
+ it('should render the modal once clicked', async () => {
+ findModalButton().vm.$emit('click');
- const runner = findRunnerInstructionsSection();
+ await nextTick();
- expect(runner.text()).toMatch(mockGraphqlInstructions.data.runnerSetup.registerInstructions);
+ expect(findModal().exists()).toBe(true);
});
});
diff --git a/spec/frontend/vue_shared/components/sidebar/copyable_field_spec.js b/spec/frontend/vue_shared/components/sidebar/copyable_field_spec.js
new file mode 100644
index 00000000000..b99b1a66b79
--- /dev/null
+++ b/spec/frontend/vue_shared/components/sidebar/copyable_field_spec.js
@@ -0,0 +1,74 @@
+import { GlLoadingIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import ClipboardButton from '~/vue_shared/components/clipboard_button.vue';
+import CopyableField from '~/vue_shared/components/sidebar/copyable_field.vue';
+
+describe('SidebarCopyableField', () => {
+ let wrapper;
+
+ const defaultProps = {
+ value: 'Gl-1',
+ name: 'Reference',
+ };
+
+ const createComponent = (propsData = defaultProps) => {
+ wrapper = shallowMount(CopyableField, {
+ propsData,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findClipboardButton = () => wrapper.findComponent(ClipboardButton);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+
+ describe('template', () => {
+ describe('when `isLoading` prop is `false`', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders copyable field', () => {
+ expect(wrapper.text()).toContain('Reference: Gl-1');
+ });
+
+ it('renders ClipboardButton with correct props', () => {
+ const clipboardButton = findClipboardButton();
+
+ expect(clipboardButton.exists()).toBe(true);
+ expect(clipboardButton.props('title')).toBe(`Copy ${defaultProps.name}`);
+ expect(clipboardButton.props('text')).toBe(defaultProps.value);
+ });
+
+ it('does not render loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('when `isLoading` prop is `true`', () => {
+ beforeEach(() => {
+ createComponent({ ...defaultProps, isLoading: true });
+ });
+
+ it('renders loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ expect(findLoadingIcon().props('label')).toBe('Loading Reference');
+ });
+
+ it('does not render clipboard button', () => {
+ expect(findClipboardButton().exists()).toBe(false);
+ });
+ });
+
+ describe('with `clipboardTooltipText` prop', () => {
+ it('sets ClipboardButton `title` prop to `clipboardTooltipText` value', () => {
+ const mockClipboardTooltipText = 'Copy my custom value';
+ createComponent({ ...defaultProps, clipboardTooltipText: mockClipboardTooltipText });
+
+ expect(findClipboardButton().props('title')).toBe(mockClipboardTooltipText);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/url_sync_spec.js b/spec/frontend/vue_shared/components/url_sync_spec.js
new file mode 100644
index 00000000000..86bbc146c5f
--- /dev/null
+++ b/spec/frontend/vue_shared/components/url_sync_spec.js
@@ -0,0 +1,97 @@
+import { shallowMount } from '@vue/test-utils';
+import setWindowLocation from 'helpers/set_window_location_helper';
+import { historyPushState } from '~/lib/utils/common_utils';
+import { mergeUrlParams } from '~/lib/utils/url_utility';
+import UrlSyncComponent from '~/vue_shared/components/url_sync.vue';
+
+jest.mock('~/lib/utils/url_utility', () => ({
+ mergeUrlParams: jest.fn((query, url) => `urlParams: ${query} ${url}`),
+}));
+
+jest.mock('~/lib/utils/common_utils', () => ({
+ historyPushState: jest.fn(),
+}));
+
+describe('url sync component', () => {
+ let wrapper;
+ const mockQuery = { group_id: '5014437163714', project_ids: ['5014437608314'] };
+ const TEST_HOST = 'http://testhost/';
+
+ setWindowLocation(TEST_HOST);
+
+ const findButton = () => wrapper.find('button');
+
+ const createComponent = ({ query = mockQuery, scopedSlots, slots } = {}) => {
+ wrapper = shallowMount(UrlSyncComponent, {
+ propsData: { query },
+ scopedSlots,
+ slots,
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const expectUrlSync = (query, times, mergeUrlParamsReturnValue) => {
+ expect(mergeUrlParams).toHaveBeenCalledTimes(times);
+ expect(mergeUrlParams).toHaveBeenCalledWith(query, TEST_HOST, { spreadArrays: true });
+
+ expect(historyPushState).toHaveBeenCalledTimes(times);
+ expect(historyPushState).toHaveBeenCalledWith(mergeUrlParamsReturnValue);
+ };
+
+ describe('with query as a props', () => {
+ it('immediately syncs the query to the URL', () => {
+ createComponent();
+
+ expectUrlSync(mockQuery, 1, mergeUrlParams.mock.results[0].value);
+ });
+
+ describe('when the query is modified', () => {
+ const newQuery = { foo: true };
+
+ it('updates the URL with the new query', async () => {
+ createComponent();
+ // using setProps to test the watcher
+ await wrapper.setProps({ query: newQuery });
+
+ expectUrlSync(mockQuery, 2, mergeUrlParams.mock.results[1].value);
+ });
+ });
+ });
+
+ describe('with scoped slot', () => {
+ const scopedSlots = {
+ default: `
+ <button @click="props.updateQuery({bar: 'baz'})">Update Query </button>
+ `,
+ };
+
+ it('renders the scoped slot', () => {
+ createComponent({ query: null, scopedSlots });
+
+ expect(findButton().exists()).toBe(true);
+ });
+
+ it('syncs the url with the scoped slots function', () => {
+ createComponent({ query: null, scopedSlots });
+
+ findButton().trigger('click');
+
+ expectUrlSync({ bar: 'baz' }, 1, mergeUrlParams.mock.results[0].value);
+ });
+ });
+
+ describe('with slot', () => {
+ const slots = {
+ default: '<button>Normal Slot</button>',
+ };
+
+ it('renders the default slot', () => {
+ createComponent({ query: null, slots });
+
+ expect(findButton().exists()).toBe(true);
+ });
+ });
+});
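
Taken together, these tests describe a thin syncing component: it pushes `query` into the URL immediately and on every change via `mergeUrlParams(..., { spreadArrays: true })` plus `historyPushState`, and exposes an `updateQuery` callback through its scoped slot. A minimal sketch with those semantics follows; it is illustrative and assumes a single-root slot, not the actual `url_sync.vue` source.

// url_sync_sketch.js — illustrative only; imports match the ones mocked in the spec.
import { historyPushState } from '~/lib/utils/common_utils';
import { mergeUrlParams } from '~/lib/utils/url_utility';

export default {
  props: {
    query: { type: Object, required: false, default: null },
  },
  watch: {
    query: {
      immediate: true, // sync as soon as the component is created
      deep: true,
      handler(newQuery) {
        if (newQuery) this.updateQuery(newQuery);
      },
    },
  },
  methods: {
    updateQuery(newQuery) {
      // Merge the query into the current URL and push it onto the history.
      historyPushState(mergeUrlParams(newQuery, window.location.href, { spreadArrays: true }));
    },
  },
  render() {
    // Renderless: hand updateQuery to the (single-root) slot content.
    return this.$scopedSlots.default
      ? this.$scopedSlots.default({ updateQuery: this.updateQuery })
      : this.$slots.default;
  },
};
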
diff --git a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
index 184a1e458b5..87fe8619f28 100644
--- a/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
+++ b/spec/frontend/vue_shared/components/user_popover/user_popover_spec.js
@@ -1,4 +1,4 @@
-import { GlDeprecatedSkeletonLoading as GlSkeletonLoading, GlSprintf, GlIcon } from '@gitlab/ui';
+import { GlSkeletonLoader, GlSprintf, GlIcon } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import { AVAILABILITY_STATUS } from '~/set_status_modal/utils';
import UserNameWithStatus from '~/sidebar/components/assignees/user_name_with_status.vue';
@@ -52,7 +52,7 @@ describe('User Popover Component', () => {
};
describe('when user is loading', () => {
- it('displays skeleton loaders', () => {
+ it('displays skeleton loader', () => {
createWrapper({
user: {
name: null,
@@ -65,7 +65,7 @@ describe('User Popover Component', () => {
},
});
- expect(wrapper.findAll(GlSkeletonLoading)).toHaveLength(4);
+ expect(wrapper.find(GlSkeletonLoader).exists()).toBe(true);
});
});
diff --git a/spec/frontend/vue_shared/oncall_schedules_list_spec.js b/spec/frontend/vue_shared/oncall_schedules_list_spec.js
new file mode 100644
index 00000000000..5c30809c09b
--- /dev/null
+++ b/spec/frontend/vue_shared/oncall_schedules_list_spec.js
@@ -0,0 +1,87 @@
+import { GlLink, GlSprintf } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import OncallSchedulesList from '~/vue_shared/components/oncall_schedules_list.vue';
+
+const mockSchedules = [
+ {
+ name: 'Schedule 1',
+ scheduleUrl: 'http://gitlab.com/gitlab-org/gitlab-shell/-/oncall_schedules',
+ projectName: 'Shell',
+ projectUrl: 'http://gitlab.com/gitlab-org/gitlab-shell/',
+ },
+ {
+ name: 'Schedule 2',
+ scheduleUrl: 'http://gitlab.com/gitlab-org/gitlab-ui/-/oncall_schedules',
+ projectName: 'UI',
+ projectUrl: 'http://gitlab.com/gitlab-org/gitlab-ui/',
+ },
+];
+
+const userName = 'User 1';
+
+describe('On-call schedules list', () => {
+ let wrapper;
+
+ function createComponent(props) {
+ wrapper = extendedWrapper(
+ shallowMount(OncallSchedulesList, {
+ propsData: {
+ schedules: mockSchedules,
+ userName,
+ ...props,
+ },
+ stubs: {
+ GlSprintf,
+ },
+ }),
+ );
+ }
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findLinks = () => wrapper.findAllComponents(GlLink);
+ const findTitle = () => wrapper.findByTestId('title');
+ const findFooter = () => wrapper.findByTestId('footer');
+ const findSchedules = () => wrapper.findByTestId('schedules-list');
+
+ describe.each`
+ isCurrentUser | titleText | footerText
+ ${true} | ${'You are currently a part of:'} | ${'Removing yourself may put your on-call team at risk of missing a notification.'}
+ ${false} | ${`User ${userName} is currently part of:`} | ${'Removing this user may put their on-call team at risk of missing a notification.'}
+ `('when current user ', ({ isCurrentUser, titleText, footerText }) => {
+ it(`${isCurrentUser ? 'is' : 'is not'} a part of on-call schedule`, async () => {
+ createComponent({
+ isCurrentUser,
+ });
+
+ expect(findTitle().text()).toBe(titleText);
+ expect(findFooter().text()).toBe(footerText);
+ });
+ });
+
+ describe.each(mockSchedules)(
+ 'renders each on-call schedule data',
+ ({ name, scheduleUrl, projectName, projectUrl }) => {
+ beforeEach(() => {
+ createComponent({ schedules: [{ name, scheduleUrl, projectName, projectUrl }] });
+ });
+
+ it(`renders schedule ${name}'s name and link`, () => {
+ const msg = findSchedules().text();
+
+ expect(msg).toContain(`On-call schedule ${name}`);
+ expect(findLinks().at(0).attributes('href')).toBe(scheduleUrl);
+ });
+
+ it(`renders project ${projectName}'s name and link`, () => {
+ const msg = findSchedules().text();
+
+ expect(msg).toContain(`in Project ${projectName}`);
+ expect(findLinks().at(1).attributes('href')).toBe(projectUrl);
+ });
+ },
+ );
+});
diff --git a/spec/frontend/whats_new/components/app_spec.js b/spec/frontend/whats_new/components/app_spec.js
index ad062d04140..45c4682208b 100644
--- a/spec/frontend/whats_new/components/app_spec.js
+++ b/spec/frontend/whats_new/components/app_spec.js
@@ -1,4 +1,4 @@
-import { GlDrawer, GlInfiniteScroll, GlTabs } from '@gitlab/ui';
+import { GlDrawer, GlInfiniteScroll } from '@gitlab/ui';
import { createLocalVue, mount } from '@vue/test-utils';
import Vuex from 'vuex';
import { mockTracking, unmockTracking, triggerEvent } from 'helpers/tracking_helper';
@@ -21,12 +21,9 @@ describe('App', () => {
let actions;
let state;
let trackingSpy;
- let gitlabDotCom = true;
const buildProps = () => ({
- storageKey: 'storage-key',
- versions: ['3.11', '3.10'],
- gitlabDotCom,
+ versionDigest: 'version-digest',
});
const buildWrapper = () => {
@@ -91,7 +88,7 @@ describe('App', () => {
});
it('dispatches openDrawer and tracking calls when mounted', () => {
- expect(actions.openDrawer).toHaveBeenCalledWith(expect.any(Object), 'storage-key');
+ expect(actions.openDrawer).toHaveBeenCalledWith(expect.any(Object), 'version-digest');
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_whats_new_drawer', {
label: 'namespace_id',
value: 'namespace-840',
@@ -176,54 +173,4 @@ describe('App', () => {
);
});
});
-
- describe('self managed', () => {
- const findTabs = () => wrapper.find(GlTabs);
-
- const clickSecondTab = async () => {
- const secondTab = wrapper.findAll('.nav-link').at(1);
- await secondTab.trigger('click');
- await new Promise((resolve) => requestAnimationFrame(resolve));
- };
-
- beforeEach(() => {
- gitlabDotCom = false;
- setup();
- });
-
- it('renders tabs with drawer body height and content', () => {
- const scroll = findInfiniteScroll();
- const tabs = findTabs();
-
- expect(scroll.exists()).toBe(false);
- expect(tabs.attributes().style).toBe(`height: ${MOCK_DRAWER_BODY_HEIGHT}px;`);
- expect(wrapper.find('h5').text()).toBe('Whats New Drawer');
- });
-
- describe('fetchVersion', () => {
- beforeEach(() => {
- actions.fetchItems.mockClear();
- });
-
- it('when version isnt fetched, clicking a tab calls fetchItems', async () => {
- const fetchVersionSpy = jest.spyOn(wrapper.vm, 'fetchVersion');
- await clickSecondTab();
-
- expect(fetchVersionSpy).toHaveBeenCalledWith('3.10');
- expect(actions.fetchItems).toHaveBeenCalledWith(expect.anything(), { version: '3.10' });
- });
-
- it('when version has been fetched, clicking a tab calls fetchItems', async () => {
- wrapper.vm.$store.state.features.push({ title: 'GitLab Stories', release: 3.1 });
- await wrapper.vm.$nextTick();
-
- const fetchVersionSpy = jest.spyOn(wrapper.vm, 'fetchVersion');
- await clickSecondTab();
-
- expect(fetchVersionSpy).toHaveBeenCalledWith('3.10');
- expect(actions.fetchItems).not.toHaveBeenCalled();
- expect(wrapper.find('.tab-pane.active h5').text()).toBe('GitLab Stories');
- });
- });
- });
});
diff --git a/spec/frontend/whats_new/store/actions_spec.js b/spec/frontend/whats_new/store/actions_spec.js
index c4125d28aba..39ad526cf14 100644
--- a/spec/frontend/whats_new/store/actions_spec.js
+++ b/spec/frontend/whats_new/store/actions_spec.js
@@ -11,9 +11,12 @@ describe('whats new actions', () => {
useLocalStorageSpy();
it('should commit openDrawer', () => {
- testAction(actions.openDrawer, 'storage-key', {}, [{ type: types.OPEN_DRAWER }]);
+ testAction(actions.openDrawer, 'digest-hash', {}, [{ type: types.OPEN_DRAWER }]);
- expect(window.localStorage.setItem).toHaveBeenCalledWith('storage-key', 'false');
+ expect(window.localStorage.setItem).toHaveBeenCalledWith(
+ 'display-whats-new-notification',
+ 'digest-hash',
+ );
});
});
@@ -45,12 +48,12 @@ describe('whats new actions', () => {
axiosMock.reset();
axiosMock
- .onGet('/-/whats_new', { params: { page: 8, version: 40 } })
+ .onGet('/-/whats_new', { params: { page: 8 } })
.replyOnce(200, [{ title: 'GitLab Stories' }]);
testAction(
actions.fetchItems,
- { page: 8, version: 40 },
+ { page: 8 },
{},
expect.arrayContaining([
{ type: types.ADD_FEATURES, payload: [{ title: 'GitLab Stories' }] },
diff --git a/spec/frontend/whats_new/utils/notification_spec.js b/spec/frontend/whats_new/utils/notification_spec.js
index e3e390f4394..e1de65df30f 100644
--- a/spec/frontend/whats_new/utils/notification_spec.js
+++ b/spec/frontend/whats_new/utils/notification_spec.js
@@ -1,5 +1,5 @@
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
-import { setNotification, getStorageKey } from '~/whats_new/utils/notification';
+import { setNotification, getVersionDigest } from '~/whats_new/utils/notification';
describe('~/whats_new/utils/notification', () => {
useLocalStorageSpy();
@@ -33,10 +33,23 @@ describe('~/whats_new/utils/notification', () => {
expect(notificationEl.classList).toContain('with-notifications');
});
- it('removes class and count element when storage key is true', () => {
+ it('removes class and count element when legacy storage key is false', () => {
const notificationEl = findNotificationEl();
notificationEl.classList.add('with-notifications');
- localStorage.setItem('storage-key', 'false');
+ localStorage.setItem('display-whats-new-notification-13.10', 'false');
+
+ expect(findNotificationCountEl()).toExist();
+
+ subject();
+
+ expect(findNotificationCountEl()).not.toExist();
+ expect(notificationEl.classList).not.toContain('with-notifications');
+ });
+
+ it('removes class and count element when storage key has current digest', () => {
+ const notificationEl = findNotificationEl();
+ notificationEl.classList.add('with-notifications');
+ localStorage.setItem('display-whats-new-notification', 'version-digest');
expect(findNotificationCountEl()).toExist();
@@ -47,9 +60,9 @@ describe('~/whats_new/utils/notification', () => {
});
});
- describe('getStorageKey', () => {
+ describe('getVersionDigest', () => {
it('retrieves the storage key data attribute from the el', () => {
- expect(getStorageKey(getAppEl())).toBe('storage-key');
+ expect(getVersionDigest(getAppEl())).toBe('version-digest');
});
});
});
diff --git a/spec/frontend/wikis_spec.js b/spec/frontend/wikis_spec.js
index c4a2bf1a69a..c4e914bcf34 100644
--- a/spec/frontend/wikis_spec.js
+++ b/spec/frontend/wikis_spec.js
@@ -4,159 +4,6 @@ import Wikis from '~/pages/shared/wikis/wikis';
import Tracking from '~/tracking';
describe('Wikis', () => {
- const editFormHtmlFixture = (args) => `<form class="wiki-form ${
- args.newPage ? 'js-new-wiki-page' : ''
- }">
- <input type="text" id="wiki_title" value="My title" />
- <input type="text" id="wiki_message" />
- <select class="form-control select-control" name="wiki[format]" id="wiki_format">
- <option value="markdown">Markdown</option>
- <option selected="selected" value="rdoc">RDoc</option>
- <option value="asciidoc">AsciiDoc</option>
- <option value="org">Org</option>
- </select>
- <textarea id="wiki_content"></textarea>
- <code class="js-markup-link-example">{Link title}[link:page-slug]</code>
- <input type="submit" class="js-wiki-btn-submit">
- </input>
- </form>
- `;
-
- let wikis;
- let titleInput;
- let contentInput;
- let messageInput;
- let changeFormatSelect;
- let linkExample;
-
- const findBeforeUnloadWarning = () => window.onbeforeunload?.();
- const findForm = () => document.querySelector('.wiki-form');
- const findSubmitButton = () => document.querySelector('.js-wiki-btn-submit');
-
- describe('when the wiki page is being created', () => {
- const formHtmlFixture = editFormHtmlFixture({ newPage: true });
-
- beforeEach(() => {
- setHTMLFixture(formHtmlFixture);
-
- titleInput = document.getElementById('wiki_title');
- messageInput = document.getElementById('wiki_message');
- changeFormatSelect = document.querySelector('#wiki_format');
- linkExample = document.querySelector('.js-markup-link-example');
- wikis = new Wikis();
- });
-
- it('binds an event listener to the title input', () => {
- wikis.handleWikiTitleChange = jest.fn();
-
- titleInput.dispatchEvent(new Event('keyup'));
-
- expect(wikis.handleWikiTitleChange).toHaveBeenCalled();
- });
-
- it('sets the commit message when title changes', () => {
- titleInput.value = 'My title';
- messageInput.value = '';
-
- titleInput.dispatchEvent(new Event('keyup'));
-
- expect(messageInput.value).toEqual('Create My title');
- });
-
- it('replaces hyphens with spaces', () => {
- titleInput.value = 'my-hyphenated-title';
- titleInput.dispatchEvent(new Event('keyup'));
-
- expect(messageInput.value).toEqual('Create my hyphenated title');
- });
- });
-
- describe('when the wiki page is being updated', () => {
- const formHtmlFixture = editFormHtmlFixture({ newPage: false });
-
- beforeEach(() => {
- setHTMLFixture(formHtmlFixture);
-
- titleInput = document.getElementById('wiki_title');
- messageInput = document.getElementById('wiki_message');
- wikis = new Wikis();
- });
-
- it('sets the commit message when title changes, prefixing with "Update"', () => {
- titleInput.value = 'My title';
- messageInput.value = '';
-
- titleInput.dispatchEvent(new Event('keyup'));
-
- expect(messageInput.value).toEqual('Update My title');
- });
-
- it.each`
- value | text
- ${'markdown'} | ${'[Link Title](page-slug)'}
- ${'rdoc'} | ${'{Link title}[link:page-slug]'}
- ${'asciidoc'} | ${'link:page-slug[Link title]'}
- ${'org'} | ${'[[page-slug]]'}
- `('updates a message when value=$value is selected', ({ value, text }) => {
- changeFormatSelect.value = value;
- changeFormatSelect.dispatchEvent(new Event('change'));
-
- expect(linkExample.innerHTML).toBe(text);
- });
-
- it('starts with no unload warning', () => {
- expect(findBeforeUnloadWarning()).toBeUndefined();
- });
-
- describe('when wiki content is updated', () => {
- beforeEach(() => {
- contentInput = document.getElementById('wiki_content');
- contentInput.value = 'Lorem ipsum dolar sit!';
- contentInput.dispatchEvent(new Event('input'));
- });
-
- it('sets before unload warning', () => {
- expect(findBeforeUnloadWarning()).toBe('');
- });
-
- it('when form submitted, unsets before unload warning', () => {
- findForm().dispatchEvent(new Event('submit'));
- expect(findBeforeUnloadWarning()).toBeUndefined();
- });
- });
- });
-
- describe('submit button state', () => {
- beforeEach(() => {
- setHTMLFixture(editFormHtmlFixture({ newPage: true }));
-
- titleInput = document.getElementById('wiki_title');
- contentInput = document.getElementById('wiki_content');
-
- wikis = new Wikis();
- });
-
- it.each`
- title | text | buttonState | disabledAttr
- ${'something'} | ${'something'} | ${'enabled'} | ${null}
- ${''} | ${'something'} | ${'disabled'} | ${'true'}
- ${'something'} | ${''} | ${'disabled'} | ${'true'}
- ${''} | ${''} | ${'disabled'} | ${'true'}
- ${' '} | ${' '} | ${'disabled'} | ${'true'}
- `(
- "when title='$title', content='$content', then, buttonState='$buttonState'",
- ({ title, text, disabledAttr }) => {
- titleInput.value = title;
- titleInput.dispatchEvent(new Event('keyup'));
-
- contentInput.value = text;
- contentInput.dispatchEvent(new Event('input'));
-
- expect(findSubmitButton().getAttribute('disabled')).toBe(disabledAttr);
- },
- );
- });
-
describe('trackPageView', () => {
const trackingPage = 'projects:wikis:show';
const trackingContext = { foo: 'bar' };
diff --git a/spec/frontend_integration/diffs/diffs_interopability_api.js b/spec/frontend_integration/diffs/diffs_interopability_api.js
new file mode 100644
index 00000000000..adfb93f27a2
--- /dev/null
+++ b/spec/frontend_integration/diffs/diffs_interopability_api.js
@@ -0,0 +1,25 @@
+/**
+ * This helper module contains the API expectation of the diff output HTML.
+ *
+ * This helps simulate what third-party HTML scrapers, such as Sourcegraph,
+ * should be looking for.
+ */
+export const getDiffCodePart = (codeElement) => {
+ const el = codeElement.closest('[data-interop-type]');
+
+ return el.dataset.interopType === 'old' ? 'base' : 'head';
+};
+
+export const getCodeElementFromLineNumber = (codeView, line, part) => {
+ const type = part === 'base' ? 'old' : 'new';
+
+ const el = codeView.querySelector(`[data-interop-${type}-line="${line}"]`);
+
+ return el ? el.querySelector('span.line') : null;
+};
+
+export const getLineNumberFromCodeElement = (codeElement) => {
+ const el = codeElement.closest('[data-interop-line]');
+
+ return parseInt(el.dataset.interopLine || '', 10);
+};
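The helper above documents the scraping contract: code cells carry data-interop-type, data-interop-line, and data-interop-old-line/data-interop-new-line attributes that third-party tools can query. A minimal usage sketch (hypothetical, not part of this commit) of how a scraper-style consumer might drive these helpers against a rendered diff; the `.diff-file` selector and the line number 4 are illustrative assumptions:

// Hypothetical consumer sketch: resolve a code element by line number, then
// read its side ('base' or 'head') and its line number back via the helpers.
import {
  getDiffCodePart,
  getCodeElementFromLineNumber,
  getLineNumberFromCodeElement,
} from './diffs_interopability_api';

const diffFile = document.querySelector('.diff-file'); // assumed container element
const codeEl = getCodeElementFromLineNumber(diffFile, 4, 'head');

if (codeEl) {
  // getDiffCodePart maps data-interop-type 'old' to 'base' and anything else to 'head'.
  console.log(getDiffCodePart(codeEl), getLineNumberFromCodeElement(codeEl));
}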
diff --git a/spec/frontend_integration/diffs/diffs_interopability_spec.js b/spec/frontend_integration/diffs/diffs_interopability_spec.js
new file mode 100644
index 00000000000..cb7659e16d3
--- /dev/null
+++ b/spec/frontend_integration/diffs/diffs_interopability_spec.js
@@ -0,0 +1,161 @@
+import { waitFor } from '@testing-library/dom';
+import { TEST_HOST } from 'helpers/test_constants';
+import initDiffsApp from '~/diffs';
+import { createStore } from '~/mr_notes/stores';
+import {
+ getDiffCodePart,
+ getLineNumberFromCodeElement,
+ getCodeElementFromLineNumber,
+} from './diffs_interopability_api';
+
+jest.mock('~/vue_shared/mixins/gl_feature_flags_mixin', () => () => ({
+ inject: {
+ glFeatures: {
+ from: 'window.gon.features',
+ default: () => global.window.gon?.features,
+ },
+ },
+}));
+
+const TEST_PROJECT_PATH = 'gitlab-org/gitlab-test';
+const TEST_BASE_URL = `/${TEST_PROJECT_PATH}/-/merge_requests/1/`;
+const TEST_DIFF_FILE = 'files/js/commit.coffee';
+const EXPECT_INLINE = [
+ ['head', 1],
+ ['head', 2],
+ ['head', 3],
+ ['base', 4],
+ ['head', 4],
+ null,
+ ['base', 6],
+ ['head', 6],
+ null,
+];
+const EXPECT_PARALLEL_LEFT_SIDE = [
+ ['base', 1],
+ ['base', 2],
+ ['base', 3],
+ ['base', 4],
+ null,
+ ['base', 6],
+ null,
+];
+const EXPECT_PARALLEL_RIGHT_SIDE = [
+ ['head', 1],
+ ['head', 2],
+ ['head', 3],
+ ['head', 4],
+ null,
+ ['head', 6],
+ null,
+];
+
+const startDiffsApp = () => {
+ const el = document.createElement('div');
+ el.id = 'js-diffs-app';
+ document.body.appendChild(el);
+ Object.assign(el.dataset, {
+ endpoint: TEST_BASE_URL,
+ endpointMetadata: `${TEST_BASE_URL}diffs_metadata.json`,
+ endpointBatch: `${TEST_BASE_URL}diffs_batch.json`,
+ projectPath: TEST_PROJECT_PATH,
+ helpPagePath: '/help',
+ currentUserData: 'null',
+ changesEmptyStateIllustration: '',
+ isFluidLayout: 'false',
+ dismissEndpoint: '',
+ showSuggestPopover: 'false',
+ showWhitespaceDefault: 'true',
+ viewDiffsFileByFile: 'false',
+ defaultSuggestionCommitMessage: 'Lorem ipsum',
+ });
+
+ const store = createStore();
+
+ const vm = initDiffsApp(store);
+
+ store.dispatch('setActiveTab', 'diffs');
+
+ return vm;
+};
+
+describe('diffs third party interoperability', () => {
+ let vm;
+
+ afterEach(() => {
+ vm.$destroy();
+ document.body.innerHTML = '';
+ });
+
+ const tryOrErrorMessage = (fn) => (...args) => {
+ try {
+ return fn(...args);
+ } catch (e) {
+ return e.message;
+ }
+ };
+
+ const findDiffFile = () => document.querySelector(`.diff-file[data-path="${TEST_DIFF_FILE}"]`);
+ const hasLines = (sel = 'tr.line_holder') => findDiffFile().querySelectorAll(sel).length > 0;
+ const findLineElements = (sel = 'tr.line_holder') =>
+ Array.from(findDiffFile().querySelectorAll(sel));
+
+ const findCodeElements = (lines, sel = 'td.line_content') => {
+ return lines.map((x) => x.querySelector(`${sel} span.line`));
+ };
+
+ const getCodeElementsInteropModel = (codeElements) =>
+ codeElements.map(
+ (x) =>
+ x && [
+ tryOrErrorMessage(getDiffCodePart)(x),
+ tryOrErrorMessage(getLineNumberFromCodeElement)(x),
+ ],
+ );
+
+ describe.each`
+ desc | unifiedDiffComponents | view | rowSelector | codeSelector | expectation
+ ${'inline view'} | ${false} | ${'inline'} | ${'tr.line_holder'} | ${'td.line_content'} | ${EXPECT_INLINE}
+ ${'parallel view left side'} | ${false} | ${'parallel'} | ${'tr.line_holder'} | ${'td.line_content.left-side'} | ${EXPECT_PARALLEL_LEFT_SIDE}
+ ${'parallel view right side'} | ${false} | ${'parallel'} | ${'tr.line_holder'} | ${'td.line_content.right-side'} | ${EXPECT_PARALLEL_RIGHT_SIDE}
+ ${'inline view'} | ${true} | ${'inline'} | ${'.diff-tr.line_holder'} | ${'.diff-td.line_content'} | ${EXPECT_INLINE}
+ ${'parallel view left side'} | ${true} | ${'parallel'} | ${'.diff-tr.line_holder'} | ${'.diff-td.line_content.left-side'} | ${EXPECT_PARALLEL_LEFT_SIDE}
+ ${'parallel view right side'} | ${true} | ${'parallel'} | ${'.diff-tr.line_holder'} | ${'.diff-td.line_content.right-side'} | ${EXPECT_PARALLEL_RIGHT_SIDE}
+ `(
+ '$desc (unifiedDiffComponents=$unifiedDiffComponents)',
+ ({ unifiedDiffComponents, view, rowSelector, codeSelector, expectation }) => {
+ beforeEach(async () => {
+ global.jsdom.reconfigure({
+ url: `${TEST_HOST}/${TEST_BASE_URL}/diffs?view=${view}`,
+ });
+ window.gon.features = { unifiedDiffComponents };
+
+ vm = startDiffsApp();
+
+ await waitFor(() => expect(hasLines(rowSelector)).toBe(true));
+ });
+
+ it('should match diff model', () => {
+ const lines = findLineElements(rowSelector);
+ const codes = findCodeElements(lines, codeSelector);
+
+ expect(getCodeElementsInteropModel(codes)).toEqual(expectation);
+ });
+
+ it.each`
+ lineNumber | part | expectedText
+ ${4} | ${'base'} | ${'new CommitFile(this)'}
+ ${4} | ${'head'} | ${'new CommitFile(@)'}
+ ${2} | ${'base'} | ${'constructor: ->'}
+ ${2} | ${'head'} | ${'constructor: ->'}
+ `(
+ 'should find code element lineNumber=$lineNumber part=$part',
+ ({ lineNumber, part, expectedText }) => {
+ const codeElement = getCodeElementFromLineNumber(findDiffFile(), lineNumber, part);
+
+ expect(codeElement.textContent.trim()).toBe(expectedText);
+ },
+ );
+ },
+ );
+});
diff --git a/spec/frontend_integration/test_helpers/fixtures.js b/spec/frontend_integration/test_helpers/fixtures.js
index b2768440607..5673e36197f 100644
--- a/spec/frontend_integration/test_helpers/fixtures.js
+++ b/spec/frontend_integration/test_helpers/fixtures.js
@@ -40,6 +40,12 @@ export const getMergeRequestVersions = factory.json(() =>
export const getRepositoryFiles = factory.json(() =>
require('test_fixtures/projects_json/files.json'),
);
+export const getDiffsMetadata = factory.json(() =>
+ require('test_fixtures/merge_request_diffs/diffs_metadata.json'),
+);
+export const getDiffsBatch = factory.json(() =>
+ require('test_fixtures/merge_request_diffs/diffs_batch.json'),
+);
export const getPipelinesEmptyResponse = factory.json(() =>
require('test_fixtures/projects_json/pipelines_empty.json'),
);
diff --git a/spec/frontend_integration/test_helpers/mock_server/graphql.js b/spec/frontend_integration/test_helpers/mock_server/graphql.js
index e2658852599..27396842523 100644
--- a/spec/frontend_integration/test_helpers/mock_server/graphql.js
+++ b/spec/frontend_integration/test_helpers/mock_server/graphql.js
@@ -1,13 +1,11 @@
import { buildSchema, graphql } from 'graphql';
+import { memoize } from 'lodash';
-/* eslint-disable import/no-unresolved */
-// This rule is disabled for the following line.
// The graphql schema is dynamically generated in CI
// during the `graphql-schema-dump` job.
-import gitlabSchemaStr from '../../../../tmp/tests/graphql/gitlab_schema.graphql';
-/* eslint-enable import/no-unresolved */
+// eslint-disable-next-line global-require, import/no-unresolved
+const getGraphqlSchema = () => require('../../../../tmp/tests/graphql/gitlab_schema.graphql');
-const graphqlSchema = buildSchema(gitlabSchemaStr.loc.source.body);
const graphqlResolvers = {
project({ fullPath }, schema) {
const result = schema.projects.findBy({ path_with_namespace: fullPath });
@@ -21,6 +19,7 @@ const graphqlResolvers = {
};
},
};
+const buildGraphqlSchema = memoize(() => buildSchema(getGraphqlSchema().loc.source.body));
export const graphqlQuery = (query, variables, schema) =>
- graphql(graphqlSchema, query, graphqlResolvers, schema, variables);
+ graphql(buildGraphqlSchema(), query, graphqlResolvers, schema, variables);
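The change above replaces the eager top-level import of the CI-generated schema with a lazy require wrapped in lodash memoize, so the schema file is only read and buildSchema only runs the first time a query executes. A generic sketch of the same lazy-require-plus-memoize pattern (the file path and the loader behaviour are assumptions for illustration):

// Sketch of lazy loading + memoization: the expensive schema build happens
// once, on first use, and the cached result is reused on later calls.
import { memoize } from 'lodash';
import { buildSchema, graphql } from 'graphql';

// Assumption: a transform/loader parses .graphql files into a document
// exposing .loc.source.body, as in the spec helper above.
const getSchemaSource = () => require('./tmp/gitlab_schema.graphql');

const getSchema = memoize(() => buildSchema(getSchemaSource().loc.source.body));

// First call pays the parse/build cost; subsequent calls reuse the cached schema.
export const runQuery = (query, rootValue, variables) =>
  graphql(getSchema(), query, rootValue, undefined, variables);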
diff --git a/spec/frontend_integration/test_helpers/mock_server/index.js b/spec/frontend_integration/test_helpers/mock_server/index.js
index 20cb441daa7..486c9452dbd 100644
--- a/spec/frontend_integration/test_helpers/mock_server/index.js
+++ b/spec/frontend_integration/test_helpers/mock_server/index.js
@@ -1,4 +1,5 @@
import { Server, Model, RestSerializer } from 'miragejs';
+import setupRoutes from 'ee_else_ce_test_helpers/mock_server/routes';
import {
getProject,
getEmptyProject,
@@ -11,7 +12,6 @@ import {
getBlobImage,
getBlobZip,
} from 'test_helpers/fixtures';
-import setupRoutes from './routes';
export const createMockServerOptions = () => ({
models: {
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/diffs.js b/spec/frontend_integration/test_helpers/mock_server/routes/diffs.js
new file mode 100644
index 00000000000..8301627e842
--- /dev/null
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/diffs.js
@@ -0,0 +1,22 @@
+import { getDiffsMetadata, getDiffsBatch } from 'test_helpers/fixtures';
+import { withValues } from 'test_helpers/utils/obj';
+
+export default (server) => {
+ server.get('/:namespace/:project/-/merge_requests/:mrid/diffs_metadata.json', () => {
+ return getDiffsMetadata();
+ });
+
+ server.get('/:namespace/:project/-/merge_requests/:mrid/diffs_batch.json', () => {
+ const { pagination, ...result } = getDiffsBatch();
+
+ return {
+ ...result,
+ pagination: withValues(pagination, {
+ current_page: null,
+ next_page: null,
+ total_pages: 1,
+ next_page_href: null,
+ }),
+ };
+ });
+};
diff --git a/spec/frontend_integration/test_helpers/mock_server/routes/index.js b/spec/frontend_integration/test_helpers/mock_server/routes/index.js
index e30fecf2f06..48eff2702dd 100644
--- a/spec/frontend_integration/test_helpers/mock_server/routes/index.js
+++ b/spec/frontend_integration/test_helpers/mock_server/routes/index.js
@@ -5,6 +5,7 @@ export default (server) => {
require('./projects'),
require('./repository'),
require('./ci'),
+ require('./diffs'),
require('./404'),
].forEach(({ default: setup }) => {
setup(server);
diff --git a/spec/graphql/features/authorization_spec.rb b/spec/graphql/features/authorization_spec.rb
index 33b11e1ca09..64e423e2bf8 100644
--- a/spec/graphql/features/authorization_spec.rb
+++ b/spec/graphql/features/authorization_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Gitlab::Graphql::Authorize' do
+RSpec.describe 'DeclarativePolicy authorization in GraphQL' do
include GraphqlHelpers
include Graphql::ResolverFactories
@@ -10,10 +10,14 @@ RSpec.describe 'Gitlab::Graphql::Authorize' do
let(:permission_single) { :foo }
let(:permission_collection) { [:foo, :bar] }
let(:test_object) { double(name: 'My name') }
+ let(:authorizing_object) { test_object }
+ # to override when combining permissions
+ let(:permission_object_one) { authorizing_object }
+ let(:permission_object_two) { authorizing_object }
+
let(:query_string) { '{ item { name } }' }
let(:result) do
schema = empty_schema
- schema.use(Gitlab::Graphql::Authorize)
execute_query(query_type, schema: schema)
end
@@ -33,18 +37,25 @@ RSpec.describe 'Gitlab::Graphql::Authorize' do
shared_examples 'authorization with a collection of permissions' do
it 'returns the protected field when user has all permissions' do
- permit(*permission_collection)
+ permit_on(permission_object_one, permission_collection.first)
+ permit_on(permission_object_two, permission_collection.second)
expect(subject).to eq('name' => test_object.name)
end
it 'returns nil when user only has one of the permissions' do
- permit(permission_collection.first)
+ permit_on(permission_object_one, permission_collection.first)
expect(subject).to be_nil
end
- it 'returns nil when user only has none of the permissions' do
+    it 'returns nil when user only has the other permission' do
+ permit_on(permission_object_two, permission_collection.second)
+
+ expect(subject).to be_nil
+ end
+
+ it 'returns nil when user has neither of the required permissions' do
expect(subject).to be_nil
end
end
@@ -56,6 +67,7 @@ RSpec.describe 'Gitlab::Graphql::Authorize' do
describe 'Field authorizations' do
let(:type) { type_factory }
+ let(:authorizing_object) { nil }
describe 'with a single permission' do
let(:query_type) do
@@ -71,9 +83,10 @@ RSpec.describe 'Gitlab::Graphql::Authorize' do
let(:query_type) do
permissions = permission_collection
query_factory do |qt|
- qt.field :item, type, null: true, resolver: new_resolver(test_object) do
- authorize permissions
- end
+ qt.field :item, type,
+ null: true,
+ resolver: new_resolver(test_object),
+ authorize: permissions
end
end
@@ -110,9 +123,9 @@ RSpec.describe 'Gitlab::Graphql::Authorize' do
let(:type) do
permissions = permission_collection
type_factory do |type|
- type.field :name, GraphQL::STRING_TYPE, null: true do
- authorize permissions
- end
+ type.field :name, GraphQL::STRING_TYPE,
+ null: true,
+ authorize: permissions
end
end
@@ -163,6 +176,7 @@ RSpec.describe 'Gitlab::Graphql::Authorize' do
end
describe 'type and field authorizations together' do
+ let(:authorizing_object) { anything }
let(:permission_1) { permission_collection.first }
let(:permission_2) { permission_collection.last }
@@ -181,7 +195,63 @@ RSpec.describe 'Gitlab::Graphql::Authorize' do
include_examples 'authorization with a collection of permissions'
end
- describe 'type authorizations when applied to a relay connection' do
+ describe 'resolver and field authorizations together' do
+ let(:permission_1) { permission_collection.first }
+ let(:permission_2) { permission_collection.last }
+ let(:type) { type_factory }
+
+ let(:query_type) do
+ query_factory do |query|
+ query.field :item, type,
+ null: true,
+ resolver: resolver,
+ authorize: permission_2
+ end
+ end
+
+ context 'when the resolver authorizes the object' do
+ let(:permission_object_one) { be_nil }
+ let(:permission_object_two) { be_nil }
+ let(:resolver) do
+ resolver = simple_resolver(test_object)
+ resolver.include(::Gitlab::Graphql::Authorize::AuthorizeResource)
+ resolver.authorize permission_1
+ resolver.authorizes_object!
+ resolver
+ end
+
+ include_examples 'authorization with a collection of permissions'
+ end
+
+ context 'when the resolver does not authorize the object, but instead calls authorized_find!' do
+ let(:permission_object_one) { test_object }
+ let(:permission_object_two) { be_nil }
+ let(:resolver) do
+ resolver = new_resolver(test_object, method: :find_object)
+ resolver.authorize permission_1
+ resolver
+ end
+
+ include_examples 'authorization with a collection of permissions'
+ end
+
+ context 'when the resolver calls authorized_find!, but does not list any permissions' do
+ let(:permission_object_two) { be_nil }
+ let(:resolver) do
+ resolver = new_resolver(test_object, method: :find_object)
+ resolver
+ end
+
+ it 'raises a configuration error' do
+ permit_on(permission_object_two, permission_collection.second)
+
+ expect { execute_query(query_type) }
+ .to raise_error(::Gitlab::Graphql::Authorize::AuthorizeResource::ConfigurationError)
+ end
+ end
+ end
+
+  describe 'when type authorizations are applied to a relay connection' do
let(:query_string) { '{ item { edges { node { name } } } }' }
let(:second_test_object) { double(name: 'Second thing') }
@@ -220,8 +290,12 @@ RSpec.describe 'Gitlab::Graphql::Authorize' do
let(:query_string) { '{ item(first: 1) { edges { node { name } } } }' }
it 'only checks permissions for the first object' do
- expect(Ability).to receive(:allowed?).with(user, permission_single, test_object) { true }
- expect(Ability).not_to receive(:allowed?).with(user, permission_single, second_test_object)
+ expect(Ability)
+ .to receive(:allowed?)
+ .with(user, permission_single, test_object)
+ .and_return(true)
+ expect(Ability)
+ .not_to receive(:allowed?).with(user, permission_single, second_test_object)
expect(subject.size).to eq(1)
end
@@ -262,10 +336,12 @@ RSpec.describe 'Gitlab::Graphql::Authorize' do
end
let(:project_type) do |type|
+ issues = Issue.where(project: [visible_project, other_project]).order(id: :asc)
type_factory do |type|
type.graphql_name 'FakeProjectType'
- type.field :test_issues, issue_type.connection_type, null: false,
- resolver: new_resolver(Issue.where(project: [visible_project, other_project]).order(id: :asc))
+ type.field :test_issues, issue_type.connection_type,
+ null: false,
+ resolver: new_resolver(issues)
end
end
@@ -300,11 +376,35 @@ RSpec.describe 'Gitlab::Graphql::Authorize' do
end
end
+ describe 'Authorization on GraphQL::Execution::Execute::SKIP' do
+ let(:type) do
+ type_factory do |type|
+ type.authorize permission_single
+ end
+ end
+
+ let(:query_type) do
+ query_factory do |query|
+ query.field :item, [type], null: true, resolver: new_resolver(GraphQL::Execution::Execute::SKIP)
+ end
+ end
+
+ it 'skips redaction' do
+ expect(Ability).not_to receive(:allowed?)
+
+ result
+ end
+ end
+
private
def permit(*permissions)
+ permit_on(authorizing_object, *permissions)
+ end
+
+ def permit_on(object, *permissions)
permissions.each do |permission|
- allow(Ability).to receive(:allowed?).with(user, permission, test_object).and_return(true)
+ allow(Ability).to receive(:allowed?).with(user, permission, object).and_return(true)
end
end
end
diff --git a/spec/graphql/gitlab_schema_spec.rb b/spec/graphql/gitlab_schema_spec.rb
index cb2bb25b098..1f2c518f83c 100644
--- a/spec/graphql/gitlab_schema_spec.rb
+++ b/spec/graphql/gitlab_schema_spec.rb
@@ -14,10 +14,6 @@ RSpec.describe GitlabSchema do
expect(field_instrumenters).to include(instance_of(::Gitlab::Graphql::GenericTracing))
end
- it 'enables the authorization instrumenter' do
- expect(field_instrumenters).to include(instance_of(::Gitlab::Graphql::Authorize::Instrumentation))
- end
-
it 'has the base mutation' do
expect(described_class.mutation).to eq(::Types::MutationType)
end
@@ -210,18 +206,22 @@ RSpec.describe GitlabSchema do
describe '.parse_gid' do
let_it_be(:global_id) { 'gid://gitlab/TestOne/2147483647' }
+ subject(:parse_gid) { described_class.parse_gid(global_id) }
+
before do
test_base = Class.new
test_one = Class.new(test_base)
test_two = Class.new(test_base)
+ test_three = Class.new(test_base)
stub_const('TestBase', test_base)
stub_const('TestOne', test_one)
stub_const('TestTwo', test_two)
+ stub_const('TestThree', test_three)
end
it 'parses the gid' do
- gid = described_class.parse_gid(global_id)
+ gid = parse_gid
expect(gid.model_id).to eq '2147483647'
expect(gid.model_class).to eq TestOne
@@ -231,7 +231,7 @@ RSpec.describe GitlabSchema do
let_it_be(:global_id) { 'malformed://gitlab/TestOne/2147483647' }
it 'raises an error' do
- expect { described_class.parse_gid(global_id) }
+ expect { parse_gid }
.to raise_error(Gitlab::Graphql::Errors::ArgumentError, "#{global_id} is not a valid GitLab ID.")
end
end
@@ -253,6 +253,33 @@ RSpec.describe GitlabSchema do
expect { described_class.parse_gid(global_id, expected_type: TestTwo) }
.to raise_error(Gitlab::Graphql::Errors::ArgumentError, "#{global_id} is not a valid ID for TestTwo.")
end
+
+ context 'when expected_type is an array' do
+ subject(:parse_gid) { described_class.parse_gid(global_id, expected_type: [TestOne, TestTwo]) }
+
+ context 'when global_id is of type TestOne' do
+ it 'returns an object of an expected type' do
+ expect(parse_gid.model_class).to eq TestOne
+ end
+ end
+
+ context 'when global_id is of type TestTwo' do
+ let_it_be(:global_id) { 'gid://gitlab/TestTwo/2147483647' }
+
+ it 'returns an object of an expected type' do
+ expect(parse_gid.model_class).to eq TestTwo
+ end
+ end
+
+ context 'when global_id is of type TestThree' do
+ let_it_be(:global_id) { 'gid://gitlab/TestThree/2147483647' }
+
+ it 'rejects an unknown type' do
+ expect { parse_gid }
+ .to raise_error(Gitlab::Graphql::Errors::ArgumentError, "#{global_id} is not a valid ID for TestOne, TestTwo.")
+ end
+ end
+ end
end
end
diff --git a/spec/graphql/mutations/boards/issues/issue_move_list_spec.rb b/spec/graphql/mutations/boards/issues/issue_move_list_spec.rb
index 24104a20465..dd9305d2197 100644
--- a/spec/graphql/mutations/boards/issues/issue_move_list_spec.rb
+++ b/spec/graphql/mutations/boards/issues/issue_move_list_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Mutations::Boards::Issues::IssueMoveList do
+ include GraphqlHelpers
+
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:board) { create(:board, group: group) }
@@ -16,9 +18,8 @@ RSpec.describe Mutations::Boards::Issues::IssueMoveList do
let_it_be(:existing_issue1) { create(:labeled_issue, project: project, labels: [testing], relative_position: 10) }
let_it_be(:existing_issue2) { create(:labeled_issue, project: project, labels: [testing], relative_position: 50) }
- let(:current_user) { user }
- let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
- let(:params) { { board: board, project_path: project.full_path, iid: issue1.iid } }
+ let(:current_ctx) { { current_user: user } }
+ let(:params) { { board_id: global_id_of(board), project_path: project.full_path, iid: issue1.iid } }
let(:move_params) do
{
from_list_id: list1.id,
@@ -33,26 +34,45 @@ RSpec.describe Mutations::Boards::Issues::IssueMoveList do
group.add_guest(guest)
end
- subject do
- mutation.resolve(**params.merge(move_params))
- end
+ describe '#resolve' do
+ subject do
+ sync(resolve(described_class, args: params.merge(move_params), ctx: current_ctx))
+ end
+
+ %i[from_list_id to_list_id].each do |arg_name|
+ context "when we only pass #{arg_name}" do
+ let(:move_params) { { arg_name => list1.id } }
- describe '#ready?' do
- it 'raises an error if required arguments are missing' do
- expect { mutation.ready?(**params) }
- .to raise_error(Gitlab::Graphql::Errors::ArgumentError, "At least one of the arguments " \
- "fromListId, toListId, afterId or beforeId is required")
+ it 'raises an error' do
+ expect { subject }.to raise_error(
+ Gitlab::Graphql::Errors::ArgumentError,
+ 'Both fromListId and toListId must be present'
+ )
+ end
+ end
end
- it 'raises an error if only one of fromListId and toListId is present' do
- expect { mutation.ready?(**params.merge(from_list_id: list1.id)) }
- .to raise_error(Gitlab::Graphql::Errors::ArgumentError,
- 'Both fromListId and toListId must be present'
+ context 'when required arguments are missing' do
+ let(:move_params) { {} }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(
+ Gitlab::Graphql::Errors::ArgumentError,
+ "At least one of the arguments fromListId, toListId, afterId or beforeId is required"
)
+ end
+ end
+
+ context 'when the board ID is wrong' do
+ before do
+ params[:board_id] = global_id_of(project)
+ end
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(::GraphQL::LoadApplicationObjectFailedError)
+ end
end
- end
- describe '#resolve' do
context 'when user have access to resources' do
it 'moves and repositions issue' do
subject
@@ -63,15 +83,11 @@ RSpec.describe Mutations::Boards::Issues::IssueMoveList do
end
end
- context 'when user have no access to resources' do
- shared_examples 'raises a resource not available error' do
- it { expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable) }
- end
-
- context 'when user cannot update issue' do
- let(:current_user) { guest }
+ context 'when user cannot update issue' do
+ let(:current_ctx) { { current_user: guest } }
- it_behaves_like 'raises a resource not available error'
+ specify do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
end
diff --git a/spec/graphql/mutations/concerns/mutations/can_mutate_spammable_spec.rb b/spec/graphql/mutations/concerns/mutations/can_mutate_spammable_spec.rb
deleted file mode 100644
index 8d1fce406fa..00000000000
--- a/spec/graphql/mutations/concerns/mutations/can_mutate_spammable_spec.rb
+++ /dev/null
@@ -1,46 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Mutations::CanMutateSpammable do
- let(:mutation_class) do
- Class.new(Mutations::BaseMutation) do
- include Mutations::CanMutateSpammable
- end
- end
-
- let(:request) { double(:request) }
- let(:query) { double(:query, schema: GitlabSchema) }
- let(:context) { GraphQL::Query::Context.new(query: query, object: nil, values: { request: request }) }
-
- subject(:mutation) { mutation_class.new(object: nil, context: context, field: nil) }
-
- describe '#additional_spam_params' do
- it 'returns additional spam-related params' do
- expect(subject.send(:additional_spam_params)).to eq({ api: true, request: request })
- end
- end
-
- describe '#with_spam_action_fields' do
- let(:spam_log) { double(:spam_log, id: 1) }
- let(:spammable) { double(:spammable, spam?: true, render_recaptcha?: true, spam_log: spam_log) }
-
- before do
- allow(Gitlab::CurrentSettings).to receive(:recaptcha_site_key) { 'abc123' }
- end
-
- it 'merges in spam action fields from spammable' do
- result = subject.send(:with_spam_action_response_fields, spammable) do
- { other_field: true }
- end
- expect(result)
- .to eq({
- spam: true,
- needs_captcha_response: true,
- spam_log_id: 1,
- captcha_site_key: 'abc123',
- other_field: true
- })
- end
- end
-end
diff --git a/spec/graphql/mutations/design_management/upload_spec.rb b/spec/graphql/mutations/design_management/upload_spec.rb
index 326d88cea80..ada88b7652c 100644
--- a/spec/graphql/mutations/design_management/upload_spec.rb
+++ b/spec/graphql/mutations/design_management/upload_spec.rb
@@ -32,6 +32,10 @@ RSpec.describe Mutations::DesignManagement::Upload do
end
context "when the feature is not available" do
+ before do
+ enable_design_management(false)
+ end
+
it_behaves_like "resource not available"
end
@@ -52,10 +56,10 @@ RSpec.describe Mutations::DesignManagement::Upload do
.map { |f| RenameableUpload.unique_file(f) }
end
- def creates_designs
+ def creates_designs(&block)
prior_count = DesignManagement::Design.count
- expect { yield }.not_to raise_error
+ expect(&block).not_to raise_error
expect(DesignManagement::Design.count).to eq(prior_count + files.size)
end
@@ -99,20 +103,20 @@ RSpec.describe Mutations::DesignManagement::Upload do
it_behaves_like "resource not available"
end
- context "a valid design" do
+ context "with a valid design" do
it "returns the updated designs" do
expect(resolve[:errors]).to eq []
expect(resolve[:designs].map(&:filename)).to contain_exactly("dk.png")
end
end
- context "context when passing an invalid project" do
+ context "when passing an invalid project" do
let(:project) { build(:project) }
it_behaves_like "resource not available"
end
- context "context when passing an invalid issue" do
+ context "when passing an invalid issue" do
let(:issue) { build(:issue) }
it_behaves_like "resource not available"
diff --git a/spec/graphql/mutations/issues/set_assignees_spec.rb b/spec/graphql/mutations/issues/set_assignees_spec.rb
index 9a27c5acdac..4cc49e76bc6 100644
--- a/spec/graphql/mutations/issues/set_assignees_spec.rb
+++ b/spec/graphql/mutations/issues/set_assignees_spec.rb
@@ -11,7 +11,12 @@ RSpec.describe Mutations::Issues::SetAssignees do
subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
- subject { mutation.resolve(project_path: issue.project.full_path, iid: issue.iid, assignee_usernames: [assignee.username]) }
+ subject do
+ mutation.resolve(project_path: issue.project.full_path,
+ iid: issue.iid,
+ operation_mode: Types::MutationOperationModeEnum.default_mode,
+ assignee_usernames: [assignee.username])
+ end
it_behaves_like 'permission level for issue mutation is correctly verified'
end
diff --git a/spec/graphql/mutations/merge_requests/set_assignees_spec.rb b/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
index e2eab591341..9b0460bc709 100644
--- a/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
+++ b/spec/graphql/mutations/merge_requests/set_assignees_spec.rb
@@ -11,7 +11,12 @@ RSpec.describe Mutations::MergeRequests::SetAssignees do
subject(:mutation) { described_class.new(object: nil, context: { current_user: user }, field: nil) }
describe '#resolve' do
- subject { mutation.resolve(project_path: merge_request.project.full_path, iid: merge_request.iid, assignee_usernames: [assignee.username]) }
+ subject do
+ mutation.resolve(project_path: merge_request.project.full_path,
+ iid: merge_request.iid,
+ operation_mode: described_class.arguments['operationMode'].default_value,
+ assignee_usernames: [assignee.username])
+ end
it_behaves_like 'permission level for merge request mutation is correctly verified'
end
diff --git a/spec/graphql/mutations/release_asset_links/delete_spec.rb b/spec/graphql/mutations/release_asset_links/delete_spec.rb
new file mode 100644
index 00000000000..15d320b58ee
--- /dev/null
+++ b/spec/graphql/mutations/release_asset_links/delete_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::ReleaseAssetLinks::Delete do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :private, :repository) }
+ let_it_be_with_reload(:release) { create(:release, project: project) }
+ let_it_be(:developer) { create(:user).tap { |u| project.add_developer(u) } }
+ let_it_be(:maintainer) { create(:user).tap { |u| project.add_maintainer(u) } }
+ let_it_be_with_reload(:release_link) { create(:release_link, release: release) }
+
+ let(:mutation) { described_class.new(object: nil, context: { current_user: current_user }, field: nil) }
+ let(:mutation_arguments) { { id: release_link.to_global_id } }
+
+ describe '#resolve' do
+ subject(:resolve) do
+ mutation.resolve(**mutation_arguments)
+ end
+
+ let(:deleted_link) { subject[:link] }
+
+ context 'when the current user has access to delete the link' do
+ let(:current_user) { maintainer }
+
+ it 'deletes the link and returns it', :aggregate_failures do
+ expect(deleted_link).to eq(release_link)
+
+ expect(release.links).to be_empty
+ end
+
+ context "when the link doesn't exist" do
+ let(:mutation_arguments) { super().merge(id: "gid://gitlab/Releases::Link/#{non_existing_record_id}") }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context "when the provided ID is invalid" do
+ let(:mutation_arguments) { super().merge(id: 'not-a-valid-gid') }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(::GraphQL::CoercionError)
+ end
+ end
+ end
+
+ context 'when the current user does not have access to delete the link' do
+ let(:current_user) { developer }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+ end
+end
diff --git a/spec/graphql/mutations/release_asset_links/update_spec.rb b/spec/graphql/mutations/release_asset_links/update_spec.rb
index 065089066f1..20c1c8b581c 100644
--- a/spec/graphql/mutations/release_asset_links/update_spec.rb
+++ b/spec/graphql/mutations/release_asset_links/update_spec.rb
@@ -166,7 +166,7 @@ RSpec.describe Mutations::ReleaseAssetLinks::Update do
end
context "when the link doesn't exist" do
- let(:mutation_arguments) { super().merge(id: 'gid://gitlab/Releases::Link/999999') }
+ let(:mutation_arguments) { super().merge(id: "gid://gitlab/Releases::Link/#{non_existing_record_id}") }
it 'raises an error' do
expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
diff --git a/spec/graphql/resolvers/alert_management/http_integrations_resolver_spec.rb b/spec/graphql/resolvers/alert_management/http_integrations_resolver_spec.rb
index 2cd61dd7bcf..a4d1101bc4f 100644
--- a/spec/graphql/resolvers/alert_management/http_integrations_resolver_spec.rb
+++ b/spec/graphql/resolvers/alert_management/http_integrations_resolver_spec.rb
@@ -14,7 +14,9 @@ RSpec.describe Resolvers::AlertManagement::HttpIntegrationsResolver do
let_it_be(:inactive_http_integration) { create(:alert_management_http_integration, :inactive, project: project) }
let_it_be(:other_proj_integration) { create(:alert_management_http_integration) }
- subject { sync(resolve_http_integrations) }
+ let(:params) { {} }
+
+ subject { sync(resolve_http_integrations(params)) }
before do
project.add_developer(developer)
@@ -41,11 +43,25 @@ RSpec.describe Resolvers::AlertManagement::HttpIntegrationsResolver do
let(:current_user) { maintainer }
it { is_expected.to contain_exactly(active_http_integration) }
+
+ context 'when HTTP Integration ID is given' do
+ context 'when integration is from the current project' do
+ let(:params) { { id: global_id_of(inactive_http_integration) } }
+
+ it { is_expected.to contain_exactly(inactive_http_integration) }
+ end
+
+ context 'when integration is from other project' do
+ let(:params) { { id: global_id_of(other_proj_integration) } }
+
+ it { is_expected.to be_empty }
+ end
+ end
end
private
def resolve_http_integrations(args = {}, context = { current_user: current_user })
- resolve(described_class, obj: project, ctx: context)
+ resolve(described_class, obj: project, args: args, ctx: context)
end
end
diff --git a/spec/graphql/resolvers/alert_management/integrations_resolver_spec.rb b/spec/graphql/resolvers/alert_management/integrations_resolver_spec.rb
index 36e409e0677..fb0fb6729d4 100644
--- a/spec/graphql/resolvers/alert_management/integrations_resolver_spec.rb
+++ b/spec/graphql/resolvers/alert_management/integrations_resolver_spec.rb
@@ -7,12 +7,16 @@ RSpec.describe Resolvers::AlertManagement::IntegrationsResolver do
let_it_be(:current_user) { create(:user) }
let_it_be(:project) { create(:project) }
+ let_it_be(:project2) { create(:project) }
let_it_be(:prometheus_integration) { create(:prometheus_service, project: project) }
let_it_be(:active_http_integration) { create(:alert_management_http_integration, project: project) }
let_it_be(:inactive_http_integration) { create(:alert_management_http_integration, :inactive, project: project) }
- let_it_be(:other_proj_integration) { create(:alert_management_http_integration) }
+ let_it_be(:other_proj_integration) { create(:alert_management_http_integration, project: project2) }
+ let_it_be(:other_proj_prometheus_integration) { create(:prometheus_service, project: project2) }
- subject { sync(resolve_http_integrations) }
+ let(:params) { {} }
+
+ subject { sync(resolve_http_integrations(params)) }
specify do
expect(described_class).to have_nullable_graphql_type(Types::AlertManagement::IntegrationType.connection_type)
@@ -25,14 +29,43 @@ RSpec.describe Resolvers::AlertManagement::IntegrationsResolver do
context 'user has permission' do
before do
project.add_maintainer(current_user)
+ project2.add_maintainer(current_user)
end
it { is_expected.to contain_exactly(active_http_integration, prometheus_integration) }
+
+ context 'when HTTP Integration ID is given' do
+ context 'when integration is from the current project' do
+ let(:params) { { id: global_id_of(inactive_http_integration) } }
+
+ it { is_expected.to contain_exactly(inactive_http_integration) }
+ end
+
+ context 'when integration is from other project' do
+ let(:params) { { id: global_id_of(other_proj_integration) } }
+
+ it { is_expected.to be_empty }
+ end
+ end
+
+ context 'when Prometheus Integration ID is given' do
+ context 'when integration is from the current project' do
+ let(:params) { { id: global_id_of(prometheus_integration) } }
+
+ it { is_expected.to contain_exactly(prometheus_integration) }
+ end
+
+ context 'when integration is from other project' do
+ let(:params) { { id: global_id_of(other_proj_prometheus_integration) } }
+
+ it { is_expected.to be_empty }
+ end
+ end
end
private
def resolve_http_integrations(args = {}, context = { current_user: current_user })
- resolve(described_class, obj: project, ctx: context)
+ resolve(described_class, obj: project, args: args, ctx: context)
end
end
diff --git a/spec/graphql/resolvers/blobs_resolver_spec.rb b/spec/graphql/resolvers/blobs_resolver_spec.rb
new file mode 100644
index 00000000000..bc0344796ee
--- /dev/null
+++ b/spec/graphql/resolvers/blobs_resolver_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::BlobsResolver do
+ include GraphqlHelpers
+
+ describe '.resolver_complexity' do
+ it 'adds one per path being resolved' do
+ control = described_class.resolver_complexity({}, child_complexity: 1)
+
+ expect(described_class.resolver_complexity({ paths: %w[a b c] }, child_complexity: 1))
+ .to eq(control + 3)
+ end
+ end
+
+ describe '#resolve' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:repository) { project.repository }
+ let(:args) { { paths: paths, ref: ref } }
+ let(:paths) { [] }
+ let(:ref) { nil }
+
+ subject(:resolve_blobs) { resolve(described_class, obj: repository, args: args, ctx: { current_user: user }) }
+
+ context 'when unauthorized' do
+ it 'raises an exception' do
+ expect { resolve_blobs }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ end
+ end
+
+ context 'when authorized' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'using no filter' do
+ it 'returns nothing' do
+ is_expected.to be_empty
+ end
+ end
+
+ context 'using paths filter' do
+ let(:paths) { ['README.md'] }
+
+ it 'returns the specified blobs for HEAD' do
+ is_expected.to contain_exactly(have_attributes(path: 'README.md'))
+ end
+
+ context 'specifying a non-existent blob' do
+ let(:paths) { ['non-existent'] }
+
+ it 'returns nothing' do
+ is_expected.to be_empty
+ end
+ end
+
+ context 'specifying a different ref' do
+ let(:ref) { 'add-pdf-file' }
+ let(:paths) { ['files/pdf/test.pdf', 'README.md'] }
+
+ it 'returns the specified blobs for that ref' do
+ is_expected.to contain_exactly(
+ have_attributes(path: 'files/pdf/test.pdf'),
+ have_attributes(path: 'README.md')
+ )
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/board_list_issues_resolver_spec.rb b/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
index 5eda840854a..6ffc8b045e9 100644
--- a/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/board_list_issues_resolver_spec.rb
@@ -39,6 +39,24 @@ RSpec.describe Resolvers::BoardListIssuesResolver do
expect(result).to match_array([issue1])
end
+
+ it 'raises an exception if both assignee_username and assignee_wildcard_id are present' do
+ expect do
+ resolve_board_list_issues(args: { filters: { assignee_username: ['username'], assignee_wildcard_id: 'NONE' } })
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError)
+ end
+
+ it 'accepts assignee wildcard id NONE' do
+ result = resolve_board_list_issues(args: { filters: { assignee_wildcard_id: 'NONE' } })
+
+ expect(result).to match_array([issue1, issue2, issue3])
+ end
+
+ it 'accepts assignee wildcard id ANY' do
+ result = resolve_board_list_issues(args: { filters: { assignee_wildcard_id: 'ANY' } })
+
+ expect(result).to match_array([])
+ end
end
end
diff --git a/spec/graphql/resolvers/ci/jobs_resolver_spec.rb b/spec/graphql/resolvers/ci/jobs_resolver_spec.rb
index c44f6b623d7..1b69bf7f63a 100644
--- a/spec/graphql/resolvers/ci/jobs_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/jobs_resolver_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe Resolvers::Ci::JobsResolver do
create(:ci_build, :sast, name: 'DAST job', pipeline: pipeline)
create(:ci_build, :dast, name: 'SAST job', pipeline: pipeline)
create(:ci_build, :container_scanning, name: 'Container scanning job', pipeline: pipeline)
+ create(:ci_build, name: 'Job with tags', pipeline: pipeline, tag_list: ['review'])
end
describe '#resolve' do
@@ -24,7 +25,8 @@ RSpec.describe Resolvers::Ci::JobsResolver do
have_attributes(name: 'Normal job'),
have_attributes(name: 'DAST job'),
have_attributes(name: 'SAST job'),
- have_attributes(name: 'Container scanning job')
+ have_attributes(name: 'Container scanning job'),
+ have_attributes(name: 'Job with tags')
)
end
end
@@ -43,5 +45,18 @@ RSpec.describe Resolvers::Ci::JobsResolver do
)
end
end
+
+ context 'when a job has tags' do
+ it "returns jobs with tags when applicable" do
+ jobs = resolve(described_class, obj: pipeline)
+ expect(jobs).to contain_exactly(
+ have_attributes(tag_list: []),
+ have_attributes(tag_list: []),
+ have_attributes(tag_list: []),
+ have_attributes(tag_list: []),
+ have_attributes(tag_list: ['review'])
+ )
+ end
+ end
end
end
diff --git a/spec/graphql/resolvers/ci/runner_platforms_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_platforms_resolver_spec.rb
index 1eb6f363d5b..3cb6e94e81e 100644
--- a/spec/graphql/resolvers/ci/runner_platforms_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runner_platforms_resolver_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Resolvers::Ci::RunnerPlatformsResolver do
subject(:resolve_subject) { resolve(described_class) }
it 'returns all possible runner platforms' do
- expect(resolve_subject).to include(
+ expect(resolve_subject).to contain_exactly(
hash_including(name: :linux), hash_including(name: :osx),
hash_including(name: :windows), hash_including(name: :docker),
hash_including(name: :kubernetes)
diff --git a/spec/graphql/resolvers/ci/runner_setup_resolver_spec.rb b/spec/graphql/resolvers/ci/runner_setup_resolver_spec.rb
index 3d004290d9b..13ef89023d9 100644
--- a/spec/graphql/resolvers/ci/runner_setup_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/runner_setup_resolver_spec.rb
@@ -8,12 +8,11 @@ RSpec.describe Resolvers::Ci::RunnerSetupResolver do
describe '#resolve' do
let(:user) { create(:user) }
- subject(:resolve_subject) { resolve(described_class, ctx: { current_user: user }, args: { platform: platform, architecture: 'amd64' }.merge(target_param)) }
+ subject(:resolve_subject) { resolve(described_class, ctx: { current_user: user }, args: { platform: platform, architecture: 'amd64' }) }
context 'with container platforms' do
let(:platform) { 'docker' }
let(:project) { create(:project) }
- let(:target_param) { { project_id: project.to_global_id } }
it 'returns install instructions' do
expect(resolve_subject[:install_instructions]).not_to eq(nil)
@@ -27,77 +26,9 @@ RSpec.describe Resolvers::Ci::RunnerSetupResolver do
context 'with regular platforms' do
let(:platform) { 'linux' }
- context 'without target parameter' do
- let(:target_param) { {} }
-
- context 'when user is not admin' do
- it 'returns access error' do
- expect { resolve_subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
- end
- end
-
- context 'when user is admin' do
- before do
- user.update!(admin: true)
- end
-
- it 'returns install and register instructions' do
- expect(resolve_subject.keys).to contain_exactly(:install_instructions, :register_instructions)
- expect(resolve_subject.values).not_to include(nil)
- end
- end
- end
-
- context 'with project target parameter' do
- let(:project) { create(:project) }
- let(:target_param) { { project_id: project.to_global_id } }
-
- context 'when user has access to admin builds on project' do
- before do
- project.add_maintainer(user)
- end
-
- it 'returns install and register instructions' do
- expect(resolve_subject.keys).to contain_exactly(:install_instructions, :register_instructions)
- expect(resolve_subject.values).not_to include(nil)
- end
- end
-
- context 'when user does not have access to admin builds on project' do
- before do
- project.add_developer(user)
- end
-
- it 'returns access error' do
- expect { resolve_subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
- end
- end
- end
-
- context 'with group target parameter' do
- let(:group) { create(:group) }
- let(:target_param) { { group_id: group.to_global_id } }
-
- context 'when user has access to admin builds on group' do
- before do
- group.add_owner(user)
- end
-
- it 'returns install and register instructions' do
- expect(resolve_subject.keys).to contain_exactly(:install_instructions, :register_instructions)
- expect(resolve_subject.values).not_to include(nil)
- end
- end
-
- context 'when user does not have access to admin builds on group' do
- before do
- group.add_developer(user)
- end
-
- it 'returns access error' do
- expect { resolve_subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
- end
- end
+ it 'returns install and register instructions' do
+ expect(resolve_subject.keys).to contain_exactly(:install_instructions, :register_instructions)
+ expect(resolve_subject.values).not_to include(nil)
end
end
end
diff --git a/spec/graphql/resolvers/ci/test_report_summary_resolver_spec.rb b/spec/graphql/resolvers/ci/test_report_summary_resolver_spec.rb
new file mode 100644
index 00000000000..e78bd06b567
--- /dev/null
+++ b/spec/graphql/resolvers/ci/test_report_summary_resolver_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Ci::TestReportSummaryResolver do
+ include GraphqlHelpers
+
+ describe '#resolve' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project, :public, :repository) }
+
+ subject(:resolve_subject) { resolve(described_class, obj: pipeline) }
+
+ context 'when pipeline has build report results' do
+ let(:pipeline) { create(:ci_pipeline, :with_report_results, project: project) }
+
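+ # The expected totals below reflect the report results attached via the :with_report_results trait.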
+ it 'returns test report summary data' do
+ expect(resolve_subject.keys).to contain_exactly(:total, :test_suites)
+ expect(resolve_subject[:test_suites][0].keys).to contain_exactly(:build_ids, :name, :total_time, :total_count, :success_count, :failed_count, :skipped_count, :error_count, :suite_error)
+ expect(resolve_subject[:total][:time]).to eq(0.42)
+ expect(resolve_subject[:total][:count]).to eq(2)
+ expect(resolve_subject[:total][:success]).to eq(0)
+ expect(resolve_subject[:total][:failed]).to eq(0)
+ expect(resolve_subject[:total][:skipped]).to eq(0)
+ expect(resolve_subject[:total][:error]).to eq(2)
+ expect(resolve_subject[:total][:suite_error]).to eq(nil)
+ end
+ end
+
+ context 'when pipeline does not have build report results' do
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ it 'renders test report summary data' do
+ expect(resolve_subject.keys).to contain_exactly(:total, :test_suites)
+ expect(resolve_subject[:test_suites]).to eq([])
+ expect(resolve_subject[:total][:time]).to eq(0)
+ expect(resolve_subject[:total][:count]).to eq(0)
+ expect(resolve_subject[:total][:success]).to eq(0)
+ expect(resolve_subject[:total][:failed]).to eq(0)
+ expect(resolve_subject[:total][:skipped]).to eq(0)
+ expect(resolve_subject[:total][:error]).to eq(0)
+ expect(resolve_subject[:total][:suite_error]).to eq(nil)
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/ci/test_suite_resolver_spec.rb b/spec/graphql/resolvers/ci/test_suite_resolver_spec.rb
new file mode 100644
index 00000000000..606c6eb03a3
--- /dev/null
+++ b/spec/graphql/resolvers/ci/test_suite_resolver_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::Ci::TestSuiteResolver do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, :repository) }
+
+ describe '#resolve' do
+ subject(:test_suite) { resolve(described_class, obj: pipeline, args: { build_ids: build_ids }) }
+
+ context 'when pipeline has builds with test reports' do
+ let_it_be(:main_pipeline) { create(:ci_pipeline, :with_test_reports_with_three_failures, project: project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, :with_test_reports_with_three_failures, project: project, ref: 'new-feature') }
+
+ let(:suite_name) { 'test' }
+ let(:build_ids) { pipeline.latest_builds.pluck(:id) }
+
+ before do
+ build = main_pipeline.builds.last
+ build.update_column(:finished_at, 1.day.ago) # Just to be sure we are included in the report window
+
+ # The JUnit fixture for the given build has 3 failures.
+ # This service will create 1 test case failure record for each.
+ Ci::TestFailureHistoryService.new(main_pipeline).execute
+ end
+
+ it 'renders test suite data' do
+ expect(test_suite[:name]).to eq('test')
+
+ # Each test failure in this pipeline has a matching failure in the default branch
+ recent_failures = test_suite[:test_cases].map { |tc| tc[:recent_failures] }
+ expect(recent_failures).to eq([
+ { count: 1, base_branch: 'master' },
+ { count: 1, base_branch: 'master' },
+ { count: 1, base_branch: 'master' }
+ ])
+ end
+ end
+
+ context 'when pipeline has no builds that matches the given build_ids' do
+ let_it_be(:pipeline) { create(:ci_empty_pipeline) }
+
+ let(:suite_name) { 'test' }
+ let(:build_ids) { [non_existing_record_id] }
+
+ it 'returns nil' do
+ expect(test_suite).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/concerns/looks_ahead_spec.rb b/spec/graphql/resolvers/concerns/looks_ahead_spec.rb
index 27ac1572cab..4c244da5c62 100644
--- a/spec/graphql/resolvers/concerns/looks_ahead_spec.rb
+++ b/spec/graphql/resolvers/concerns/looks_ahead_spec.rb
@@ -38,11 +38,8 @@ RSpec.describe LooksAhead do
user = Class.new(GraphQL::Schema::Object) do
graphql_name 'User'
field :name, String, null: true
- field :issues, issue.connection_type,
- null: true
- field :issues_with_lookahead, issue.connection_type,
- resolver: issues_resolver,
- null: true
+ field :issues, issue.connection_type, null: true
+ field :issues_with_lookahead, issue.connection_type, resolver: issues_resolver, null: true
end
Class.new(GraphQL::Schema) do
@@ -101,7 +98,7 @@ RSpec.describe LooksAhead do
expect(res['errors']).to be_blank
expect(res.dig('data', 'findUser', 'name')).to eq(the_user.name)
- %w(issues issuesWithLookahead).each do |field|
+ %w[issues issuesWithLookahead].each do |field|
expect(all_issue_titles(res, field)).to match_array(issue_titles)
expect(all_label_ids(res, field)).to match_array(expected_label_ids)
end
diff --git a/spec/graphql/resolvers/group_milestones_resolver_spec.rb b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
index d8ff8e9c1f2..dd3f1676538 100644
--- a/spec/graphql/resolvers/group_milestones_resolver_spec.rb
+++ b/spec/graphql/resolvers/group_milestones_resolver_spec.rb
@@ -136,5 +136,56 @@ RSpec.describe Resolvers::GroupMilestonesResolver do
expect(resolve_group_milestones(args)).to match_array([milestone1, milestone2, milestone3])
end
end
+
+ describe 'include_descendants and include_ancestors' do
+ let_it_be(:parent_group) { create(:group, :public) }
+ let_it_be(:group) { create(:group, :public, parent: parent_group) }
+ let_it_be(:accessible_group) { create(:group, :private, parent: group) }
+ let_it_be(:accessible_project) { create(:project, group: accessible_group) }
+ let_it_be(:inaccessible_group) { create(:group, :private, parent: group) }
+ let_it_be(:inaccessible_project) { create(:project, :private, group: group) }
+ let_it_be(:milestone1) { create(:milestone, group: group) }
+ let_it_be(:milestone2) { create(:milestone, group: accessible_group) }
+ let_it_be(:milestone3) { create(:milestone, project: accessible_project) }
+ let_it_be(:milestone4) { create(:milestone, group: inaccessible_group) }
+ let_it_be(:milestone5) { create(:milestone, project: inaccessible_project) }
+ let_it_be(:milestone6) { create(:milestone, group: parent_group) }
+
+ before do
+ accessible_group.add_developer(current_user)
+ end
+
+ context 'when including neither ancestor nor descendant milestones in a public group' do
+ let(:args) { {} }
+
+ it 'finds milestones only in accessible projects and groups' do
+ expect(resolve_group_milestones(args)).to match_array([milestone1])
+ end
+ end
+
+ context 'when including descendant milestones in a public group' do
+ let(:args) { { include_descendants: true } }
+
+ it 'finds milestones only in accessible projects and groups' do
+ expect(resolve_group_milestones(args)).to match_array([milestone1, milestone2, milestone3])
+ end
+ end
+
+ context 'when including ancestor milestones in a public group' do
+ let(:args) { { include_ancestors: true } }
+
+ it 'finds milestones only in accessible projects and groups' do
+ expect(resolve_group_milestones(args)).to match_array([milestone1, milestone6])
+ end
+ end
+
+ context 'when including both ancestor and descendant milestones in a public group' do
+ let(:args) { { include_descendants: true, include_ancestors: true } }
+
+ it 'finds milestones only in accessible projects and groups' do
+ expect(resolve_group_milestones(args)).to match_array([milestone1, milestone2, milestone3, milestone6])
+ end
+ end
+ end
end
end
diff --git a/spec/graphql/resolvers/issue_status_counts_resolver_spec.rb b/spec/graphql/resolvers/issue_status_counts_resolver_spec.rb
index decc3569d6c..3fbd9bd2368 100644
--- a/spec/graphql/resolvers/issue_status_counts_resolver_spec.rb
+++ b/spec/graphql/resolvers/issue_status_counts_resolver_spec.rb
@@ -69,6 +69,14 @@ RSpec.describe Resolvers::IssueStatusCountsResolver do
expect(result.closed).to eq 1
end
+ context 'when both assignee_username and assignee_usernames are provided' do
+ it 'raises a mutually exclusive filter error' do
+ expect do
+ resolve_issue_status_counts(assignee_usernames: [current_user.username], assignee_username: current_user.username)
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, 'only one of [assigneeUsernames, assigneeUsername] arguments is allowed at the same time.')
+ end
+ end
+
private
def resolve_issue_status_counts(args = {}, context = { current_user: current_user })
diff --git a/spec/graphql/resolvers/issues_resolver_spec.rb b/spec/graphql/resolvers/issues_resolver_spec.rb
index 6e802bf7d25..7c2ceb50066 100644
--- a/spec/graphql/resolvers/issues_resolver_spec.rb
+++ b/spec/graphql/resolvers/issues_resolver_spec.rb
@@ -46,10 +46,6 @@ RSpec.describe Resolvers::IssuesResolver do
expect(resolve_issues(milestone_title: [milestone.title])).to contain_exactly(issue1)
end
- it 'filters by assignee_username' do
- expect(resolve_issues(assignee_username: [assignee.username])).to contain_exactly(issue2)
- end
-
it 'filters by two assignees' do
assignee2 = create(:user)
issue2.update!(assignees: [assignee, assignee2])
@@ -78,6 +74,24 @@ RSpec.describe Resolvers::IssuesResolver do
expect(resolve_issues(label_name: [label1.title, label2.title])).to contain_exactly(issue2)
end
+ describe 'filters by assignee_username' do
+ it 'filters by assignee_username' do
+ expect(resolve_issues(assignee_username: [assignee.username])).to contain_exactly(issue2)
+ end
+
+ it 'filters by assignee_usernames' do
+ expect(resolve_issues(assignee_usernames: [assignee.username])).to contain_exactly(issue2)
+ end
+
+ context 'when both assignee_username and assignee_usernames are provided' do
+ it 'raises a mutually exclusive filter error' do
+ expect do
+ resolve_issues(assignee_usernames: [assignee.username], assignee_username: assignee.username)
+ end.to raise_error(Gitlab::Graphql::Errors::ArgumentError, 'only one of [assigneeUsernames, assigneeUsername] arguments is allowed at the same time.')
+ end
+ end
+ end
+
describe 'filters by created_at' do
it 'filters by created_before' do
expect(resolve_issues(created_before: 2.hours.ago)).to contain_exactly(issue1)
@@ -144,6 +158,29 @@ RSpec.describe Resolvers::IssuesResolver do
end
end
+ describe 'filters by negated params' do
+ it 'returns issues without the specified iids' do
+ expect(resolve_issues(not: { iids: [issue1.iid] })).to contain_exactly(issue2)
+ end
+
+ it 'returns issues without the specified label names' do
+ expect(resolve_issues(not: { label_name: [label1.title] })).to be_empty
+ expect(resolve_issues(not: { label_name: [label2.title] })).to contain_exactly(issue1)
+ end
+
+ it 'returns issues without the specified milestone' do
+ expect(resolve_issues(not: { milestone_title: [milestone.title] })).to contain_exactly(issue2)
+ end
+
+ it 'returns issues without the specified assignee_usernames' do
+ expect(resolve_issues(not: { assignee_usernames: [assignee.username] })).to contain_exactly(issue1)
+ end
+
+ it 'returns issues without the specified assignee_id' do
+ expect(resolve_issues(not: { assignee_id: [assignee.id] })).to contain_exactly(issue1)
+ end
+ end
+
describe 'sorting' do
context 'when sorting by created' do
it 'sorts issues ascending' do
diff --git a/spec/graphql/resolvers/merge_requests_resolver_spec.rb b/spec/graphql/resolvers/merge_requests_resolver_spec.rb
index 7dd968d90a8..aec6c6c6708 100644
--- a/spec/graphql/resolvers/merge_requests_resolver_spec.rb
+++ b/spec/graphql/resolvers/merge_requests_resolver_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
include SortingHelper
let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:other_project) { create(:project, :repository) }
let_it_be(:milestone) { create(:milestone, project: project) }
let_it_be(:current_user) { create(:user) }
let_it_be(:other_user) { create(:user) }
@@ -16,10 +17,17 @@ RSpec.describe Resolvers::MergeRequestsResolver do
let_it_be(:merge_request_3) { create(:merge_request, :unique_branches, **common_attrs) }
let_it_be(:merge_request_4) { create(:merge_request, :unique_branches, :locked, **common_attrs) }
let_it_be(:merge_request_5) { create(:merge_request, :simple, :locked, **common_attrs) }
- let_it_be(:merge_request_6) { create(:labeled_merge_request, :unique_branches, labels: create_list(:label, 2, project: project), **common_attrs) }
- let_it_be(:merge_request_with_milestone) { create(:merge_request, :unique_branches, **common_attrs, milestone: milestone) }
- let_it_be(:other_project) { create(:project, :repository) }
- let_it_be(:other_merge_request) { create(:merge_request, source_project: other_project, target_project: other_project) }
+ let_it_be(:merge_request_6) do
+ create(:labeled_merge_request, :unique_branches, **common_attrs, labels: create_list(:label, 2, project: project))
+ end
+
+ let_it_be(:merge_request_with_milestone) do
+ create(:merge_request, :unique_branches, **common_attrs, milestone: milestone)
+ end
+
+ let_it_be(:other_merge_request) do
+ create(:merge_request, source_project: other_project, target_project: other_project)
+ end
let(:iid_1) { merge_request_1.iid }
let(:iid_2) { merge_request_2.iid }
@@ -41,13 +49,16 @@ RSpec.describe Resolvers::MergeRequestsResolver do
# AND "merge_requests"."iid" = 1 ORDER BY "merge_requests"."id" DESC
# SELECT "projects".* FROM "projects" WHERE "projects"."id" = 2
# SELECT "project_features".* FROM "project_features" WHERE "project_features"."project_id" = 2
- let(:queries_per_project) { 3 }
+ let(:queries_per_project) { 4 }
- context 'no arguments' do
+ context 'without arguments' do
it 'returns all merge requests' do
result = resolve_mr(project)
- expect(result).to contain_exactly(merge_request_1, merge_request_2, merge_request_3, merge_request_4, merge_request_5, merge_request_6, merge_request_with_milestone)
+ expect(result).to contain_exactly(
+ merge_request_1, merge_request_2, merge_request_3, merge_request_4, merge_request_5,
+ merge_request_6, merge_request_with_milestone
+ )
end
it 'returns only merge requests that the current user can see' do
@@ -57,7 +68,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
end
end
- context 'by iid alone' do
+ context 'with iid alone' do
it 'batch-resolves by target project full path and individual IID', :request_store do
# 1 query for project_authorizations, and 1 for merge_requests
result = batch_sync(max_queries: queries_per_project) do
@@ -83,7 +94,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
expect(result).to contain_exactly(merge_request_1, merge_request_2, merge_request_3)
end
- it 'can batch-resolve merge requests from different projects', :request_store, :use_clean_rails_memory_store_caching do
+ it 'can batch-resolve merge requests from different projects', :request_store do
# 2 queries for project_authorizations, and 2 for merge_requests
results = batch_sync(max_queries: queries_per_project * 2) do
a = resolve_mr(project, iids: [iid_1])
@@ -121,7 +132,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
end
end
- context 'by source branches' do
+ context 'with source branches argument' do
it 'takes one argument' do
result = resolve_mr(project, source_branches: [merge_request_3.source_branch])
@@ -131,13 +142,13 @@ RSpec.describe Resolvers::MergeRequestsResolver do
it 'takes more than one argument' do
mrs = [merge_request_3, merge_request_4]
branches = mrs.map(&:source_branch)
- result = resolve_mr(project, source_branches: branches )
+ result = resolve_mr(project, source_branches: branches)
expect(result).to match_array(mrs)
end
end
- context 'by target branches' do
+ context 'with target branches argument' do
it 'takes one argument' do
result = resolve_mr(project, target_branches: [merge_request_3.target_branch])
@@ -153,7 +164,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
end
end
- context 'by state' do
+ context 'with state argument' do
it 'takes one argument' do
result = resolve_mr(project, state: 'locked')
@@ -161,7 +172,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
end
end
- context 'by label' do
+ context 'with label argument' do
let_it_be(:label) { merge_request_6.labels.first }
let_it_be(:with_label) { create(:labeled_merge_request, :closed, labels: [label], **common_attrs) }
@@ -178,7 +189,18 @@ RSpec.describe Resolvers::MergeRequestsResolver do
end
end
- context 'by merged_after and merged_before' do
+ context 'with negated label argument' do
+ let_it_be(:label) { merge_request_6.labels.first }
+ let_it_be(:with_label) { create(:labeled_merge_request, :closed, labels: [label], **common_attrs) }
+
+ it 'excludes merge requests with given label from selection' do
+ result = resolve_mr(project, not: { labels: [label.title] })
+
+ expect(result).not_to include(merge_request_6, with_label)
+ end
+ end
+
+ context 'with merged_after and merged_before arguments' do
before do
merge_request_1.metrics.update!(merged_at: 10.days.ago)
end
@@ -196,7 +218,7 @@ RSpec.describe Resolvers::MergeRequestsResolver do
end
end
- context 'by milestone' do
+ context 'with milestone argument' do
it 'filters merge requests by milestone title' do
result = resolve_mr(project, milestone_title: milestone.title)
@@ -210,9 +232,17 @@ RSpec.describe Resolvers::MergeRequestsResolver do
end
end
+ context 'with negated milestone argument' do
+ it 'filters out merge requests with given milestone title' do
+ result = resolve_mr(project, not: { milestone_title: milestone.title })
+
+ expect(result).not_to include(merge_request_with_milestone)
+ end
+ end
+
describe 'combinations' do
it 'requires all filters' do
- create(:merge_request, :closed, source_project: project, target_project: project, source_branch: merge_request_4.source_branch)
+ create(:merge_request, :closed, **common_attrs, source_branch: merge_request_4.source_branch)
result = resolve_mr(project, source_branches: [merge_request_4.source_branch], state: 'locked')
diff --git a/spec/graphql/resolvers/namespace_projects_resolver_spec.rb b/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
index 147a02e1d79..618d012bd6d 100644
--- a/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
+++ b/spec/graphql/resolvers/namespace_projects_resolver_spec.rb
@@ -112,7 +112,7 @@ RSpec.describe Resolvers::NamespaceProjectsResolver do
subject(:projects) { resolve_projects(args) }
let(:include_subgroups) { false }
- let(:project_3) { create(:project, name: 'Project', path: 'project', namespace: namespace) }
+ let!(:project_3) { create(:project, name: 'Project', path: 'project', namespace: namespace) }
context 'when ids is provided' do
let(:ids) { [project_3.to_global_id.to_s] }
diff --git a/spec/graphql/resolvers/project_jobs_resolver_spec.rb b/spec/graphql/resolvers/project_jobs_resolver_spec.rb
new file mode 100644
index 00000000000..94df2999163
--- /dev/null
+++ b/spec/graphql/resolvers/project_jobs_resolver_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::ProjectJobsResolver do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:irrelevant_project) { create(:project, :repository) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:irrelevant_pipeline) { create(:ci_pipeline, project: irrelevant_project) }
+ let_it_be(:build_one) { create(:ci_build, :success, name: 'Build One', pipeline: pipeline) }
+ let_it_be(:build_two) { create(:ci_build, :success, name: 'Build Two', pipeline: pipeline) }
+ let_it_be(:build_three) { create(:ci_build, :failed, name: 'Build Three', pipeline: pipeline) }
+
+ let(:irrelevant_build) { create(:ci_build, name: 'Irrelevant Build', pipeline: irrelevant_pipeline) }
+ let(:args) { {} }
+ let(:current_user) { create(:user) }
+
+ subject { resolve_jobs(args) }
+
+ describe '#resolve' do
+ context 'with authorized user' do
+ before do
+ project.add_developer(current_user)
+ end
+
+ context 'with statuses argument' do
+ let(:args) { { statuses: [Types::Ci::JobStatusEnum.coerce_isolated_input('SUCCESS')] } }
+
+ it { is_expected.to contain_exactly(build_one, build_two) }
+ end
+
+ context 'without statuses argument' do
+ it { is_expected.to contain_exactly(build_one, build_two, build_three) }
+ end
+ end
+
+ context 'with unauthorized user' do
+ let(:current_user) { nil }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ private
+
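+ # Resolves jobs for the project with the given arguments and user context.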
+ def resolve_jobs(args = {}, context = { current_user: current_user })
+ resolve(described_class, obj: project, args: args, ctx: context)
+ end
+end
diff --git a/spec/graphql/resolvers/project_pipeline_resolver_spec.rb b/spec/graphql/resolvers/project_pipeline_resolver_spec.rb
index 69127c4b061..3d33e0b500d 100644
--- a/spec/graphql/resolvers/project_pipeline_resolver_spec.rb
+++ b/spec/graphql/resolvers/project_pipeline_resolver_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Resolvers::ProjectPipelineResolver do
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project, iid: '1234', sha: 'sha') }
+ let_it_be(:other_project_pipeline) { create(:ci_pipeline, project: project, iid: '1235', sha: 'sha2') }
let_it_be(:other_pipeline) { create(:ci_pipeline) }
let(:current_user) { create(:user) }
@@ -23,6 +24,11 @@ RSpec.describe Resolvers::ProjectPipelineResolver do
end
it 'resolves pipeline for the passed iid' do
+ expect(Ci::PipelinesFinder)
+ .to receive(:new)
+ .with(project, current_user, iids: ['1234'])
+ .and_call_original
+
result = batch_sync do
resolve_pipeline(project, { iid: '1234' })
end
@@ -31,6 +37,11 @@ RSpec.describe Resolvers::ProjectPipelineResolver do
end
it 'resolves pipeline for the passed sha' do
+ expect(Ci::PipelinesFinder)
+ .to receive(:new)
+ .with(project, current_user, sha: ['sha'])
+ .and_call_original
+
result = batch_sync do
resolve_pipeline(project, { sha: 'sha' })
end
@@ -39,8 +50,6 @@ RSpec.describe Resolvers::ProjectPipelineResolver do
end
it 'keeps the queries under the threshold for iid' do
- create(:ci_pipeline, project: project, iid: '1235')
-
control = ActiveRecord::QueryRecorder.new do
batch_sync { resolve_pipeline(project, { iid: '1234' }) }
end
@@ -54,8 +63,6 @@ RSpec.describe Resolvers::ProjectPipelineResolver do
end
it 'keeps the queries under the threshold for sha' do
- create(:ci_pipeline, project: project, sha: 'sha2')
-
control = ActiveRecord::QueryRecorder.new do
batch_sync { resolve_pipeline(project, { sha: 'sha' }) }
end
diff --git a/spec/graphql/resolvers/repository_branch_names_resolver_spec.rb b/spec/graphql/resolvers/repository_branch_names_resolver_spec.rb
new file mode 100644
index 00000000000..398dd7a2e2e
--- /dev/null
+++ b/spec/graphql/resolvers/repository_branch_names_resolver_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::RepositoryBranchNamesResolver do
+ include GraphqlHelpers
+
+ let(:project) { create(:project, :repository) }
+
+ describe '#resolve' do
+ subject(:resolve_branch_names) do
+ resolve(
+ described_class,
+ obj: project.repository,
+ args: { search_pattern: pattern },
+ ctx: { current_user: project.creator }
+ )
+ end
+
+ context 'with empty search pattern' do
+ let(:pattern) { '' }
+
+ it 'returns nil' do
+ expect(resolve_branch_names).to eq(nil)
+ end
+ end
+
+ context 'with a valid search pattern' do
+ let(:pattern) { 'mas*' }
+
+ it 'returns matching branches' do
+ expect(resolve_branch_names).to match_array(['master'])
+ end
+ end
+ end
+end
diff --git a/spec/graphql/resolvers/timelog_resolver_spec.rb b/spec/graphql/resolvers/timelog_resolver_spec.rb
new file mode 100644
index 00000000000..585cd657e35
--- /dev/null
+++ b/spec/graphql/resolvers/timelog_resolver_spec.rb
@@ -0,0 +1,168 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Resolvers::TimelogResolver do
+ include GraphqlHelpers
+
+ specify do
+ expect(described_class).to have_non_null_graphql_type(::Types::TimelogType.connection_type)
+ end
+
+ context "with a group" do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :public, group: group) }
+
+ before_all do
+ group.add_developer(current_user)
+ project.add_developer(current_user)
+ end
+
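+ # Reset the group's memoized timelogs between examples.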
+ before do
+ group.clear_memoization(:timelogs)
+ end
+
+ describe '#resolve' do
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:issue2) { create(:issue, project: project) }
+ let_it_be(:timelog1) { create(:issue_timelog, issue: issue, spent_at: 2.days.ago.beginning_of_day) }
+ let_it_be(:timelog2) { create(:issue_timelog, issue: issue2, spent_at: 2.days.ago.end_of_day) }
+ let_it_be(:timelog3) { create(:issue_timelog, issue: issue2, spent_at: 10.days.ago) }
+
+ let(:args) { { start_time: 6.days.ago, end_time: 2.days.ago.noon } }
+
+ it 'finds all timelogs within given dates' do
+ timelogs = resolve_timelogs(**args)
+
+ expect(timelogs).to contain_exactly(timelog1)
+ end
+
+ it 'returns nothing when user has insufficient permissions' do
+ user = create(:user)
+ group.add_guest(current_user)
+
+ expect(resolve_timelogs(user: user, **args)).to be_empty
+ end
+
+ context 'when start_time and end_date are present' do
+ let(:args) { { start_time: 6.days.ago, end_date: 2.days.ago } }
+
+ it 'finds timelogs up to the end of the day of end_date' do
+ timelogs = resolve_timelogs(**args)
+
+ expect(timelogs).to contain_exactly(timelog1, timelog2)
+ end
+ end
+
+ context 'when start_date and end_time are present' do
+ let(:args) { { start_date: 6.days.ago, end_time: 2.days.ago.noon } }
+
+ it 'finds all timelogs between start_date and end_time' do
+ timelogs = resolve_timelogs(**args)
+
+ expect(timelogs).to contain_exactly(timelog1)
+ end
+ end
+
+ context 'when arguments are invalid' do
+ let_it_be(:error_class) { Gitlab::Graphql::Errors::ArgumentError }
+
+ context 'when no time or date arguments are present' do
+ let(:args) { {} }
+
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Start and End arguments must be present/)
+ end
+ end
+
+ context 'when only start_time is present' do
+ let(:args) { { start_time: 6.days.ago } }
+
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Both Start and End arguments must be present/)
+ end
+ end
+
+ context 'when only end_time is present' do
+ let(:args) { { end_time: 2.days.ago } }
+
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Both Start and End arguments must be present/)
+ end
+ end
+
+ context 'when only start_date is present' do
+ let(:args) { { start_date: 6.days.ago } }
+
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Both Start and End arguments must be present/)
+ end
+ end
+
+ context 'when only end_date is present' do
+ let(:args) { { end_date: 2.days.ago } }
+
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Both Start and End arguments must be present/)
+ end
+ end
+
+ context 'when start_time and start_date are present' do
+ let(:args) { { start_time: 6.days.ago, start_date: 6.days.ago } }
+
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Both Start and End arguments must be present/)
+ end
+ end
+
+ context 'when end_time and end_date are present' do
+ let(:args) { { end_time: 2.days.ago, end_date: 2.days.ago } }
+
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Both Start and End arguments must be present/)
+ end
+ end
+
+ context 'when three arguments are present' do
+ let(:args) { { start_date: 6.days.ago, end_date: 2.days.ago, end_time: 2.days.ago } }
+
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Only Time or Date arguments must be present/)
+ end
+ end
+
+ context 'when start argument is after end argument' do
+ let(:args) { { start_time: 2.days.ago, end_time: 6.days.ago } }
+
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /Start argument must be before End argument/)
+ end
+ end
+
+ context 'when time range is more than 60 days' do
+ let(:args) { { start_time: 3.months.ago, end_time: 2.days.ago } }
+
+ it 'returns correct error' do
+ expect { resolve_timelogs(**args) }
+ .to raise_error(error_class, /The time range period cannot contain more than 60 days/)
+ end
+ end
+ end
+ end
+ end
+
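+ # Resolves timelogs for the group, using the given user (defaults to the developer set up above).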
+ def resolve_timelogs(user: current_user, **args)
+ context = { current_user: user }
+ resolve(described_class, obj: group, args: args, ctx: context)
+ end
+end
diff --git a/spec/graphql/resolvers/users/snippets_resolver_spec.rb b/spec/graphql/resolvers/users/snippets_resolver_spec.rb
index 11a5b7517e0..04fe3213a99 100644
--- a/spec/graphql/resolvers/users/snippets_resolver_spec.rb
+++ b/spec/graphql/resolvers/users/snippets_resolver_spec.rb
@@ -75,9 +75,19 @@ RSpec.describe Resolvers::Users::SnippetsResolver do
end.to raise_error(GraphQL::CoercionError)
end
end
+
+ context 'when user profile is private' do
+ it 'does not return snippets for that user' do
+ expect(resolve_snippets(obj: other_user)).to contain_exactly(other_personal_snippet, other_project_snippet)
+
+ other_user.update!(private_profile: true)
+
+ expect(resolve_snippets(obj: other_user)).to be_empty
+ end
+ end
end
- def resolve_snippets(args: {})
- resolve(described_class, args: args, ctx: { current_user: current_user }, obj: current_user)
+ def resolve_snippets(args: {}, context_user: current_user, obj: current_user)
+ resolve(described_class, args: args, ctx: { current_user: context_user }, obj: obj)
end
end
diff --git a/spec/graphql/types/admin/analytics/usage_trends/measurement_type_spec.rb b/spec/graphql/types/admin/analytics/usage_trends/measurement_type_spec.rb
index c50092d7f0e..d1c2b4044c1 100644
--- a/spec/graphql/types/admin/analytics/usage_trends/measurement_type_spec.rb
+++ b/spec/graphql/types/admin/analytics/usage_trends/measurement_type_spec.rb
@@ -11,6 +11,7 @@ RSpec.describe GitlabSchema.types['UsageTrendsMeasurement'] do
describe 'authorization' do
let_it_be(:measurement) { create(:usage_trends_measurement, :project_count) }
+
let(:user) { create(:user) }
let(:query) do
@@ -44,7 +45,7 @@ RSpec.describe GitlabSchema.types['UsageTrendsMeasurement'] do
let(:user) { create(:user, :admin) }
before do
- stub_feature_flags(user_mode_in_session: false)
+ stub_application_setting(admin_mode: false)
end
it 'returns data' do
diff --git a/spec/graphql/types/alert_management/prometheus_integration_type_spec.rb b/spec/graphql/types/alert_management/prometheus_integration_type_spec.rb
index b10c2a2ab2a..d057afb331c 100644
--- a/spec/graphql/types/alert_management/prometheus_integration_type_spec.rb
+++ b/spec/graphql/types/alert_management/prometheus_integration_type_spec.rb
@@ -48,15 +48,21 @@ RSpec.describe GitlabSchema.types['AlertManagementPrometheusIntegration'] do
end
end
- context 'without project' do
- let_it_be(:integration) { create(:prometheus_service, project: nil, group: create(:group)) }
-
- it_behaves_like 'has field with value', 'token' do
- let(:value) { nil }
- end
-
- it_behaves_like 'has field with value', 'url' do
- let(:value) { nil }
+ describe 'a group integration' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:integration) { create(:prometheus_service, project: nil, group: group) }
+
+ # Since it is impossible to authorize the parent here, given that the
+ # project is nil, all fields should be redacted:
+
+ described_class.fields.each_key do |field_name|
+ context "field: #{field_name}" do
+ it 'is redacted' do
+ expect do
+ resolve_field(field_name, integration, current_user: user)
+ end.to raise_error(GraphqlHelpers::UnauthorizedObject)
+ end
+ end
end
end
end
diff --git a/spec/graphql/types/base_enum_spec.rb b/spec/graphql/types/base_enum_spec.rb
index 744aee40044..bab0278ee25 100644
--- a/spec/graphql/types/base_enum_spec.rb
+++ b/spec/graphql/types/base_enum_spec.rb
@@ -3,6 +3,38 @@
require 'spec_helper'
RSpec.describe Types::BaseEnum do
+ describe '.from_rails_enum' do
+ let(:enum_type) { Class.new(described_class) }
+ let(:template) { "The name is '%{name}', James %{name}." }
+
+ let(:enum) do
+ {
+ 'foo' => 1,
+ 'bar' => 2,
+ 'baz' => 100
+ }
+ end
+
+ it 'constructs the correct values' do
+ enum_type.from_rails_enum(enum, description: template)
+
+ expect(enum_type.values).to match(
+ 'FOO' => have_attributes(
+ description: "The name is 'foo', James foo.",
+ value: 'foo'
+ ),
+ 'BAR' => have_attributes(
+ description: "The name is 'bar', James bar.",
+ value: 'bar'
+ ),
+ 'BAZ' => have_attributes(
+ description: "The name is 'baz', James baz.",
+ value: 'baz'
+ )
+ )
+ end
+ end
+
describe '.declarative_enum' do
let(:use_name) { true }
let(:use_description) { true }
@@ -26,12 +58,15 @@ RSpec.describe Types::BaseEnum do
end
end
- subject(:set_declarative_enum) { enum_type.declarative_enum(enum_module, use_name: use_name, use_description: use_description) }
+ subject(:set_declarative_enum) do
+ enum_type.declarative_enum(enum_module, use_name: use_name, use_description: use_description)
+ end
describe '#graphql_name' do
context 'when the use_name is `true`' do
it 'changes the graphql_name' do
- expect { set_declarative_enum }.to change { enum_type.graphql_name }.from('OriginalName').to('Name')
+ expect { set_declarative_enum }
+ .to change(enum_type, :graphql_name).from('OriginalName').to('Name')
end
end
@@ -39,7 +74,8 @@ RSpec.describe Types::BaseEnum do
let(:use_name) { false }
it 'does not change the graphql_name' do
- expect { set_declarative_enum }.not_to change { enum_type.graphql_name }.from('OriginalName')
+ expect { set_declarative_enum }
+ .not_to change(enum_type, :graphql_name).from('OriginalName')
end
end
end
@@ -47,7 +83,8 @@ RSpec.describe Types::BaseEnum do
describe '#description' do
context 'when the use_description is `true`' do
it 'changes the description' do
- expect { set_declarative_enum }.to change { enum_type.description }.from('Original description').to('Description')
+ expect { set_declarative_enum }
+ .to change(enum_type, :description).from('Original description').to('Description')
end
end
@@ -55,7 +92,8 @@ RSpec.describe Types::BaseEnum do
let(:use_description) { false }
it 'does not change the description' do
- expect { set_declarative_enum }.not_to change { enum_type.description }.from('Original description')
+ expect { set_declarative_enum }
+ .not_to change(enum_type, :description).from('Original description')
end
end
end
diff --git a/spec/graphql/types/base_object_spec.rb b/spec/graphql/types/base_object_spec.rb
new file mode 100644
index 00000000000..d8f2ef58ea5
--- /dev/null
+++ b/spec/graphql/types/base_object_spec.rb
@@ -0,0 +1,432 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::BaseObject do
+ include GraphqlHelpers
+
+ describe 'scoping items' do
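+ # Stub authorization: denies access to the object { id: 100 } and to deactivated users.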
+ let_it_be(:custom_auth) do
+ Class.new(::Gitlab::Graphql::Authorize::ObjectAuthorization) do
+ def any?
+ true
+ end
+
+ def ok?(object, _current_user)
+ return false if object == { id: 100 }
+ return false if object.try(:deactivated?)
+
+ true
+ end
+ end
+ end
+
+ let_it_be(:test_schema) do
+ auth = custom_auth.new(nil)
+
+ base_object = Class.new(described_class) do
+ # Override authorization so we don't need to mock Ability
+ define_singleton_method :authorization do
+ auth
+ end
+ end
+
+ y_type = Class.new(base_object) do
+ graphql_name 'Y'
+ authorize :read_y
+ field :id, Integer, null: false
+
+ def id
+ object[:id]
+ end
+ end
+
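+ # An interface with a single value field; type resolution is delegated to the OddOrEven union below.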
+ number_type = Module.new do
+ include ::Types::BaseInterface
+
+ graphql_name 'Number'
+
+ field :value, Integer, null: false
+ end
+
+ odd_type = Class.new(described_class) do
+ graphql_name 'Odd'
+ implements number_type
+
+ authorize :read_odd
+ field :odd_value, Integer, null: false
+
+ def odd_value
+ object[:value]
+ end
+ end
+
+ even_type = Class.new(described_class) do
+ graphql_name 'Even'
+ implements number_type
+
+ authorize :read_even
+ field :even_value, Integer, null: false
+
+ def even_value
+ object[:value]
+ end
+ end
+
+ # an abstract type, delegating authorization to members
+ odd_or_even = Class.new(::Types::BaseUnion) do
+ graphql_name 'OddOrEven'
+
+ possible_types odd_type, even_type
+
+ define_singleton_method :resolve_type do |object, ctx|
+ if object[:value].odd?
+ odd_type
+ else
+ even_type
+ end
+ end
+ end
+
+ number_type.define_singleton_method :resolve_type do |object, ctx|
+ odd_or_even.resolve_type(object, ctx)
+ end
+
+ x_type = Class.new(base_object) do
+ graphql_name 'X'
+ # Scalar types
+ field :title, String, null: true
+ # monomorphic types
+ field :lazy_list_of_ys, [y_type], null: true
+ field :list_of_lazy_ys, [y_type], null: true
+ field :array_ys_conn, y_type.connection_type, null: true
+ # polymorphic types
+ field :polymorphic_conn, odd_or_even.connection_type, null: true
+ field :polymorphic_object, odd_or_even, null: true do
+ argument :value, Integer, required: true
+ end
+ field :interface_conn, number_type.connection_type, null: true
+
+ def lazy_list_of_ys
+ ::Gitlab::Graphql::Lazy.new { object[:ys] }
+ end
+
+ def list_of_lazy_ys
+ object[:ys].map { |y| ::Gitlab::Graphql::Lazy.new { y } }
+ end
+
+ def array_ys_conn
+ object[:ys].dup
+ end
+
+ def polymorphic_conn
+ object[:values].dup
+ end
+ alias_method :interface_conn, :polymorphic_conn
+
+ def polymorphic_object(value)
+ value
+ end
+ end
+
+ user_type = Class.new(base_object) do
+ graphql_name 'User'
+ authorize :read_user
+ field 'name', String, null: true
+ end
+
+ Class.new(GraphQL::Schema) do
+ lazy_resolve ::Gitlab::Graphql::Lazy, :force
+ use ::GraphQL::Pagination::Connections
+ use ::Gitlab::Graphql::Pagination::Connections
+
+ query(Class.new(::Types::BaseObject) do
+ graphql_name 'Query'
+ field :x, x_type, null: true
+ field :users, user_type.connection_type, null: true
+
+ def x
+ ::Gitlab::Graphql::Lazy.new { context[:x] }
+ end
+
+ def users
+ ::Gitlab::Graphql::Lazy.new { User.id_in(context[:user_ids]).order(id: :asc) }
+ end
+ end)
+
+ def unauthorized_object(err)
+ nil
+ end
+ end
+ end
+
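+ # Builds a query selecting x.title and the given field path (ending in id) under x.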
+ def document(path)
+ GraphQL.parse(<<~GQL)
+ query {
+ x {
+ title
+ #{query_graphql_path(path, 'id')}
+ }
+ }
+ GQL
+ end
+
+ let(:data) do
+ {
+ x: {
+ title: 'Hey',
+ ys: [{ id: 1 }, { id: 100 }, { id: 2 }]
+ }
+ }
+ end
+
+ shared_examples 'array member redaction' do |path|
+ let(:result) do
+ query = GraphQL::Query.new(test_schema, document: document(path), context: data)
+ query.result.to_h
+ end
+
+ it 'redacts the unauthorized array member' do
+ expect(graphql_dig_at(result, 'data', 'x', 'title')).to eq('Hey')
+ expect(graphql_dig_at(result, 'data', 'x', *path)).to contain_exactly(
+ eq({ 'id' => 1 }),
+ eq({ 'id' => 2 })
+ )
+ end
+ end
+
+ # For example a batchloaded association
+ describe 'a lazy list' do
+ it_behaves_like 'array member redaction', %w[lazyListOfYs]
+ end
+
+ # For example using a batchloader to map over a set of IDs
+ describe 'a list of lazy items' do
+ it_behaves_like 'array member redaction', %w[listOfLazyYs]
+ end
+
+ describe 'an array connection of items' do
+ it_behaves_like 'array member redaction', %w[arrayYsConn nodes]
+ end
+
+ describe 'an array connection of items, selecting edges' do
+ it_behaves_like 'array member redaction', %w[arrayYsConn edges node]
+ end
+
+ it 'paginates arrays correctly' do
+ n = 7
+
+ data = {
+ x: {
+ ys: (95..105).to_a.map { |id| { id: id } }
+ }
+ }
+
+ doc = lambda do |after|
+ GraphQL.parse(<<~GQL)
+ query {
+ x {
+ ys: arrayYsConn(#{attributes_to_graphql(first: n, after: after)}) {
+ pageInfo {
+ hasNextPage
+ hasPreviousPage
+ endCursor
+ }
+ nodes { id }
+ }
+ }
+ }
+ GQL
+ end
+ returned_items = ->(ids) { ids.to_a.map { |id| eq({ 'id' => id }) } }
+
+ query = GraphQL::Query.new(test_schema, document: doc[nil], context: data)
+ result = query.result.to_h
+
+ ys = result.dig('data', 'x', 'ys', 'nodes')
+ page = result.dig('data', 'x', 'ys', 'pageInfo')
+ # We expect this page to be smaller, since we paginate before redaction
+ expect(ys).to match_array(returned_items[(95..101).to_a - [100]])
+ expect(page).to include('hasNextPage' => true, 'hasPreviousPage' => false)
+
+ cursor = page['endCursor']
+ query_2 = GraphQL::Query.new(test_schema, document: doc[cursor], context: data)
+ result_2 = query_2.result.to_h
+
+ ys = result_2.dig('data', 'x', 'ys', 'nodes')
+ page = result_2.dig('data', 'x', 'ys', 'pageInfo')
+ expect(ys).to match_array(returned_items[102..105])
+ expect(page).to include('hasNextPage' => false, 'hasPreviousPage' => true)
+ end
+
+ it 'filters connections correctly' do
+ active_users = create_list(:user, 3, state: :active)
+ inactive = create(:user, state: :deactivated)
+
+ data = { user_ids: [inactive, *active_users].map(&:id) }
+
+ doc = GraphQL.parse(<<~GQL)
+ query {
+ users { nodes { name } }
+ }
+ GQL
+
+ query = GraphQL::Query.new(test_schema, document: doc, context: data)
+ result = query.result.to_h
+
+ expect(result.dig('data', 'users', 'nodes')).to match_array(active_users.map do |u|
+ eq({ 'name' => u.name })
+ end)
+ end
+
+ it 'filters polymorphic connections' do
+ data = {
+ current_user: :the_user,
+ x: {
+ values: [{ value: 1 }, { value: 2 }, { value: 3 }, { value: 4 }]
+ }
+ }
+
+ doc = GraphQL.parse(<<~GQL)
+ query {
+ x {
+ things: polymorphicConn {
+ nodes {
+ ... on Odd { oddValue }
+ ... on Even { evenValue }
+ }
+ }
+ }
+ }
+ GQL
+
+ # Each ability check happens twice: once in the collection, and once
+ # on the type. We expect the ability checks to be cached.
+ expect(Ability).to receive(:allowed?).twice
+ .with(:the_user, :read_odd, { value: 1 }).and_return(true)
+ expect(Ability).to receive(:allowed?).once
+ .with(:the_user, :read_odd, { value: 3 }).and_return(false)
+ expect(Ability).to receive(:allowed?).once
+ .with(:the_user, :read_even, { value: 2 }).and_return(false)
+ expect(Ability).to receive(:allowed?).twice
+ .with(:the_user, :read_even, { value: 4 }).and_return(true)
+
+ query = GraphQL::Query.new(test_schema, document: doc, context: data)
+ result = query.result.to_h
+
+ things = result.dig('data', 'x', 'things', 'nodes')
+
+ expect(things).to contain_exactly(
+ { 'oddValue' => 1 },
+ { 'evenValue' => 4 }
+ )
+ end
+
+ it 'filters interface connections' do
+ data = {
+ current_user: :the_user,
+ x: {
+ values: [{ value: 1 }, { value: 2 }, { value: 3 }, { value: 4 }]
+ }
+ }
+
+ doc = GraphQL.parse(<<~GQL)
+ query {
+ x {
+ things: interfaceConn {
+ nodes {
+ value
+ ... on Odd { oddValue }
+ ... on Even { evenValue }
+ }
+ }
+ }
+ }
+ GQL
+
+ # Each ability check happens twice: once in the collection, and once
+ # on the type. We expect the ability checks to be cached.
+ expect(Ability).to receive(:allowed?).twice
+ .with(:the_user, :read_odd, { value: 1 }).and_return(true)
+ expect(Ability).to receive(:allowed?).once
+ .with(:the_user, :read_odd, { value: 3 }).and_return(false)
+ expect(Ability).to receive(:allowed?).once
+ .with(:the_user, :read_even, { value: 2 }).and_return(false)
+ expect(Ability).to receive(:allowed?).twice
+ .with(:the_user, :read_even, { value: 4 }).and_return(true)
+
+ query = GraphQL::Query.new(test_schema, document: doc, context: data)
+ result = query.result.to_h
+
+ things = result.dig('data', 'x', 'things', 'nodes')
+
+ expect(things).to contain_exactly(
+ { 'value' => 1, 'oddValue' => 1 },
+ { 'value' => 4, 'evenValue' => 4 }
+ )
+ end
+
+ it 'redacts polymorphic objects' do
+ data = {
+ current_user: :the_user,
+ x: {
+ values: [{ value: 1 }]
+ }
+ }
+
+ doc = GraphQL.parse(<<~GQL)
+ query {
+ x {
+ ok: polymorphicObject(value: 1) {
+ ... on Odd { oddValue }
+ ... on Even { evenValue }
+ }
+ bad: polymorphicObject(value: 3) {
+ ... on Odd { oddValue }
+ ... on Even { evenValue }
+ }
+ }
+ }
+ GQL
+
+ # Each ability check happens twice: once in the collection, and once
+ # on the type. We expect the ability checks to be cached.
+ expect(Ability).to receive(:allowed?).once
+ .with(:the_user, :read_odd, { value: 1 }).and_return(true)
+ expect(Ability).to receive(:allowed?).once
+ .with(:the_user, :read_odd, { value: 3 }).and_return(false)
+
+ query = GraphQL::Query.new(test_schema, document: doc, context: data)
+ result = query.result.to_h
+
+ expect(result.dig('data', 'x', 'ok')).to eq({ 'oddValue' => 1 })
+ expect(result.dig('data', 'x', 'bad')).to be_nil
+ end
+
+ it 'paginates before scoping' do
+ # Inactive first so they sort first
+ n = 3
+ inactive = create_list(:user, n - 1, state: :deactivated)
+ active_users = create_list(:user, 2, state: :active)
+
+ data = { user_ids: [*inactive, *active_users].map(&:id) }
+
+ doc = GraphQL.parse(<<~GQL)
+ query {
+ users(first: #{n}) {
+ pageInfo { hasNextPage }
+ nodes { name } }
+ }
+ GQL
+
+ query = GraphQL::Query.new(test_schema, document: doc, context: data)
+ result = query.result.to_h
+
+ # We expect the page to be loaded and then filtered - i.e. to have all
+ # deactivated users removed.
+ expect(result.dig('data', 'users', 'pageInfo', 'hasNextPage')).to be_truthy
+ expect(result.dig('data', 'users', 'nodes'))
+ .to contain_exactly({ 'name' => active_users.first.name })
+ end
+ end
+end
diff --git a/spec/graphql/types/board_type_spec.rb b/spec/graphql/types/board_type_spec.rb
index dca3cfd8aaf..403fbe1f290 100644
--- a/spec/graphql/types/board_type_spec.rb
+++ b/spec/graphql/types/board_type_spec.rb
@@ -8,8 +8,18 @@ RSpec.describe GitlabSchema.types['Board'] do
specify { expect(described_class).to require_graphql_authorizations(:read_issue_board) }
it 'has specific fields' do
- expected_fields = %w[id name web_url web_path]
+ expected_fields = %w[
+ id
+ name
+ hideBacklogList
+ hideClosedList
+ createdAt
+ updatedAt
+ lists
+ webPath
+ webUrl
+ ]
- expect(described_class).to include_graphql_fields(*expected_fields)
+ expect(described_class).to have_graphql_fields(*expected_fields).at_least
end
end
diff --git a/spec/graphql/types/boards/board_issue_input_type_spec.rb b/spec/graphql/types/boards/board_issue_input_type_spec.rb
index 6319ff9a88e..5d3efb9b40d 100644
--- a/spec/graphql/types/boards/board_issue_input_type_spec.rb
+++ b/spec/graphql/types/boards/board_issue_input_type_spec.rb
@@ -5,9 +5,9 @@ require 'spec_helper'
RSpec.describe GitlabSchema.types['BoardIssueInput'] do
it { expect(described_class.graphql_name).to eq('BoardIssueInput') }
- it 'exposes negated issue arguments' do
+ it 'has specific fields' do
allowed_args = %w(labelName milestoneTitle assigneeUsername authorUsername
- releaseTag myReactionEmoji not search)
+ releaseTag myReactionEmoji not search assigneeWildcardId)
expect(described_class.arguments.keys).to include(*allowed_args)
expect(described_class.arguments['not'].type).to eq(Types::Boards::NegatedBoardIssueInputType)
diff --git a/spec/graphql/types/ci/job_status_enum_spec.rb b/spec/graphql/types/ci/job_status_enum_spec.rb
new file mode 100644
index 00000000000..e8a1a2e0aa8
--- /dev/null
+++ b/spec/graphql/types/ci/job_status_enum_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['CiJobStatus'] do
+ it 'exposes all job status values' do
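+ # Each Ci::HasStatus status should appear as an upper-cased GraphQL enum value.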
+ expect(described_class.values.values).to contain_exactly(
+ *::Ci::HasStatus::AVAILABLE_STATUSES.map do |status|
+ have_attributes(value: status, graphql_name: status.upcase)
+ end
+ )
+ end
+end
diff --git a/spec/graphql/types/ci/job_type_spec.rb b/spec/graphql/types/ci/job_type_spec.rb
index 25f626cea0f..787e2174070 100644
--- a/spec/graphql/types/ci/job_type_spec.rb
+++ b/spec/graphql/types/ci/job_type_spec.rb
@@ -8,14 +8,32 @@ RSpec.describe Types::Ci::JobType do
it 'exposes the expected fields' do
expected_fields = %i[
- pipeline
+ active
+ allow_failure
+ artifacts
+ cancelable
+ commitPath
+ coverage
+ created_at
+ detailedStatus
+ duration
+ finished_at
+ id
name
needs
- detailedStatus
+ pipeline
+ playable
+ queued_at
+ refName
+ refPath
+ retryable
scheduledAt
- artifacts
- finished_at
- duration
+ schedulingType
+ shortSha
+ stage
+ started_at
+ status
+ tags
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/ci/pipeline_type_spec.rb b/spec/graphql/types/ci/pipeline_type_spec.rb
index e0e84a1b635..c7d2cbdb765 100644
--- a/spec/graphql/types/ci/pipeline_type_spec.rb
+++ b/spec/graphql/types/ci/pipeline_type_spec.rb
@@ -11,8 +11,9 @@ RSpec.describe Types::Ci::PipelineType do
expected_fields = %w[
id iid sha before_sha status detailed_status config_source duration
coverage created_at updated_at started_at finished_at committed_at
- stages user retryable cancelable jobs source_job downstream
- upstream path project active user_permissions warnings commit_path
+ stages user retryable cancelable jobs source_job job downstream
+ upstream path project active user_permissions warnings commit_path uses_needs
+ test_report_summary test_suite
]
if Gitlab.ee?
diff --git a/spec/graphql/types/ci/recent_failures_type_spec.rb b/spec/graphql/types/ci/recent_failures_type_spec.rb
new file mode 100644
index 00000000000..38369da46bf
--- /dev/null
+++ b/spec/graphql/types/ci/recent_failures_type_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::RecentFailuresType do
+ specify { expect(described_class.graphql_name).to eq('RecentFailures') }
+
+ it 'contains attributes related to a recent failure history for a test case' do
+ expected_fields = %w[
+ count base_branch
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/stage_type_spec.rb b/spec/graphql/types/ci/stage_type_spec.rb
index 9a8d4fa96a3..cb8c1cb02cd 100644
--- a/spec/graphql/types/ci/stage_type_spec.rb
+++ b/spec/graphql/types/ci/stage_type_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Types::Ci::StageType do
name
groups
detailedStatus
+ jobs
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/ci/test_case_status_enum_spec.rb b/spec/graphql/types/ci/test_case_status_enum_spec.rb
new file mode 100644
index 00000000000..ba2d1aefb20
--- /dev/null
+++ b/spec/graphql/types/ci/test_case_status_enum_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::TestCaseStatusEnum do
+ specify { expect(described_class.graphql_name).to eq('TestCaseStatus') }
+
+ it 'exposes all test case status types' do
+ expect(described_class.values.keys).to eq(
+ ::Gitlab::Ci::Reports::TestCase::STATUS_TYPES
+ )
+ end
+end
diff --git a/spec/graphql/types/ci/test_case_type_spec.rb b/spec/graphql/types/ci/test_case_type_spec.rb
new file mode 100644
index 00000000000..e6cd70c287e
--- /dev/null
+++ b/spec/graphql/types/ci/test_case_type_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::TestCaseType do
+ specify { expect(described_class.graphql_name).to eq('TestCase') }
+
+ it 'contains attributes related to a pipeline test case' do
+ expected_fields = %w[
+ name status classname file attachment_url execution_time stack_trace system_output recent_failures
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/test_report_summary_type_spec.rb b/spec/graphql/types/ci/test_report_summary_type_spec.rb
new file mode 100644
index 00000000000..06974da0b88
--- /dev/null
+++ b/spec/graphql/types/ci/test_report_summary_type_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::TestReportSummaryType do
+ specify { expect(described_class.graphql_name).to eq('TestReportSummary') }
+
+ it 'contains attributes related to a pipeline test report summary' do
+ expected_fields = %w[
+ total test_suites
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/test_report_total_type_spec.rb b/spec/graphql/types/ci/test_report_total_type_spec.rb
new file mode 100644
index 00000000000..e5b7b358edb
--- /dev/null
+++ b/spec/graphql/types/ci/test_report_total_type_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::TestReportTotalType do
+ specify { expect(described_class.graphql_name).to eq('TestReportTotal') }
+
+ it 'contains attributes related to a pipeline test report total' do
+ expected_fields = %w[
+ time count success failed skipped error suite_error
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/test_suite_summary_type_spec.rb b/spec/graphql/types/ci/test_suite_summary_type_spec.rb
new file mode 100644
index 00000000000..e87782037c7
--- /dev/null
+++ b/spec/graphql/types/ci/test_suite_summary_type_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::TestSuiteSummaryType do
+ specify { expect(described_class.graphql_name).to eq('TestSuiteSummary') }
+
+ it 'contains attributes related to a pipeline test suite summary' do
+ expected_fields = %w[
+ name total_time total_count success_count failed_count skipped_count error_count suite_error build_ids
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/ci/test_suite_type_spec.rb b/spec/graphql/types/ci/test_suite_type_spec.rb
new file mode 100644
index 00000000000..d9caca3e2c3
--- /dev/null
+++ b/spec/graphql/types/ci/test_suite_type_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::TestSuiteType do
+ specify { expect(described_class.graphql_name).to eq('TestSuite') }
+
+ it 'contains attributes related to a pipeline test suite' do
+ expected_fields = %w[
+ name total_time total_count success_count failed_count skipped_count error_count suite_error test_cases
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/global_id_type_spec.rb b/spec/graphql/types/global_id_type_spec.rb
index 8eb023ad2a3..4df51dc8d1b 100644
--- a/spec/graphql/types/global_id_type_spec.rb
+++ b/spec/graphql/types/global_id_type_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Types::GlobalIDType do
let_it_be(:project) { create(:project) }
+
let(:gid) { project.to_global_id }
it 'is has the correct name' do
diff --git a/spec/graphql/types/issue_type_spec.rb b/spec/graphql/types/issue_type_spec.rb
index 21fc530149c..6908a610aae 100644
--- a/spec/graphql/types/issue_type_spec.rb
+++ b/spec/graphql/types/issue_type_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe GitlabSchema.types['Issue'] do
confidential discussion_locked upvotes downvotes user_notes_count user_discussions_count web_path web_url relative_position
emails_disabled subscribed time_estimate total_time_spent human_time_estimate human_total_time_spent closed_at created_at updated_at task_completion_status
design_collection alert_management_alert severity current_user_todos moved moved_to
- create_note_email]
+ create_note_email timelogs]
fields.each do |field_name|
expect(described_class).to have_graphql_field(field_name)
diff --git a/spec/graphql/types/merge_request_review_state_enum_spec.rb b/spec/graphql/types/merge_request_review_state_enum_spec.rb
new file mode 100644
index 00000000000..486e1c4f502
--- /dev/null
+++ b/spec/graphql/types/merge_request_review_state_enum_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['MergeRequestReviewState'] do
+ it 'has the correct enum members' do
+ expect(described_class.values).to match(
+ 'REVIEWED' => have_attributes(
+ description: 'The merge request is reviewed.',
+ value: 'reviewed'
+ ),
+ 'UNREVIEWED' => have_attributes(
+ description: 'The merge request is unreviewed.',
+ value: 'unreviewed'
+ )
+ )
+ end
+end
diff --git a/spec/graphql/types/merge_requests/reviewer_type_spec.rb b/spec/graphql/types/merge_requests/reviewer_type_spec.rb
new file mode 100644
index 00000000000..c2182e9968c
--- /dev/null
+++ b/spec/graphql/types/merge_requests/reviewer_type_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['MergeRequestReviewer'] do
+ specify { expect(described_class).to require_graphql_authorizations(:read_user) }
+
+ it 'has the expected fields' do
+ expected_fields = %w[
+ id
+ bot
+ user_permissions
+ snippets
+ name
+ username
+ email
+ publicEmail
+ avatarUrl
+ webUrl
+ webPath
+ todos
+ state
+ status
+ location
+ authoredMergeRequests
+ assignedMergeRequests
+ reviewRequestedMergeRequests
+ groupMemberships
+ groupCount
+ projectMemberships
+ starredProjects
+ callouts
+ merge_request_interaction
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+
+ describe '#merge_request_interaction' do
+ subject { described_class.fields['mergeRequestInteraction'] }
+
+ it 'returns the correct type' do
+ is_expected.to have_graphql_type(Types::UserMergeRequestInteractionType)
+ end
+
+ it 'has the correct arguments' do
+ is_expected.to have_attributes(arguments: be_empty)
+ end
+ end
+end
diff --git a/spec/graphql/types/milestone_type_spec.rb b/spec/graphql/types/milestone_type_spec.rb
index 806495250ac..5c2ae5cea3c 100644
--- a/spec/graphql/types/milestone_type_spec.rb
+++ b/spec/graphql/types/milestone_type_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe GitlabSchema.types['Milestone'] do
it 'has the expected fields' do
expected_fields = %w[
- id title description state web_path
+ id iid title description state web_path
due_date start_date created_at updated_at
project_milestone group_milestone subgroup_milestone
stats
diff --git a/spec/graphql/types/packages/conan/file_metadatum_type_spec.rb b/spec/graphql/types/packages/conan/file_metadatum_type_spec.rb
new file mode 100644
index 00000000000..18b17286654
--- /dev/null
+++ b/spec/graphql/types/packages/conan/file_metadatum_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['ConanFileMetadata'] do
+ it 'includes conan file metadatum fields' do
+ expected_fields = %w[
+ id created_at updated_at recipe_revision package_revision conan_package_reference conan_file_type
+ ]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/packages/conan/metadatum_file_type_enum_spec.rb b/spec/graphql/types/packages/conan/metadatum_file_type_enum_spec.rb
new file mode 100644
index 00000000000..379cb5168a8
--- /dev/null
+++ b/spec/graphql/types/packages/conan/metadatum_file_type_enum_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['ConanMetadatumFileTypeEnum'] do
+ it 'uses all possible options from model' do
+ expected_keys = ::Packages::Conan::FileMetadatum.conan_file_types
+ .keys
+ .map(&:upcase)
+
+ expect(described_class.values.keys).to contain_exactly(*expected_keys)
+ end
+end
diff --git a/spec/graphql/types/packages/conan/metadatum_type_spec.rb b/spec/graphql/types/packages/conan/metadatum_type_spec.rb
new file mode 100644
index 00000000000..f8f24ffc95a
--- /dev/null
+++ b/spec/graphql/types/packages/conan/metadatum_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['ConanMetadata'] do
+ it 'includes conan metadatum fields' do
+ expected_fields = %w[
+ id created_at updated_at package_username package_channel recipe recipe_path
+ ]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/packages/package_without_versions_type_spec.rb b/spec/graphql/types/packages/package_details_type_spec.rb
index faa79e588d5..06093813315 100644
--- a/spec/graphql/types/packages/package_without_versions_type_spec.rb
+++ b/spec/graphql/types/packages/package_details_type_spec.rb
@@ -2,10 +2,10 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['PackageWithoutVersions'] do
+RSpec.describe GitlabSchema.types['PackageDetailsType'] do
it 'includes all the package fields' do
expected_fields = %w[
- id name version created_at updated_at package_type tags project pipelines
+ id name version created_at updated_at package_type tags project pipelines versions package_files
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/packages/package_file_type_spec.rb b/spec/graphql/types/packages/package_file_type_spec.rb
new file mode 100644
index 00000000000..8e20aea5220
--- /dev/null
+++ b/spec/graphql/types/packages/package_file_type_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['PackageFile'] do
+ it 'includes package file fields' do
+ expected_fields = %w[
+ id file_name created_at updated_at size file_name download_path file_md5 file_sha1 file_sha256 file_metadata
+ ]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/packages/package_type_spec.rb b/spec/graphql/types/packages/package_type_spec.rb
index 43289a019b3..544d6ddc3af 100644
--- a/spec/graphql/types/packages/package_type_spec.rb
+++ b/spec/graphql/types/packages/package_type_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe GitlabSchema.types['Package'] do
id name version package_type
created_at updated_at
project
- tags pipelines versions
+ tags pipelines metadata versions
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/project_type_spec.rb b/spec/graphql/types/project_type_spec.rb
index 9579ef8b99b..f2c4068f048 100644
--- a/spec/graphql/types/project_type_spec.rb
+++ b/spec/graphql/types/project_type_spec.rb
@@ -106,7 +106,8 @@ RSpec.describe GitlabSchema.types['Project'] do
expect(secure_analyzers_prefix['type']).to eq('string')
expect(secure_analyzers_prefix['field']).to eq('SECURE_ANALYZERS_PREFIX')
expect(secure_analyzers_prefix['label']).to eq('Image prefix')
- expect(secure_analyzers_prefix['defaultValue']).to eq('registry.gitlab.com/gitlab-org/security-products/analyzers')
+ expect(secure_analyzers_prefix['defaultValue'])
+ .to eq('registry.gitlab.com/gitlab-org/security-products/analyzers')
expect(secure_analyzers_prefix['value']).to eq('registry.gitlab.com/gitlab-org/security-products/analyzers')
expect(secure_analyzers_prefix['size']).to eq('LARGE')
expect(secure_analyzers_prefix['options']).to be_nil
@@ -124,8 +125,8 @@ RSpec.describe GitlabSchema.types['Project'] do
it "returns the project's sast configuration for analyzer variables" do
analyzer = subject.dig('data', 'project', 'sastCiConfiguration', 'analyzers', 'nodes').first
- expect(analyzer['name']).to eq('brakeman')
- expect(analyzer['label']).to eq('Brakeman')
+ expect(analyzer['name']).to eq('bandit')
+ expect(analyzer['label']).to eq('Bandit')
expect(analyzer['enabled']).to eq(true)
end
@@ -184,9 +185,11 @@ RSpec.describe GitlabSchema.types['Project'] do
context 'when repository is accessible only by team members' do
it "returns no configuration" do
- project.project_feature.update!(merge_requests_access_level: ProjectFeature::DISABLED,
- builds_access_level: ProjectFeature::DISABLED,
- repository_access_level: ProjectFeature::PRIVATE)
+ project.project_feature.update!(
+ merge_requests_access_level: ProjectFeature::DISABLED,
+ builds_access_level: ProjectFeature::DISABLED,
+ repository_access_level: ProjectFeature::PRIVATE
+ )
secure_analyzers_prefix = subject.dig('data', 'project', 'sastCiConfiguration')
expect(secure_analyzers_prefix).to be_nil
@@ -240,6 +243,7 @@ RSpec.describe GitlabSchema.types['Project'] do
:assignee_username,
:reviewer_username,
:milestone_title,
+ :not,
:sort
)
end
@@ -342,8 +346,13 @@ RSpec.describe GitlabSchema.types['Project'] do
let_it_be(:project) { create(:project, :public) }
context 'when project has Jira imports' do
- let_it_be(:jira_import1) { create(:jira_import_state, :finished, project: project, jira_project_key: 'AA', created_at: 2.days.ago) }
- let_it_be(:jira_import2) { create(:jira_import_state, :finished, project: project, jira_project_key: 'BB', created_at: 5.days.ago) }
+ let_it_be(:jira_import1) do
+ create(:jira_import_state, :finished, project: project, jira_project_key: 'AA', created_at: 2.days.ago)
+ end
+
+ let_it_be(:jira_import2) do
+ create(:jira_import_state, :finished, project: project, jira_project_key: 'BB', created_at: 5.days.ago)
+ end
it 'retrieves the imports' do
expect(subject).to contain_exactly(jira_import1, jira_import2)
@@ -363,4 +372,11 @@ RSpec.describe GitlabSchema.types['Project'] do
it { is_expected.to have_graphql_type(Types::Ci::AnalyticsType) }
it { is_expected.to have_graphql_resolver(Resolvers::ProjectPipelineStatisticsResolver) }
end
+
+ describe 'jobs field' do
+ subject { described_class.fields['jobs'] }
+
+ it { is_expected.to have_graphql_type(Types::Ci::JobType.connection_type) }
+ it { is_expected.to have_graphql_arguments(:statuses) }
+ end
end
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index cb8e875dbf4..d3dcdd260b0 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -98,6 +98,6 @@ RSpec.describe GitlabSchema.types['Query'] do
describe 'package field' do
subject { described_class.fields['package'] }
- it { is_expected.to have_graphql_type(Types::Packages::PackageType) }
+ it { is_expected.to have_graphql_type(Types::Packages::PackageDetailsType) }
end
end
diff --git a/spec/graphql/types/repository/blob_type_spec.rb b/spec/graphql/types/repository/blob_type_spec.rb
new file mode 100644
index 00000000000..f8647e4e964
--- /dev/null
+++ b/spec/graphql/types/repository/blob_type_spec.rb
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Repository::BlobType do
+ specify { expect(described_class.graphql_name).to eq('RepositoryBlob') }
+
+ specify { expect(described_class).to have_graphql_fields(:id, :oid, :name, :path, :web_path, :lfs_oid, :mode) }
+end
diff --git a/spec/graphql/types/repository_type_spec.rb b/spec/graphql/types/repository_type_spec.rb
index e9199bd286e..fa1e54dfcfa 100644
--- a/spec/graphql/types/repository_type_spec.rb
+++ b/spec/graphql/types/repository_type_spec.rb
@@ -12,4 +12,8 @@ RSpec.describe GitlabSchema.types['Repository'] do
specify { expect(described_class).to have_graphql_field(:tree) }
specify { expect(described_class).to have_graphql_field(:exists, calls_gitaly?: true, complexity: 2) }
+
+ specify { expect(described_class).to have_graphql_field(:blobs) }
+
+ specify { expect(described_class).to have_graphql_field(:branch_names, calls_gitaly?: true, complexity: 170) }
end
diff --git a/spec/graphql/types/snippet_type_spec.rb b/spec/graphql/types/snippet_type_spec.rb
index 4d827186a9b..b87770ebe8d 100644
--- a/spec/graphql/types/snippet_type_spec.rb
+++ b/spec/graphql/types/snippet_type_spec.rb
@@ -161,6 +161,7 @@ RSpec.describe GitlabSchema.types['Snippet'] do
describe '#blobs' do
let_it_be(:snippet) { create(:personal_snippet, :public, author: user) }
+
let(:query_blobs) { subject.dig('data', 'snippets', 'nodes')[0].dig('blobs', 'nodes') }
let(:paths) { [] }
let(:query) do
@@ -201,6 +202,7 @@ RSpec.describe GitlabSchema.types['Snippet'] do
context 'when snippet has repository' do
let_it_be(:snippet) { create(:personal_snippet, :repository, :public, author: user) }
+
let(:blobs) { snippet.blobs }
it_behaves_like 'an array'
diff --git a/spec/graphql/types/timelog_type_spec.rb b/spec/graphql/types/timelog_type_spec.rb
new file mode 100644
index 00000000000..38bd70d5097
--- /dev/null
+++ b/spec/graphql/types/timelog_type_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['Timelog'] do
+ let(:fields) { %i[spent_at time_spent user issue note] }
+
+ it { expect(described_class.graphql_name).to eq('Timelog') }
+ it { expect(described_class).to have_graphql_fields(fields) }
+ it { expect(described_class).to require_graphql_authorizations(:read_group_timelogs) }
+
+ describe 'user field' do
+ subject { described_class.fields['user'] }
+
+ it 'returns user' do
+ is_expected.to have_non_null_graphql_type(Types::UserType)
+ end
+ end
+
+ describe 'issue field' do
+ subject { described_class.fields['issue'] }
+
+ it 'returns issue' do
+ is_expected.to have_graphql_type(Types::IssueType)
+ end
+ end
+
+ describe 'note field' do
+ subject { described_class.fields['note'] }
+
+ it 'returns note' do
+ is_expected.to have_graphql_type(Types::Notes::NoteType)
+ end
+ end
+end
diff --git a/spec/graphql/types/user_merge_request_interaction_type_spec.rb b/spec/graphql/types/user_merge_request_interaction_type_spec.rb
new file mode 100644
index 00000000000..f424c9200ab
--- /dev/null
+++ b/spec/graphql/types/user_merge_request_interaction_type_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['UserMergeRequestInteraction'] do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
+ let(:interaction) { ::Users::MergeRequestInteraction.new(user: user, merge_request: merge_request.reset) }
+
+ specify { expect(described_class).to require_graphql_authorizations(:read_merge_request) }
+
+ it 'has the expected fields' do
+ expected_fields = %w[
+ can_merge
+ can_update
+ review_state
+ reviewed
+ approved
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields).at_least
+ end
+
+ def resolve(field_name)
+ resolve_field(field_name, interaction, current_user: current_user)
+ end
+
+ describe '#can_merge' do
+ subject { resolve(:can_merge) }
+
+ context 'when the user cannot merge' do
+ it { is_expected.to be false }
+ end
+
+ context 'when the user can merge' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it { is_expected.to be true }
+ end
+ end
+
+ describe '#can_update' do
+ subject { resolve(:can_update) }
+
+ context 'when the user cannot update the MR' do
+ it { is_expected.to be false }
+ end
+
+ context 'when the user can update the MR' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { is_expected.to be true }
+ end
+ end
+
+ describe '#review_state' do
+ subject { resolve(:review_state) }
+
+ context 'when the user has not been asked to review the MR' do
+ it { is_expected.to be_nil }
+
+ it 'implies not reviewed' do
+ expect(resolve(:reviewed)).to be false
+ end
+ end
+
+ context 'when the user has been asked to review the MR' do
+ before do
+ merge_request.reviewers << user
+ end
+
+ it { is_expected.to eq(Types::MergeRequestReviewStateEnum.values['UNREVIEWED'].value) }
+
+ it 'implies not reviewed' do
+ expect(resolve(:reviewed)).to be false
+ end
+ end
+
+ context 'when the user has provided a review' do
+ before do
+ merge_request.merge_request_reviewers.create!(reviewer: user, state: MergeRequestReviewer.states['reviewed'])
+ end
+
+ it { is_expected.to eq(Types::MergeRequestReviewStateEnum.values['REVIEWED'].value) }
+
+ it 'implies reviewed' do
+ expect(resolve(:reviewed)).to be true
+ end
+ end
+ end
+
+ describe '#approved' do
+ subject { resolve(:approved) }
+
+ context 'when the user has not approved the MR' do
+ it { is_expected.to be false }
+ end
+
+ context 'when the user has approved the MR' do
+ before do
+ merge_request.approved_by_users << user
+ end
+
+ it { is_expected.to be true }
+ end
+ end
+end
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index c7470f31ad8..ae039c1a8b1 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -168,11 +168,13 @@ RSpec.describe ApplicationHelper do
it { expect(helper.active_when(false)).to eq(nil) }
end
- describe '#promo_host' do
- subject { helper.promo_host }
+ unless Gitlab.jh?
+ describe '#promo_host' do
+ subject { helper.promo_host }
- it 'returns the url' do
- is_expected.to eq('about.gitlab.com')
+ it 'returns the url' do
+ is_expected.to eq('about.gitlab.com')
+ end
end
end
@@ -180,7 +182,7 @@ RSpec.describe ApplicationHelper do
subject { helper.promo_url }
it 'returns the url' do
- is_expected.to eq('https://about.gitlab.com')
+ is_expected.to eq("https://#{helper.promo_host}")
end
it 'changes if promo_host changes' do
@@ -194,7 +196,7 @@ RSpec.describe ApplicationHelper do
subject { helper.contact_sales_url }
it 'returns the url' do
- is_expected.to eq('https://about.gitlab.com/sales')
+ is_expected.to eq("https://#{helper.promo_host}/sales")
end
it 'changes if promo_url changes' do
@@ -316,9 +318,7 @@ RSpec.describe ApplicationHelper do
let(:user) { create(:user, static_object_token: 'hunter1') }
before do
- allow_next_instance_of(ApplicationSetting) do |instance|
- allow(instance).to receive(:static_objects_external_storage_url).and_return('https://cdn.gitlab.com')
- end
+ stub_application_setting(static_objects_external_storage_url: 'https://cdn.gitlab.com')
allow(helper).to receive(:current_user).and_return(user)
end
diff --git a/spec/helpers/avatars_helper_spec.rb b/spec/helpers/avatars_helper_spec.rb
index 7fcd5ae880a..120dbe7cb49 100644
--- a/spec/helpers/avatars_helper_spec.rb
+++ b/spec/helpers/avatars_helper_spec.rb
@@ -121,27 +121,13 @@ RSpec.describe AvatarsHelper do
end
end
- context "when :avatar_cache_for_email flag is enabled" do
- before do
- stub_feature_flags(avatar_cache_for_email: true)
- end
-
- it_behaves_like "returns avatar for email"
+ it_behaves_like "returns avatar for email"
- it "caches the request" do
- expect(User).to receive(:find_by_any_email).once.and_call_original
-
- expect(helper.avatar_icon_for_email(user.email).to_s).to eq(user.avatar.url)
- expect(helper.avatar_icon_for_email(user.email).to_s).to eq(user.avatar.url)
- end
- end
-
- context "when :avatar_cache_for_email flag is disabled" do
- before do
- stub_feature_flags(avatar_cache_for_email: false)
- end
+ it "caches the request" do
+ expect(User).to receive(:find_by_any_email).once.and_call_original
- it_behaves_like "returns avatar for email"
+ expect(helper.avatar_icon_for_email(user.email).to_s).to eq(user.avatar.url)
+ expect(helper.avatar_icon_for_email(user.email).to_s).to eq(user.avatar.url)
end
end
diff --git a/spec/helpers/blob_helper_spec.rb b/spec/helpers/blob_helper_spec.rb
index b584a906565..885569574a4 100644
--- a/spec/helpers/blob_helper_spec.rb
+++ b/spec/helpers/blob_helper_spec.rb
@@ -304,6 +304,7 @@ RSpec.describe BlobHelper do
let_it_be(:namespace) { create(:namespace, name: 'gitlab') }
let_it_be(:project) { create(:project, :repository, namespace: namespace) }
let_it_be(:current_user) { create(:user) }
+
let(:can_push_code) { true }
let(:blob) { project.repository.blob_at('refs/heads/master', 'README.md') }
@@ -489,9 +490,18 @@ RSpec.describe BlobHelper do
expect(uri.path).to eq("/#{project.namespace.path}/#{project.path}/-/forks")
expect(params).to include("continue[to]=/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master")
+ expect(params).to include("continue[notice]=#{edit_in_new_fork_notice}")
+ expect(params).to include("continue[notice_now]=#{edit_in_new_fork_notice_now}")
expect(params).to include("namespace_key=#{current_user.namespace.id}")
end
+ it 'does not include notice params when with_notice: false' do
+ uri = URI(helper.ide_fork_and_edit_path(project, "master", "", with_notice: false))
+
+ expect(uri.path).to eq("/#{project.namespace.path}/#{project.path}/-/forks")
+ expect(CGI.unescape(uri.query)).to eq("continue[to]=/-/ide/project/#{project.namespace.path}/#{project.path}/edit/master&namespace_key=#{current_user.namespace.id}")
+ end
+
context 'when user is not logged in' do
let(:current_user) { nil }
diff --git a/spec/helpers/boards_helper_spec.rb b/spec/helpers/boards_helper_spec.rb
index b00ee19cea2..00cd44809c7 100644
--- a/spec/helpers/boards_helper_spec.rb
+++ b/spec/helpers/boards_helper_spec.rb
@@ -64,6 +64,7 @@ RSpec.describe BoardsHelper do
allow(helper).to receive(:current_user) { user }
allow(helper).to receive(:can?).with(user, :create_non_backlog_issues, project_board).and_return(true)
allow(helper).to receive(:can?).with(user, :admin_issue, project_board).and_return(true)
+ allow(helper).to receive(:can?).with(user, :admin_issue_board_list, project).and_return(false)
end
it 'returns a board_lists_path as lists_endpoint' do
@@ -86,6 +87,17 @@ RSpec.describe BoardsHelper do
it 'returns the group id of a project' do
expect(helper.board_data[:group_id]).to eq(project.group.id)
end
+
+ context 'can_admin_list' do
+ it 'returns can_admin_list as false by default' do
+ expect(helper.board_data[:can_admin_list]).to eq('false')
+ end
+ it 'returns can_admin_list as true when user can admin the board' do
+ allow(helper).to receive(:can?).with(user, :admin_issue_board_list, project).and_return(true)
+
+ expect(helper.board_data[:can_admin_list]).to eq('true')
+ end
+ end
end
context 'group board' do
@@ -96,6 +108,7 @@ RSpec.describe BoardsHelper do
allow(helper).to receive(:current_user) { user }
allow(helper).to receive(:can?).with(user, :create_non_backlog_issues, group_board).and_return(true)
allow(helper).to receive(:can?).with(user, :admin_issue, group_board).and_return(true)
+ allow(helper).to receive(:can?).with(user, :admin_issue_board_list, base_group).and_return(false)
end
it 'returns correct path for base group' do
@@ -110,6 +123,17 @@ RSpec.describe BoardsHelper do
it 'returns the group id' do
expect(helper.board_data[:group_id]).to eq(base_group.id)
end
+
+ context 'can_admin_list' do
+ it 'returns can_admin_list as false by default' do
+ expect(helper.board_data[:can_admin_list]).to eq('false')
+ end
+ it 'returns can_admin_list as true when user can admin the board' do
+ allow(helper).to receive(:can?).with(user, :admin_issue_board_list, base_group).and_return(true)
+
+ expect(helper.board_data[:can_admin_list]).to eq('true')
+ end
+ end
end
end
diff --git a/spec/helpers/broadcast_messages_helper_spec.rb b/spec/helpers/broadcast_messages_helper_spec.rb
index 21fde35954e..3e8cbdf89a0 100644
--- a/spec/helpers/broadcast_messages_helper_spec.rb
+++ b/spec/helpers/broadcast_messages_helper_spec.rb
@@ -28,6 +28,7 @@ RSpec.describe BroadcastMessagesHelper do
describe 'broadcast_message' do
let_it_be(:user) { create(:user) }
+
let(:current_broadcast_message) { BroadcastMessage.new(message: 'Current Message') }
before do
diff --git a/spec/helpers/button_helper_spec.rb b/spec/helpers/button_helper_spec.rb
index ecb9c98b1bf..09495bbde35 100644
--- a/spec/helpers/button_helper_spec.rb
+++ b/spec/helpers/button_helper_spec.rb
@@ -171,6 +171,7 @@ RSpec.describe ButtonHelper do
expect(element.attr('data-placement')).to eq('bottom')
expect(element.attr('data-container')).to eq('body')
expect(element.attr('data-clipboard-text')).to eq(nil)
+ expect(element.attr('itemprop')).to eq(nil)
expect(element.inner_text).to eq("")
expect(element.to_html).to include sprite_icon('copy-to-clipboard')
@@ -209,5 +210,11 @@ RSpec.describe ButtonHelper do
expect(element(hide_button_icon: true).to_html).not_to include sprite_icon('duplicate')
end
end
+
+ context 'with `itemprop` attribute provided' do
+ it 'shows copy to clipboard button with `itemprop` attribute' do
+ expect(element(itemprop: "identifier").attr('itemprop')).to eq("identifier")
+ end
+ end
end
end
diff --git a/spec/helpers/ci/pipeline_editor_helper_spec.rb b/spec/helpers/ci/pipeline_editor_helper_spec.rb
index 7686983eb0f..a08517d0c57 100644
--- a/spec/helpers/ci/pipeline_editor_helper_spec.rb
+++ b/spec/helpers/ci/pipeline_editor_helper_spec.rb
@@ -20,4 +20,36 @@ RSpec.describe Ci::PipelineEditorHelper do
expect(subject).to be false
end
end
+
+ describe '#js_pipeline_editor_data' do
+ let(:project) { create(:project, :repository) }
+
+ before do
+ allow(helper)
+ .to receive(:namespace_project_new_merge_request_path)
+ .and_return('/mock/project/-/merge_requests/new')
+
+ allow(helper)
+ .to receive(:image_path)
+ .and_return('foo')
+ end
+
+ subject(:pipeline_editor_data) { helper.js_pipeline_editor_data(project) }
+
+ it 'returns pipeline editor data' do
+ expect(pipeline_editor_data).to eq({
+ "ci-config-path": project.ci_config_path_or_default,
+ "commit-sha" => project.commit.sha,
+ "default-branch" => project.default_branch,
+ "empty-state-illustration-path" => 'foo',
+ "initial-branch-name": nil,
+ "lint-help-page-path" => help_page_path('ci/lint', anchor: 'validate-basic-logic-and-syntax'),
+ "new-merge-request-path" => '/mock/project/-/merge_requests/new',
+ "project-path" => project.path,
+ "project-full-path" => project.full_path,
+ "project-namespace" => project.namespace.full_path,
+ "yml-help-page-path" => help_page_path('ci/yaml/README')
+ })
+ end
+ end
end
diff --git a/spec/helpers/ci/runners_helper_spec.rb b/spec/helpers/ci/runners_helper_spec.rb
index 6e41afac4ee..94d4d620de9 100644
--- a/spec/helpers/ci/runners_helper_spec.rb
+++ b/spec/helpers/ci/runners_helper_spec.rb
@@ -3,19 +3,26 @@
require 'spec_helper'
RSpec.describe Ci::RunnersHelper do
- it "returns - not contacted yet" do
- runner = FactoryBot.build :ci_runner
- expect(runner_status_icon(runner)).to include("not connected yet")
- end
+ describe '#runner_status_icon', :clean_gitlab_redis_cache do
+ it "returns not connected yet text" do
+ runner = create(:ci_runner)
+ expect(runner_status_icon(runner)).to include("not connected yet")
+ end
- it "returns offline text" do
- runner = FactoryBot.build(:ci_runner, contacted_at: 1.day.ago, active: true)
- expect(runner_status_icon(runner)).to include("Runner is offline")
- end
+ it "returns offline text" do
+ runner = create(:ci_runner, contacted_at: 1.day.ago, active: true)
+ expect(runner_status_icon(runner)).to include("Runner is offline")
+ end
- it "returns online text" do
- runner = FactoryBot.build(:ci_runner, contacted_at: 1.second.ago, active: true)
- expect(runner_status_icon(runner)).to include("Runner is online")
+ it "returns online text" do
+ runner = create(:ci_runner, contacted_at: 1.second.ago, active: true)
+ expect(runner_status_icon(runner)).to include("Runner is online")
+ end
+
+ it "returns paused text" do
+ runner = create(:ci_runner, contacted_at: 1.second.ago, active: false)
+ expect(runner_status_icon(runner)).to include("Runner is paused")
+ end
end
describe '#runner_contacted_at' do
@@ -77,6 +84,7 @@ RSpec.describe Ci::RunnersHelper do
describe '#toggle_shared_runners_settings_data' do
let_it_be(:group) { create(:group) }
+
let(:project_with_runners) { create(:project, namespace: group, shared_runners_enabled: true) }
let(:project_without_runners) { create(:project, namespace: group, shared_runners_enabled: false) }
diff --git a/spec/helpers/commits_helper_spec.rb b/spec/helpers/commits_helper_spec.rb
index 2a8e2e04947..86ed133e599 100644
--- a/spec/helpers/commits_helper_spec.rb
+++ b/spec/helpers/commits_helper_spec.rb
@@ -5,58 +5,6 @@ require 'spec_helper'
RSpec.describe CommitsHelper do
include ProjectForksHelper
- describe '#revert_commit_link' do
- context 'when current_user exists' do
- before do
- allow(helper).to receive(:current_user).and_return(double('User'))
- end
-
- it 'renders a div for Vue' do
- result = helper.revert_commit_link
-
- expect(result).to include('js-revert-commit-trigger')
- end
- end
-
- context 'when current_user does not exist' do
- before do
- allow(helper).to receive(:current_user).and_return(nil)
- end
-
- it 'does not render anything' do
- result = helper.revert_commit_link
-
- expect(result).to be_nil
- end
- end
- end
-
- describe '#cherry_pick_commit_link' do
- context 'when current_user exists' do
- before do
- allow(helper).to receive(:current_user).and_return(double('User'))
- end
-
- it 'renders a div for Vue' do
- result = helper.cherry_pick_commit_link
-
- expect(result).to include('js-cherry-pick-commit-trigger')
- end
- end
-
- context 'when current_user does not exist' do
- before do
- allow(helper).to receive(:current_user).and_return(nil)
- end
-
- it 'does not render anything' do
- result = helper.cherry_pick_commit_link
-
- expect(result).to be_nil
- end
- end
- end
-
describe 'commit_author_link' do
it 'escapes the author email' do
commit = double(
@@ -252,7 +200,7 @@ RSpec.describe CommitsHelper do
end
it 'returns data for cherry picking into a project' do
- expect(helper.cherry_pick_projects_data(project)).to match_array([
+ expect(helper.cherry_pick_projects_data(forked_project)).to match_array([
{ id: project.id.to_s, name: project.full_path, refsUrl: refs_project_path(project) },
{ id: forked_project.id.to_s, name: forked_project.full_path, refsUrl: refs_project_path(forked_project) }
])
@@ -268,4 +216,77 @@ RSpec.describe CommitsHelper do
end
end
end
+
+ describe "#commit_options_dropdown_data" do
+ let(:project) { build(:project, :repository) }
+ let(:commit) { build(:commit) }
+ let(:user) { build(:user) }
+
+ subject { helper.commit_options_dropdown_data(project, commit) }
+
+ context "when user is logged in" do
+ before do
+ allow(helper).to receive(:can?).with(user, :push_code, project).and_return(true)
+ allow(helper).to receive(:current_user).and_return(user)
+ end
+
+ it "returns data as expected" do
+ is_expected.to eq standard_expected_data
+ end
+
+ context "when can not collaborate on project" do
+ before do
+ allow(helper).to receive(:can_collaborate_with_project?).with(project).and_return(false)
+ end
+
+ it "returns data as expected" do
+ no_collaboration_values = {
+ can_revert: 'false',
+ can_cherry_pick: 'false'
+ }
+
+ is_expected.to eq standard_expected_data.merge(no_collaboration_values)
+ end
+ end
+
+ context "when commit has already been reverted" do
+ before do
+ allow(commit).to receive(:has_been_reverted?).with(user).and_return(true)
+ end
+
+ it "returns data as expected" do
+ is_expected.to eq standard_expected_data.merge({ can_revert: 'false' })
+ end
+ end
+ end
+
+ context "when user is not logged in" do
+ before do
+ allow(helper).to receive(:can?).with(nil, :push_code, project).and_return(false)
+ allow(helper).to receive(:current_user).and_return(nil)
+ end
+
+ it "returns data as expected" do
+ logged_out_values = {
+ can_revert: '',
+ can_cherry_pick: '',
+ can_tag: 'false'
+ }
+
+ is_expected.to eq standard_expected_data.merge(logged_out_values)
+ end
+ end
+
+ def standard_expected_data
+ {
+ new_project_tag_path: new_project_tag_path(project, ref: commit),
+ email_patches_path: project_commit_path(project, commit, format: :patch),
+ plain_diff_path: project_commit_path(project, commit, format: :diff),
+ can_revert: 'true',
+ can_cherry_pick: 'true',
+ can_tag: 'true',
+ can_email_patches: 'true'
+ }
+ end
+ end
end
diff --git a/spec/helpers/diff_helper_spec.rb b/spec/helpers/diff_helper_spec.rb
index 20fa8d62884..dfea1020c52 100644
--- a/spec/helpers/diff_helper_spec.rb
+++ b/spec/helpers/diff_helper_spec.rb
@@ -291,6 +291,8 @@ RSpec.describe DiffHelper do
end
describe '#render_overflow_warning?' do
+ using RSpec::Parameterized::TableSyntax
+
let(:diffs_collection) { instance_double(Gitlab::Diff::FileCollection::MergeRequestDiff, raw_diff_files: diff_files) }
let(:diff_files) { Gitlab::Git::DiffCollection.new(files) }
let(:safe_file) { { too_large: false, diff: '' } }
@@ -299,13 +301,42 @@ RSpec.describe DiffHelper do
before do
allow(diff_files).to receive(:overflow?).and_return(false)
+ allow(diff_files).to receive(:overflow_max_bytes?).and_return(false)
+ allow(diff_files).to receive(:overflow_max_files?).and_return(false)
+ allow(diff_files).to receive(:overflow_max_lines?).and_return(false)
+ allow(diff_files).to receive(:collapsed_safe_bytes?).and_return(false)
+ allow(diff_files).to receive(:collapsed_safe_files?).and_return(false)
+ allow(diff_files).to receive(:collapsed_safe_lines?).and_return(false)
end
- context 'when neither collection nor individual file hit the limit' do
+ context 'when no limits are hit' do
it 'returns false and does not log any overflow events' do
expect(Gitlab::Metrics).not_to receive(:add_event).with(:diffs_overflow_collection_limits)
expect(Gitlab::Metrics).not_to receive(:add_event).with(:diffs_overflow_single_file_limits)
+ expect(Gitlab::Metrics).not_to receive(:add_event).with(:diffs_overflow_max_bytes_limits)
+ expect(Gitlab::Metrics).not_to receive(:add_event).with(:diffs_overflow_max_files_limits)
+ expect(Gitlab::Metrics).not_to receive(:add_event).with(:diffs_overflow_max_lines_limits)
+ expect(Gitlab::Metrics).not_to receive(:add_event).with(:diffs_overflow_collapsed_bytes_limits)
+ expect(Gitlab::Metrics).not_to receive(:add_event).with(:diffs_overflow_collapsed_files_limits)
+ expect(Gitlab::Metrics).not_to receive(:add_event).with(:diffs_overflow_collapsed_lines_limits)
+
+ expect(render_overflow_warning?(diffs_collection)).to be false
+ end
+ end
+
+ where(:overflow_method, :event_name) do
+ :overflow_max_bytes? | :diffs_overflow_max_bytes_limits
+ :overflow_max_files? | :diffs_overflow_max_files_limits
+ :overflow_max_lines? | :diffs_overflow_max_lines_limits
+ :collapsed_safe_bytes? | :diffs_overflow_collapsed_bytes_limits
+ :collapsed_safe_files? | :diffs_overflow_collapsed_files_limits
+ :collapsed_safe_lines? | :diffs_overflow_collapsed_lines_limits
+ end
+ with_them do
+ it 'returns false and only logs the correct collection overflow event' do
+ allow(diff_files).to receive(overflow_method).and_return(true)
+ expect(Gitlab::Metrics).to receive(:add_event).with(event_name).once
expect(render_overflow_warning?(diffs_collection)).to be false
end
end
@@ -315,9 +346,8 @@ RSpec.describe DiffHelper do
allow(diff_files).to receive(:overflow?).and_return(true)
end
- it 'returns false and only logs collection overflow event' do
- expect(Gitlab::Metrics).to receive(:add_event).with(:diffs_overflow_collection_limits).exactly(:once)
- expect(Gitlab::Metrics).not_to receive(:add_event).with(:diffs_overflow_single_file_limits)
+ it 'returns true and only logs the collection overflow event' do
+ expect(Gitlab::Metrics).to receive(:add_event).with(:diffs_overflow_collection_limits).once
expect(render_overflow_warning?(diffs_collection)).to be true
end
diff --git a/spec/helpers/graph_helper_spec.rb b/spec/helpers/graph_helper_spec.rb
index 682f6365481..0930417accb 100644
--- a/spec/helpers/graph_helper_spec.rb
+++ b/spec/helpers/graph_helper_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe GraphHelper do
end
end
- describe '#should_render_deployment_frequency_charts' do
+ describe '#should_render_dora_charts' do
let(:project) { create(:project, :private) }
before do
@@ -24,7 +24,7 @@ RSpec.describe GraphHelper do
end
it 'always returns false' do
- expect(should_render_deployment_frequency_charts).to be(false)
+ expect(should_render_dora_charts).to be(false)
end
end
end
diff --git a/spec/helpers/groups_helper_spec.rb b/spec/helpers/groups_helper_spec.rb
index 0d2af464902..d588120bb98 100644
--- a/spec/helpers/groups_helper_spec.rb
+++ b/spec/helpers/groups_helper_spec.rb
@@ -5,33 +5,31 @@ require 'spec_helper'
RSpec.describe GroupsHelper do
include ApplicationHelper
- describe 'group_icon_url' do
+ describe '#group_icon_url' do
it 'returns an url for the avatar' do
- avatar_file_path = File.join('spec', 'fixtures', 'banana_sample.gif')
+ group = create(:group, :with_avatar)
- group = create(:group)
- group.avatar = fixture_file_upload(avatar_file_path)
- group.save!
- expect(group_icon_url(group.path).to_s)
- .to match(group.avatar.url)
+ expect(group_icon_url(group.path).to_s).to match(group.avatar.url)
end
it 'gives default avatar_icon when no avatar is present' do
- group = create(:group)
+ group = build_stubbed(:group)
+
expect(group_icon_url(group.path)).to match_asset_path('group_avatar.png')
end
end
- describe 'group_dependency_proxy_url' do
+ describe '#group_dependency_proxy_url' do
it 'converts uppercase letters to lowercase' do
- group = create(:group, path: 'GroupWithUPPERcaseLetters')
+ group = build_stubbed(:group, path: 'GroupWithUPPERcaseLetters')
+
expect(group_dependency_proxy_url(group)).to end_with("/groupwithuppercaseletters#{DependencyProxy::URL_SUFFIX}")
end
end
- describe 'group_lfs_status' do
- let(:group) { create(:group) }
- let!(:project) { create(:project, namespace_id: group.id) }
+ describe '#group_lfs_status' do
+ let_it_be_with_reload(:group) { create(:group) }
+ let_it_be_with_reload(:project) { create(:project, namespace_id: group.id) }
before do
allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
@@ -54,9 +52,7 @@ RSpec.describe GroupsHelper do
end
context 'more than one project in group' do
- before do
- create(:project, namespace_id: group.id)
- end
+ let_it_be_with_reload(:another_project) { create(:project, namespace_id: group.id) }
context 'LFS enabled in group' do
before do
@@ -92,7 +88,7 @@ RSpec.describe GroupsHelper do
end
end
- describe 'group_title' do
+ describe '#group_title' do
let_it_be(:group) { create(:group) }
let_it_be(:nested_group) { create(:group, parent: group) }
let_it_be(:deep_nested_group) { create(:group, parent: nested_group) }
@@ -113,16 +109,26 @@ RSpec.describe GroupsHelper do
subject
end
+
+ it 'avoids N+1 queries' do
+ control_count = ActiveRecord::QueryRecorder.new do
+ helper.group_title(nested_group)
+ end
+
+ expect do
+ helper.group_title(very_deep_nested_group)
+ end.not_to exceed_query_limit(control_count)
+ end
end
- # rubocop:disable Layout/SpaceBeforeComma
describe '#share_with_group_lock_help_text' do
- let!(:root_group) { create(:group) }
- let!(:subgroup) { create(:group, parent: root_group) }
- let!(:sub_subgroup) { create(:group, parent: subgroup) }
- let(:root_owner) { create(:user) }
- let(:sub_owner) { create(:user) }
- let(:sub_sub_owner) { create(:user) }
+ let_it_be_with_reload(:root_group) { create(:group) }
+ let_it_be_with_reload(:subgroup) { create(:group, parent: root_group) }
+ let_it_be_with_reload(:sub_subgroup) { create(:group, parent: subgroup) }
+ let_it_be(:root_owner) { create(:user) }
+ let_it_be(:sub_owner) { create(:user) }
+ let_it_be(:sub_sub_owner) { create(:user) }
+
let(:possible_help_texts) do
{
default_help: "This setting will be applied to all subgroups unless overridden by a group owner",
@@ -149,6 +155,13 @@ RSpec.describe GroupsHelper do
subject { helper.share_with_group_lock_help_text(sub_subgroup) }
+ before_all do
+ root_group.add_owner(root_owner)
+ subgroup.add_owner(sub_owner)
+ sub_subgroup.add_owner(sub_sub_owner)
+ end
+
+ # rubocop:disable Layout/SpaceBeforeComma
where(:root_share_with_group_locked, :subgroup_share_with_group_locked, :sub_subgroup_share_with_group_locked, :current_user, :help_text, :linked_ancestor) do
[
[false , false , false , :root_owner , :default_help , nil],
@@ -177,13 +190,10 @@ RSpec.describe GroupsHelper do
[true , true , true , :sub_sub_owner , :ancestor_locked_so_ask_the_owner , :root_group]
]
end
+ # rubocop:enable Layout/SpaceBeforeComma
with_them do
before do
- root_group.add_owner(root_owner)
- subgroup.add_owner(sub_owner)
- sub_subgroup.add_owner(sub_sub_owner)
-
root_group.update_column(:share_with_group_lock, true) if root_share_with_group_locked
subgroup.update_column(:share_with_group_lock, true) if subgroup_share_with_group_locked
sub_subgroup.update_column(:share_with_group_lock, true) if sub_subgroup_share_with_group_locked
@@ -212,8 +222,8 @@ RSpec.describe GroupsHelper do
end
describe '#group_container_registry_nav' do
- let(:group) { create(:group, :public) }
- let(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:user) { create(:user) }
before do
stub_container_registry_config(enabled: true)
@@ -248,8 +258,8 @@ RSpec.describe GroupsHelper do
end
describe '#group_sidebar_links' do
- let(:group) { create(:group, :public) }
- let(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:user) { create(:user) }
before do
group.add_owner(user)
@@ -287,10 +297,10 @@ RSpec.describe GroupsHelper do
end
end
- describe 'parent_group_options' do
- let(:current_user) { create(:user) }
- let(:group) { create(:group, name: 'group') }
- let(:group2) { create(:group, name: 'group2') }
+ describe '#parent_group_options' do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group, name: 'group') }
+ let_it_be(:group2) { create(:group, name: 'group2') }
before do
group.add_owner(current_user)
@@ -321,9 +331,9 @@ RSpec.describe GroupsHelper do
end
describe '#can_disable_group_emails?' do
- let(:current_user) { create(:user) }
- let(:group) { create(:group, name: 'group') }
- let(:subgroup) { create(:group, name: 'subgroup', parent: group) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group, name: 'group') }
+ let_it_be(:subgroup) { create(:group, name: 'subgroup', parent: group) }
before do
allow(helper).to receive(:current_user) { current_user }
@@ -361,8 +371,8 @@ RSpec.describe GroupsHelper do
end
describe '#can_update_default_branch_protection?' do
- let(:current_user) { create(:user) }
- let(:group) { create(:group) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group) }
subject { helper.can_update_default_branch_protection?(group) }
@@ -451,75 +461,42 @@ RSpec.describe GroupsHelper do
end
end
- describe '#group_open_issues_count' do
+ describe '#render_setting_to_allow_project_access_token_creation?' do
let_it_be(:current_user) { create(:user) }
- let_it_be(:group) { create(:group, :public) }
- let_it_be(:count_service) { Groups::OpenIssuesCountService }
+ let_it_be(:parent) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent) }
before do
allow(helper).to receive(:current_user) { current_user }
+ parent.add_owner(current_user)
+ group.add_owner(current_user)
end
- it 'returns count value from cache' do
- allow_next_instance_of(count_service) do |service|
- allow(service).to receive(:count).and_return(2500)
- end
-
- expect(helper.group_open_issues_count(group)).to eq('2.5k')
+ it 'returns true if group is root' do
+ expect(helper.render_setting_to_allow_project_access_token_creation?(parent)).to be_truthy
end
- context 'when cached_sidebar_open_issues_count feature flag is disabled' do
- before do
- stub_feature_flags(cached_sidebar_open_issues_count: false)
- end
-
- it 'returns not cached issues count' do
- allow(helper).to receive(:group_issues_count).and_return(2500)
-
- expect(helper.group_open_issues_count(group)).to eq('2,500')
- end
+ it 'returns false if group is subgroup' do
+ expect(helper.render_setting_to_allow_project_access_token_creation?(group)).to be_falsy
end
end
- describe '#cached_open_group_issues_count' do
+ describe '#cached_issuables_count' do
let_it_be(:current_user) { create(:user) }
let_it_be(:group) { create(:group, name: 'group') }
- let_it_be(:count_service) { Groups::OpenIssuesCountService }
- before do
- allow(helper).to receive(:current_user) { current_user }
- end
+ context 'with issues type' do
+ let(:type) { :issues }
+ let(:count_service) { Groups::OpenIssuesCountService }
- it 'returns all digits for count value under 1000' do
- allow_next_instance_of(count_service) do |service|
- allow(service).to receive(:count).and_return(999)
- end
-
- expect(helper.cached_open_group_issues_count(group)).to eq('999')
- end
-
- it 'returns truncated digits for count value over 1000' do
- allow_next_instance_of(count_service) do |service|
- allow(service).to receive(:count).and_return(2300)
- end
-
- expect(helper.cached_open_group_issues_count(group)).to eq('2.3k')
+ it_behaves_like 'cached issuables count'
end
- it 'returns truncated digits for count value over 10000' do
- allow_next_instance_of(count_service) do |service|
- allow(service).to receive(:count).and_return(12560)
- end
-
- expect(helper.cached_open_group_issues_count(group)).to eq('12.6k')
- end
-
- it 'returns truncated digits for count value over 100000' do
- allow_next_instance_of(count_service) do |service|
- allow(service).to receive(:count).and_return(112560)
- end
+ context 'with merge requests type' do
+ let(:type) { :merge_requests }
+ let(:count_service) { Groups::MergeRequestsCountService }
- expect(helper.cached_open_group_issues_count(group)).to eq('112.6k')
+ it_behaves_like 'cached issuables count'
end
end
end
diff --git a/spec/helpers/ide_helper_spec.rb b/spec/helpers/ide_helper_spec.rb
index db30446fa95..963d5953d4c 100644
--- a/spec/helpers/ide_helper_spec.rb
+++ b/spec/helpers/ide_helper_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe IdeHelper do
'branch-name' => nil,
'file-path' => nil,
'merge-request' => nil,
- 'forked-project' => nil,
+ 'fork-info' => nil,
'project' => nil
)
end
@@ -25,10 +25,12 @@ RSpec.describe IdeHelper do
context 'when instance vars are set' do
it 'returns instance data in the hash' do
+ fork_info = { ide_path: '/test/ide/path' }
+
self.instance_variable_set(:@branch, 'master')
self.instance_variable_set(:@path, 'foo/bar')
self.instance_variable_set(:@merge_request, '1')
- self.instance_variable_set(:@forked_project, project)
+ self.instance_variable_set(:@fork_info, fork_info)
self.instance_variable_set(:@project, project)
serialized_project = API::Entities::Project.represent(project).to_json
@@ -38,7 +40,7 @@ RSpec.describe IdeHelper do
'branch-name' => 'master',
'file-path' => 'foo/bar',
'merge-request' => '1',
- 'forked-project' => serialized_project,
+ 'fork-info' => fork_info.to_json,
'project' => serialized_project
)
end
diff --git a/spec/helpers/invite_members_helper_spec.rb b/spec/helpers/invite_members_helper_spec.rb
index 62bd953cce8..109b1fc4441 100644
--- a/spec/helpers/invite_members_helper_spec.rb
+++ b/spec/helpers/invite_members_helper_spec.rb
@@ -5,6 +5,7 @@ require "spec_helper"
RSpec.describe InviteMembersHelper do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user, developer_projects: [project]) }
+
let(:owner) { project.owner }
before do
@@ -253,6 +254,7 @@ RSpec.describe InviteMembersHelper do
context 'with a project' do
let_it_be(:form_model) { project }
+
let(:link_href) { "href=\"#{project_project_members_path(form_model)}\"" }
it_behaves_like 'dropdown invite members link'
@@ -260,6 +262,7 @@ RSpec.describe InviteMembersHelper do
context 'with a group' do
let_it_be(:form_model) { create(:group) }
+
let(:link_href) { "href=\"#{group_group_members_path(form_model)}\"" }
it_behaves_like 'dropdown invite members link'
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index d6b002b47eb..54524858962 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -44,6 +44,60 @@ RSpec.describe IssuablesHelper do
end
end
+ describe '#assignees_label' do
+ let(:issuable) { build(:merge_request) }
+ let(:assignee1) { build_stubbed(:user, name: 'Jane Doe') }
+ let(:assignee2) { build_stubbed(:user, name: 'John Doe') }
+
+ before do
+ allow(issuable).to receive(:assignees).and_return(assignees)
+ end
+
+ context 'when multiple assignees exist' do
+ let(:assignees) { [assignee1, assignee2] }
+
+ it 'returns assignee label with assignee names' do
+ expect(helper.assignees_label(issuable)).to eq("Assignees: Jane Doe and John Doe")
+ end
+
+ it 'returns only the assignees label when include_value: false' do
+ expect(helper.assignees_label(issuable, include_value: false)).to eq("Assignees")
+ end
+
+ context 'when the name contains a URL' do
+ let(:assignees) { [build_stubbed(:user, name: 'www.gitlab.com')] }
+
+ it 'returns sanitized name' do
+ expect(helper.assignees_label(issuable)).to eq("Assignee: www_gitlab_com")
+ end
+ end
+ end
+
+ context 'when one assignee exists' do
+ let(:assignees) { [assignee1] }
+
+ it 'returns assignee label with the assignee name' do
+ expect(helper.assignees_label(issuable)).to eq("Assignee: Jane Doe")
+ end
+
+ it 'returns only the assignee label when include_value: false' do
+ expect(helper.assignees_label(issuable, include_value: false)).to eq("Assignee")
+ end
+ end
+
+ context 'when no assignees exist' do
+ let(:assignees) { [] }
+
+ it 'returns assignee label with no names' do
+ expect(helper.assignees_label(issuable)).to eq("Assignees: ")
+ end
+
+ it 'returns only the assignees label when include_value: false' do
+ expect(helper.assignees_label(issuable, include_value: false)).to eq("Assignees")
+ end
+ end
+ end
+
describe '#issuable_meta' do
let(:user) { create(:user) }
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index 07e55e9b016..21a01f349b5 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -281,4 +281,55 @@ RSpec.describe IssuesHelper do
expect(helper.issue_header_actions_data(project, issue, current_user)).to include(expected)
end
end
+
+ shared_examples 'issues list data' do
+ it 'returns expected result' do
+ finder = double.as_null_object
+ allow(helper).to receive(:current_user).and_return(current_user)
+ allow(helper).to receive(:finder).and_return(finder)
+ allow(helper).to receive(:can?).and_return(true)
+ allow(helper).to receive(:image_path).and_return('#')
+ allow(helper).to receive(:import_csv_namespace_project_issues_path).and_return('#')
+ allow(helper).to receive(:url_for).and_return('#')
+
+ expected = {
+ calendar_path: '#',
+ can_bulk_update: 'true',
+ can_edit: 'true',
+ can_import_issues: 'true',
+ email: current_user&.notification_email,
+ empty_state_svg_path: '#',
+ endpoint: expose_path(api_v4_projects_issues_path(id: project.id)),
+ export_csv_path: export_csv_project_issues_path(project),
+ full_path: project.full_path,
+ has_issues: project_issues(project).exists?.to_s,
+ import_csv_issues_path: '#',
+ is_signed_in: current_user.present?.to_s,
+ issues_path: project_issues_path(project),
+ jira_integration_path: help_page_url('user/project/integrations/jira', anchor: 'view-jira-issues'),
+ max_attachment_size: number_to_human_size(Gitlab::CurrentSettings.max_attachment_size.megabytes),
+ new_issue_path: new_project_issue_path(project, issue: { assignee_id: finder.assignee.id, milestone_id: finder.milestones.first.id }),
+ project_import_jira_path: project_import_jira_path(project),
+ rss_path: '#',
+ show_new_issue_link: 'true',
+ sign_in_path: new_user_session_path
+ }
+
+ expect(helper.issues_list_data(project, current_user, finder)).to include(expected)
+ end
+ end
+
+ describe '#issues_list_data' do
+ context 'when user is signed in' do
+ it_behaves_like 'issues list data' do
+ let(:current_user) { double.as_null_object }
+ end
+ end
+
+ context 'when user is anonymous' do
+ it_behaves_like 'issues list data' do
+ let(:current_user) { nil }
+ end
+ end
+ end
end
diff --git a/spec/helpers/jira_connect_helper_spec.rb b/spec/helpers/jira_connect_helper_spec.rb
index 9695bed948b..55a5c724665 100644
--- a/spec/helpers/jira_connect_helper_spec.rb
+++ b/spec/helpers/jira_connect_helper_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe JiraConnectHelper do
describe '#jira_connect_app_data' do
let_it_be(:subscription) { create(:jira_connect_subscription) }
+
let(:user) { create(:user) }
subject { helper.jira_connect_app_data([subscription]) }
diff --git a/spec/helpers/labels_helper_spec.rb b/spec/helpers/labels_helper_spec.rb
index b93dc03e434..526983a0d5f 100644
--- a/spec/helpers/labels_helper_spec.rb
+++ b/spec/helpers/labels_helper_spec.rb
@@ -36,6 +36,7 @@ RSpec.describe LabelsHelper do
context 'with a group label' do
let_it_be(:group) { create(:group) }
+
let(:label) { create(:group_label, group: group, title: 'bug') }
context 'when asking for an issue link' do
diff --git a/spec/helpers/learn_gitlab_helper_spec.rb b/spec/helpers/learn_gitlab_helper_spec.rb
index 6cee8a9191c..82c8e4ba596 100644
--- a/spec/helpers/learn_gitlab_helper_spec.rb
+++ b/spec/helpers/learn_gitlab_helper_spec.rb
@@ -27,6 +27,7 @@ RSpec.describe LearnGitlabHelper do
it 'has all actions' do
expect(onboarding_actions_data.keys).to contain_exactly(
+ :issue_created,
:git_write,
:pipeline_created,
:merge_request_created,
diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb
index 3d1690f6588..00a59f037e0 100644
--- a/spec/helpers/markup_helper_spec.rb
+++ b/spec/helpers/markup_helper_spec.rb
@@ -13,6 +13,7 @@ RSpec.describe MarkupHelper do
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let_it_be(:snippet) { create(:project_snippet, project: project) }
+
let(:commit) { project.commit }
before do
@@ -382,6 +383,27 @@ RSpec.describe MarkupHelper do
end
end
+ context 'when file is Kramdown' do
+ let(:extension) { 'rmd' }
+ let(:content) do
+ <<-EOF
+{::options parse_block_html="true" /}
+
+<div>
+FooBar
+</div>
+ EOF
+ end
+
+ it 'renders using #markdown_unsafe helper method' do
+ expect(helper).to receive(:markdown_unsafe).with(content, context)
+
+ result = helper.render_wiki_content(wiki)
+
+ expect(result).to be_empty
+ end
+ end
+
context 'any other format' do
let(:extension) { 'foo' }
@@ -432,6 +454,7 @@ RSpec.describe MarkupHelper do
let_it_be(:project_base) { create(:project, :repository) }
let_it_be(:context) { { project: project_base } }
+
let(:file_name) { 'foo.bar' }
let(:text) { 'Noël' }
@@ -554,7 +577,7 @@ RSpec.describe MarkupHelper do
it 'preserves code color scheme' do
object = create_object("```ruby\ndef test\n 'hello world'\nend\n```")
- expected = "<pre class=\"code highlight js-syntax-highlight ruby\">" \
+ expected = "<pre class=\"code highlight js-syntax-highlight language-ruby\">" \
"<code><span class=\"line\"><span class=\"k\">def</span> <span class=\"nf\">test</span>...</span>\n" \
"</code></pre>"
diff --git a/spec/helpers/namespaces_helper_spec.rb b/spec/helpers/namespaces_helper_spec.rb
index b436f4ab0c9..8c08b06d8a8 100644
--- a/spec/helpers/namespaces_helper_spec.rb
+++ b/spec/helpers/namespaces_helper_spec.rb
@@ -194,4 +194,75 @@ RSpec.describe NamespacesHelper do
end
end
end
+
+ describe '#cascading_namespace_settings_enabled?' do
+ subject { helper.cascading_namespace_settings_enabled? }
+
+ context 'when `cascading_namespace_settings` feature flag is enabled' do
+ it 'returns `true`' do
+ expect(subject).to be(true)
+ end
+ end
+
+ context 'when `cascading_namespace_settings` feature flag is disabled' do
+ before do
+ stub_feature_flags(cascading_namespace_settings: false)
+ end
+
+ it 'returns `false`' do
+ expect(subject).to be(false)
+ end
+ end
+ end
+
+ describe '#cascading_namespace_settings_popover_data' do
+ attribute = :delayed_project_removal
+
+ subject do
+ helper.cascading_namespace_settings_popover_data(
+ attribute,
+ subgroup1,
+ -> (locked_ancestor) { edit_group_path(locked_ancestor, anchor: 'js-permissions-settings') }
+ )
+ end
+
+ context 'when locked by an application setting' do
+ before do
+ allow(subgroup1.namespace_settings).to receive("#{attribute}_locked_by_application_setting?").and_return(true)
+ allow(subgroup1.namespace_settings).to receive("#{attribute}_locked_by_ancestor?").and_return(false)
+ end
+
+ it 'returns expected hash' do
+ expect(subject).to match({
+ popover_data: {
+ locked_by_application_setting: true,
+ locked_by_ancestor: false
+ }.to_json,
+ testid: 'cascading-settings-lock-icon'
+ })
+ end
+ end
+
+ context 'when locked by an ancestor namespace' do
+ before do
+ allow(subgroup1.namespace_settings).to receive("#{attribute}_locked_by_application_setting?").and_return(false)
+ allow(subgroup1.namespace_settings).to receive("#{attribute}_locked_by_ancestor?").and_return(true)
+ allow(subgroup1.namespace_settings).to receive("#{attribute}_locked_ancestor").and_return(admin_group.namespace_settings)
+ end
+
+ it 'returns expected hash' do
+ expect(subject).to match({
+ popover_data: {
+ locked_by_application_setting: false,
+ locked_by_ancestor: true,
+ ancestor_namespace: {
+ full_name: admin_group.full_name,
+ path: edit_group_path(admin_group, anchor: 'js-permissions-settings')
+ }
+ }.to_json,
+ testid: 'cascading-settings-lock-icon'
+ })
+ end
+ end
+ end
end
diff --git a/spec/helpers/nav_helper_spec.rb b/spec/helpers/nav_helper_spec.rb
index c4795a814ba..2efff3402c5 100644
--- a/spec/helpers/nav_helper_spec.rb
+++ b/spec/helpers/nav_helper_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe NavHelper do
context 'as admin' do
let(:user) { create(:user, :admin) }
- context 'feature flag :user_mode_in_session is enabled' do
+ context 'application setting :admin_mode is enabled' do
it 'does not contain the admin mode link by default' do
expect(helper.header_links).not_to include(:admin_mode)
end
@@ -52,9 +52,9 @@ RSpec.describe NavHelper do
end
end
- context 'feature flag :user_mode_in_session is disabled' do
+ context 'application setting :admin_mode is disabled' do
before do
- stub_feature_flags(user_mode_in_session: false)
+ stub_application_setting(admin_mode: false)
end
it 'does not contain the admin mode link' do
diff --git a/spec/helpers/notes_helper_spec.rb b/spec/helpers/notes_helper_spec.rb
index b8502cdf25e..fc62bbf8bf8 100644
--- a/spec/helpers/notes_helper_spec.rb
+++ b/spec/helpers/notes_helper_spec.rb
@@ -15,6 +15,7 @@ RSpec.describe NotesHelper do
let_it_be(:owner_note) { create(:note, author: owner, project: project) }
let_it_be(:maintainer_note) { create(:note, author: maintainer, project: project) }
let_it_be(:reporter_note) { create(:note, author: reporter, project: project) }
+
let!(:notes) { [owner_note, maintainer_note, reporter_note] }
before_all do
@@ -73,6 +74,7 @@ RSpec.describe NotesHelper do
describe '#discussion_path' do
let_it_be(:project) { create(:project, :repository) }
+
let(:anchor) { discussion.line_code }
context 'for a merge request discussion' do
diff --git a/spec/helpers/page_layout_helper_spec.rb b/spec/helpers/page_layout_helper_spec.rb
index 99cdee6dbb2..d03e39f2051 100644
--- a/spec/helpers/page_layout_helper_spec.rb
+++ b/spec/helpers/page_layout_helper_spec.rb
@@ -223,39 +223,37 @@ RSpec.describe PageLayoutHelper do
end
describe '#user_status_properties' do
- using RSpec::Parameterized::TableSyntax
-
let(:user) { build(:user) }
- availability_types = Types::AvailabilityEnum.enum
-
- where(:message, :emoji, :availability) do
- "Some message" | UserStatus::DEFAULT_EMOJI | availability_types[:busy]
- "Some message" | UserStatus::DEFAULT_EMOJI | availability_types[:not_set]
- "Some message" | "basketball" | availability_types[:busy]
- "Some message" | "basketball" | availability_types[:not_set]
- "Some message" | "" | availability_types[:busy]
- "Some message" | "" | availability_types[:not_set]
- "" | UserStatus::DEFAULT_EMOJI | availability_types[:busy]
- "" | UserStatus::DEFAULT_EMOJI | availability_types[:not_set]
- "" | "basketball" | availability_types[:busy]
- "" | "basketball" | availability_types[:not_set]
- "" | "" | availability_types[:busy]
- "" | "" | availability_types[:not_set]
- end
+ subject { helper.user_status_properties(user) }
- with_them do
- it "sets the default user status fields" do
- user.status = UserStatus.new(message: message, emoji: emoji, availability: availability)
- result = {
+ context 'when the user has no status' do
+ it 'returns default properties' do
+ is_expected.to eq({
+ current_emoji: '',
+ current_message: '',
can_set_user_availability: true,
- current_availability: availability,
- current_emoji: emoji,
- current_message: message,
default_emoji: UserStatus::DEFAULT_EMOJI
- }
+ })
+ end
+ end
+
+ context 'when user has a status' do
+ let(:time) { 3.hours.ago }
- expect(helper.user_status_properties(user)).to eq(result)
+ before do
+ user.status = UserStatus.new(message: 'Some message', emoji: 'basketball', availability: 'busy', clear_status_at: time)
+ end
+
+ it 'merges the status properties with the defaults' do
+ is_expected.to eq({
+ current_clear_status_after: time.to_s,
+ current_availability: 'busy',
+ current_emoji: 'basketball',
+ current_message: 'Some message',
+ can_set_user_availability: true,
+ default_emoji: UserStatus::DEFAULT_EMOJI
+ })
end
end
end
diff --git a/spec/helpers/preferences_helper_spec.rb b/spec/helpers/preferences_helper_spec.rb
index e5420fb6729..4d7083c4ca7 100644
--- a/spec/helpers/preferences_helper_spec.rb
+++ b/spec/helpers/preferences_helper_spec.rb
@@ -33,7 +33,7 @@ RSpec.describe PreferencesHelper do
["Your Groups", 'groups'],
["Your To-Do List", 'todos'],
["Assigned Issues", 'issues'],
- ["Assigned Merge Requests", 'merge_requests']
+ ["Assigned merge requests", 'merge_requests']
]
end
end
diff --git a/spec/helpers/profiles_helper_spec.rb b/spec/helpers/profiles_helper_spec.rb
index 9687d038162..2ea832f95dc 100644
--- a/spec/helpers/profiles_helper_spec.rb
+++ b/spec/helpers/profiles_helper_spec.rb
@@ -112,6 +112,46 @@ RSpec.describe ProfilesHelper do
end
end
+ describe "#ssh_key_expiration_tooltip" do
+ using RSpec::Parameterized::TableSyntax
+
+ before do
+ allow(Key).to receive(:enforce_ssh_key_expiration_feature_available?).and_return(false)
+ end
+
+ error_message = 'Key type is forbidden. Must be DSA, ECDSA, or ED25519'
+
+ where(:error, :expired, :result) do
+ false | false | nil
+ true | false | error_message
+ false | true | 'Key usable beyond expiration date.'
+ true | true | error_message
+ end
+
+ with_them do
+ let_it_be(:key) do
+ build(:personal_key)
+ end
+
+ it do
+ key.expires_at = expired ? 2.days.ago : 2.days.from_now
+ key.errors.add(:base, error_message) if error
+
+ expect(helper.ssh_key_expiration_tooltip(key)).to eq(result)
+ end
+ end
+ end
+
+ describe "#ssh_key_expires_field_description" do
+ before do
+ allow(Key).to receive(:enforce_ssh_key_expiration_feature_available?).and_return(false)
+ end
+
+ it 'returns the description' do
+ expect(helper.ssh_key_expires_field_description).to eq('Key can still be used after expiration.')
+ end
+ end
+
def stub_cas_omniauth_provider
provider = OpenStruct.new(
'name' => 'cas3',
diff --git a/spec/helpers/projects/alert_management_helper_spec.rb b/spec/helpers/projects/alert_management_helper_spec.rb
index 0df194e460a..e836461b099 100644
--- a/spec/helpers/projects/alert_management_helper_spec.rb
+++ b/spec/helpers/projects/alert_management_helper_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Projects::AlertManagementHelper do
let_it_be(:project, reload: true) { create(:project) }
let_it_be(:current_user) { create(:user) }
+
let(:project_path) { project.full_path }
let(:project_id) { project.id }
diff --git a/spec/helpers/projects/issues/service_desk_helper_spec.rb b/spec/helpers/projects/issues/service_desk_helper_spec.rb
index 3f488fe692d..05766ee13c6 100644
--- a/spec/helpers/projects/issues/service_desk_helper_spec.rb
+++ b/spec/helpers/projects/issues/service_desk_helper_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Projects::Issues::ServiceDeskHelper do
let_it_be(:project) { create(:project, :public, service_desk_enabled: true) }
+
let(:user) { build_stubbed(:user) }
let(:current_user) { user }
diff --git a/spec/helpers/projects/project_members_helper_spec.rb b/spec/helpers/projects/project_members_helper_spec.rb
index 1a55840a58a..0e08a18f912 100644
--- a/spec/helpers/projects/project_members_helper_spec.rb
+++ b/spec/helpers/projects/project_members_helper_spec.rb
@@ -128,6 +128,7 @@ RSpec.describe Projects::ProjectMembersHelper do
describe "when current user is not the owner of the project's parent group" do
let_it_be(:user) { create(:user) }
+
let(:project2) { create(:project, namespace: group) }
before do
@@ -174,6 +175,7 @@ RSpec.describe Projects::ProjectMembersHelper do
describe 'project group links' do
let_it_be(:project_group_links) { create_list(:project_group_link, 1, project: project) }
+
let(:allow_admin_project) { true }
describe '#project_group_links_data_json' do
diff --git a/spec/helpers/projects/terraform_helper_spec.rb b/spec/helpers/projects/terraform_helper_spec.rb
index 70b08f4139b..8833e23c47d 100644
--- a/spec/helpers/projects/terraform_helper_spec.rb
+++ b/spec/helpers/projects/terraform_helper_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Projects::TerraformHelper do
describe '#js_terraform_list_data' do
let_it_be(:project) { create(:project) }
+
let(:current_user) { project.creator }
subject { helper.js_terraform_list_data(current_user, project) }
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index e6cd11a4d70..124cdcec05d 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -85,6 +85,7 @@ RSpec.describe ProjectsHelper do
describe "can_change_visibility_level?" do
let_it_be(:user) { create(:project_member, :reporter, user: create(:user), project: project).user }
+
let(:forked_project) { fork_project(project, user) }
it "returns false if there are no appropriate permissions" do
diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb
index 13d3a80bd13..7b2334ab79e 100644
--- a/spec/helpers/search_helper_spec.rb
+++ b/spec/helpers/search_helper_spec.rb
@@ -489,6 +489,7 @@ RSpec.describe SearchHelper do
describe '#repository_ref' do
let_it_be(:project) { create(:project, :repository) }
+
let(:params) { { repository_ref: 'the-repository-ref-param' } }
subject { repository_ref(project) }
diff --git a/spec/helpers/services_helper_spec.rb b/spec/helpers/services_helper_spec.rb
index 1726a8362a7..6dd872225ba 100644
--- a/spec/helpers/services_helper_spec.rb
+++ b/spec/helpers/services_helper_spec.rb
@@ -27,17 +27,31 @@ RSpec.describe ServicesHelper do
]
end
+ let(:jira_fields) do
+ [
+ :jira_issue_transition_automatic,
+ :jira_issue_transition_id
+ ]
+ end
+
subject { helper.integration_form_data(integration) }
context 'Slack service' do
let(:integration) { build(:slack_service) }
it { is_expected.to include(*fields) }
+ it { is_expected.not_to include(*jira_fields) }
specify do
expect(subject[:reset_path]).to eq(helper.scoped_reset_integration_path(integration))
end
end
+
+ context 'Jira service' do
+ let(:integration) { build(:jira_service) }
+
+ it { is_expected.to include(*fields, *jira_fields) }
+ end
end
describe '#scoped_reset_integration_path' do
diff --git a/spec/helpers/sidebars_helper_spec.rb b/spec/helpers/sidebars_helper_spec.rb
new file mode 100644
index 00000000000..e329968e6c0
--- /dev/null
+++ b/spec/helpers/sidebars_helper_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SidebarsHelper do
+ describe '#sidebar_tracking_attributes_by_object' do
+ subject { helper.sidebar_tracking_attributes_by_object(object) }
+
+ before do
+ allow(helper).to receive(:tracking_enabled?).and_return(true)
+ end
+
+ context 'when object is a project' do
+ let(:object) { build(:project) }
+
+ it 'returns tracking attrs for project' do
+ expect(subject[:data]).to eq({ track_label: 'projects_side_navigation', track_property: 'projects_side_navigation', track_action: 'render' })
+ end
+ end
+
+ context 'when object is a group' do
+ let(:object) { build(:group) }
+
+ it 'returns tracking attrs for group' do
+ expect(subject[:data]).to eq({ track_label: 'groups_side_navigation', track_property: 'groups_side_navigation', track_action: 'render' })
+ end
+ end
+
+ context 'when object is a user' do
+ let(:object) { build(:user) }
+
+ it 'returns tracking attrs for user' do
+ expect(subject[:data]).to eq({ track_label: 'user_side_navigation', track_property: 'user_side_navigation', track_action: 'render' })
+ end
+ end
+
+ context 'when object is something else' do
+ let(:object) { build(:ci_pipeline) }
+
+ it 'returns no attributes' do
+ expect(subject).to eq({})
+ end
+ end
+ end
+end
diff --git a/spec/helpers/snippets_helper_spec.rb b/spec/helpers/snippets_helper_spec.rb
index 5a3c8e37e8c..35882c9337b 100644
--- a/spec/helpers/snippets_helper_spec.rb
+++ b/spec/helpers/snippets_helper_spec.rb
@@ -32,7 +32,7 @@ RSpec.describe SnippetsHelper do
end
def download_link(url)
- "<a class=\"btn\" target=\"_blank\" rel=\"noopener noreferrer\" title=\"Open raw\" href=\"#{url}\">#{external_snippet_icon('doc-code')}</a>"
+ "<a class=\"gl-button btn btn-default\" target=\"_blank\" rel=\"noopener noreferrer\" title=\"Open raw\" href=\"#{url}\">#{external_snippet_icon('doc-code')}</a>"
end
end
@@ -59,31 +59,7 @@ RSpec.describe SnippetsHelper do
end
def download_link(url)
- "<a class=\"btn\" target=\"_blank\" title=\"Download\" rel=\"noopener noreferrer\" href=\"#{url}?inline=false\">#{external_snippet_icon('download')}</a>"
- end
- end
-
- describe '#download_raw_snippet_button' do
- subject { download_raw_snippet_button(snippet) }
-
- context 'with personal snippet' do
- let(:snippet) { public_personal_snippet }
-
- it 'returns the download button' do
- expect(subject).to eq(download_link("/-/snippets/#{snippet.id}/raw"))
- end
- end
-
- context 'with project snippet' do
- let(:snippet) { public_project_snippet }
-
- it 'returns the download button' do
- expect(subject).to eq(download_link("/#{snippet.project.path_with_namespace}/-/snippets/#{snippet.id}/raw"))
- end
- end
-
- def download_link(url)
- "<a target=\"_blank\" rel=\"noopener noreferrer\" class=\"btn btn-sm has-tooltip\" title=\"Download\" data-container=\"body\" href=\"#{url}?inline=false\">#{sprite_icon('download')}</a>"
+ "<a class=\"gl-button btn btn-default\" target=\"_blank\" title=\"Download\" rel=\"noopener noreferrer\" href=\"#{url}?inline=false\">#{external_snippet_icon('download')}</a>"
end
end
diff --git a/spec/helpers/tab_helper_spec.rb b/spec/helpers/tab_helper_spec.rb
index f89d0ac0f5a..bd8a8fa174a 100644
--- a/spec/helpers/tab_helper_spec.rb
+++ b/spec/helpers/tab_helper_spec.rb
@@ -6,81 +6,94 @@ RSpec.describe TabHelper do
include ApplicationHelper
describe 'nav_link' do
+ using RSpec::Parameterized::TableSyntax
+
before do
allow(controller).to receive(:controller_name).and_return('foo')
allow(self).to receive(:action_name).and_return('foo')
end
context 'with the content of the li' do
- it "captures block output" do
+ it 'captures block output' do
expect(nav_link { "Testing Blocks" }).to match(/Testing Blocks/)
end
end
- context 'with controller param' do
- it "performs checks on the current controller" do
- expect(nav_link(controller: :foo)).to match(/<li class="active">/)
- expect(nav_link(controller: :bar)).not_to match(/active/)
- expect(nav_link(controller: [:foo, :bar])).to match(/active/)
- end
+ it 'passes extra html options to the list element' do
+ expect(nav_link(action: :foo, html_options: { class: 'home' })).to match(/<li class="home active">/)
+ expect(nav_link(html_options: { class: 'active' })).to match(/<li class="active">/)
+ end
- context 'with action param' do
- it "performs checks on both controller and action when both are present" do
- expect(nav_link(controller: :bar, action: :foo)).not_to match(/active/)
- expect(nav_link(controller: :foo, action: :bar)).not_to match(/active/)
- expect(nav_link(controller: :foo, action: :foo)).to match(/active/)
- end
- end
+ where(:controller_param, :action_param, :path_param, :active) do
+ nil | nil | nil | false
+ :foo | nil | nil | true
+ :bar | nil | nil | false
+ :bar | :foo | nil | false
+ :foo | :bar | nil | false
+ :foo | :foo | nil | true
+ :bar | nil | 'foo#foo' | true
+ :bar | nil | ['foo#foo', 'bar#bar'] | true
+ :bar | :bar | ['foo#foo', 'bar#bar'] | true
+ :foo | nil | 'bar#foo' | true
+ :bar | nil | 'bar#foo' | false
+ :foo | [:foo, :bar] | 'bar#foo' | true
+ :bar | :bar | 'foo#foo' | true
+ :foo | :foo | 'bar#foo' | true
+ :bar | :foo | 'bar#foo' | false
+ :foo | :bar | 'bar#foo' | false
+ [:foo, :bar] | nil | nil | true
+ [:foo, :bar] | nil | 'bar#foo' | true
+ [:foo, :bar] | :foo | 'bar#foo' | true
+ nil | :foo | nil | true
+ nil | :bar | nil | false
+ nil | nil | 'foo#bar' | false
+ nil | nil | 'foo#foo' | true
+ nil | :bar | ['foo#foo', 'bar#bar'] | true
+ nil | :bar | 'foo#foo' | true
+ nil | :foo | 'bar#foo' | true
+ nil | [:foo, :bar] | nil | true
+ nil | [:foo, :bar] | 'bar#foo' | true
+ nil | :bar | 'bar#foo' | false
+ end
- context 'with namespace in path notation' do
- before do
- allow(controller).to receive(:controller_path).and_return('bar/foo')
- end
+ with_them do
+ specify do
+ result = nav_link(controller: controller_param, action: action_param, path: path_param)
- it 'performs checks on both controller and namespace' do
- expect(nav_link(controller: 'foo/foo')).not_to match(/active/)
- expect(nav_link(controller: 'bar/foo')).to match(/active/)
- end
-
- context 'with action param' do
- it "performs checks on both namespace, controller and action when they are all present" do
- expect(nav_link(controller: 'foo/foo', action: :foo)).not_to match(/active/)
- expect(nav_link(controller: 'bar/foo', action: :bar)).not_to match(/active/)
- expect(nav_link(controller: 'bar/foo', action: :foo)).to match(/active/)
- end
+ if active
+ expect(result).to match(/active/)
+ else
+ expect(result).not_to match(/active/)
end
end
end
- context 'with action param' do
- it "performs checks on the current action" do
- expect(nav_link(action: :foo)).to match(/<li class="active">/)
- expect(nav_link(action: :bar)).not_to match(/active/)
- expect(nav_link(action: [:foo, :bar])).to match(/active/)
+ context 'with namespace in path notation' do
+ before do
+ allow(controller).to receive(:controller_path).and_return('bar/foo')
end
- end
- context 'with path param' do
- it "accepts a path shorthand" do
- expect(nav_link(path: 'foo#bar')).not_to match(/active/)
- expect(nav_link(path: 'foo#foo')).to match(/active/)
+ where(:controller_param, :action_param, :path_param, :active) do
+ 'foo/foo' | nil | nil | false
+ 'bar/foo' | nil | nil | true
+ 'foo/foo' | :foo | nil | false
+ 'bar/foo' | :bar | nil | false
+ 'bar/foo' | :foo | nil | true
+ nil | nil | 'foo/foo#foo' | false
+ nil | nil | 'bar/foo#foo' | true
end
- context 'with namespace' do
- before do
- allow(controller).to receive(:controller_path).and_return('bar/foo')
- end
+ with_them do
+ specify do
+ result = nav_link(controller: controller_param, action: action_param, path: path_param)
- it 'accepts a path shorthand with namespace' do
- expect(nav_link(path: 'bar/foo#foo')).to match(/active/)
- expect(nav_link(path: 'foo/foo#foo')).not_to match(/active/)
+ if active
+ expect(result).to match(/active/)
+ else
+ expect(result).not_to match(/active/)
+ end
end
end
end
-
- it "passes extra html options to the list element" do
- expect(nav_link(action: :foo, html_options: { class: 'home' })).to match(/<li class="home active">/)
- expect(nav_link(html_options: { class: 'active' })).to match(/<li class="active">/)
- end
end
end
diff --git a/spec/helpers/timeboxes_helper_spec.rb b/spec/helpers/timeboxes_helper_spec.rb
index 9cbed7668ac..1b9442c0a09 100644
--- a/spec/helpers/timeboxes_helper_spec.rb
+++ b/spec/helpers/timeboxes_helper_spec.rb
@@ -3,42 +3,6 @@
require 'spec_helper'
RSpec.describe TimeboxesHelper do
- describe '#milestones_filter_dropdown_path' do
- let(:project) { create(:project) }
- let(:project2) { create(:project) }
- let(:group) { create(:group) }
-
- context 'when @project present' do
- it 'returns project milestones JSON URL' do
- assign(:project, project)
-
- expect(helper.milestones_filter_dropdown_path).to eq(project_milestones_path(project, :json))
- end
- end
-
- context 'when @target_project present' do
- it 'returns targeted project milestones JSON URL' do
- assign(:target_project, project2)
-
- expect(helper.milestones_filter_dropdown_path).to eq(project_milestones_path(project2, :json))
- end
- end
-
- context 'when @group present' do
- it 'returns group milestones JSON URL' do
- assign(:group, group)
-
- expect(helper.milestones_filter_dropdown_path).to eq(group_milestones_path(group, :json))
- end
- end
-
- context 'when neither of @project/@target_project/@group present' do
- it 'returns dashboard milestones JSON URL' do
- expect(helper.milestones_filter_dropdown_path).to eq(dashboard_milestones_path(:json))
- end
- end
- end
-
describe "#timebox_date_range" do
let(:yesterday) { Date.yesterday }
let(:tomorrow) { yesterday + 2 }
diff --git a/spec/helpers/todos_helper_spec.rb b/spec/helpers/todos_helper_spec.rb
index 9481d756c16..3787864e144 100644
--- a/spec/helpers/todos_helper_spec.rb
+++ b/spec/helpers/todos_helper_spec.rb
@@ -40,26 +40,6 @@ RSpec.describe TodosHelper do
end
end
- describe '#todo_projects_options' do
- let(:projects) { create_list(:project, 3) }
- let(:user) { create(:user) }
-
- it 'returns users authorised projects in json format' do
- projects.first.add_developer(user)
- projects.second.add_developer(user)
-
- allow(helper).to receive(:current_user).and_return(user)
-
- expected_results = [
- { 'id' => '', 'text' => 'Any Project' },
- { 'id' => projects.second.id, 'text' => projects.second.full_name },
- { 'id' => projects.first.id, 'text' => projects.first.full_name }
- ]
-
- expect(Gitlab::Json.parse(helper.todo_projects_options)).to match_array(expected_results)
- end
- end
-
describe '#todo_target_link' do
context 'when given a design' do
let(:todo) { design_todo }
diff --git a/spec/helpers/tracking_helper_spec.rb b/spec/helpers/tracking_helper_spec.rb
index 47b344cfc25..cd2f8f9b7d1 100644
--- a/spec/helpers/tracking_helper_spec.rb
+++ b/spec/helpers/tracking_helper_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe TrackingHelper do
let(:results) do
{
no_data: {},
- with_data: { data: { track_label: 'a', track_event: 'b', track_property: 'c' } }
+ with_data: { data: { track_label: 'a', track_action: 'b', track_property: 'c' } }
}
end
diff --git a/spec/helpers/user_callouts_helper_spec.rb b/spec/helpers/user_callouts_helper_spec.rb
index b6607182461..3dbaa655aeb 100644
--- a/spec/helpers/user_callouts_helper_spec.rb
+++ b/spec/helpers/user_callouts_helper_spec.rb
@@ -81,45 +81,43 @@ RSpec.describe UserCalloutsHelper do
end
end
- describe '.show_service_templates_deprecated?' do
- subject { helper.show_service_templates_deprecated? }
+ describe '.show_service_templates_deprecated_callout?' do
+ using RSpec::Parameterized::TableSyntax
- context 'when user has not dismissed' do
- before do
- allow(helper).to receive(:user_dismissed?).with(described_class::SERVICE_TEMPLATES_DEPRECATED) { false }
- end
+ let_it_be(:admin) { create(:user, :admin) }
+ let_it_be(:non_admin) { create(:user) }
- it { is_expected.to be true }
+ subject { helper.show_service_templates_deprecated_callout? }
+
+ where(:self_managed, :is_admin_user, :has_active_service_template, :callout_dismissed, :should_show_callout) do
+ true | true | true | false | true
+ true | true | true | true | false
+ true | false | true | false | false
+ false | true | true | false | false
+ true | true | false | false | false
end
- context 'when user dismissed' do
+ with_them do
before do
- allow(helper).to receive(:user_dismissed?).with(described_class::SERVICE_TEMPLATES_DEPRECATED) { true }
+ allow(::Gitlab).to receive(:com?).and_return(!self_managed)
+ allow(helper).to receive(:current_user).and_return(is_admin_user ? admin : non_admin)
+ allow(helper).to receive(:user_dismissed?).with(described_class::SERVICE_TEMPLATES_DEPRECATED_CALLOUT) { callout_dismissed }
+ create(:service, :template, type: 'MattermostService', active: has_active_service_template)
end
- it { is_expected.to be false }
+ it { is_expected.to be should_show_callout }
end
end
describe '.show_customize_homepage_banner?' do
- let(:customize_homepage) { true }
-
- subject { helper.show_customize_homepage_banner?(customize_homepage) }
+ subject { helper.show_customize_homepage_banner? }
context 'when user has not dismissed' do
before do
allow(helper).to receive(:user_dismissed?).with(described_class::CUSTOMIZE_HOMEPAGE) { false }
end
- context 'when customize_homepage is set' do
- it { is_expected.to be true }
- end
-
- context 'when customize_homepage is false' do
- let(:customize_homepage) { false }
-
- it { is_expected.to be false }
- end
+ it { is_expected.to be true }
end
context 'when user dismissed' do
diff --git a/spec/helpers/whats_new_helper_spec.rb b/spec/helpers/whats_new_helper_spec.rb
index 017826921ff..0e4b4621560 100644
--- a/spec/helpers/whats_new_helper_spec.rb
+++ b/spec/helpers/whats_new_helper_spec.rb
@@ -3,25 +3,15 @@
require 'spec_helper'
RSpec.describe WhatsNewHelper do
- describe '#whats_new_storage_key' do
- subject { helper.whats_new_storage_key }
+ include Devise::Test::ControllerHelpers
- context 'when version exist' do
- let(:release_item) { double(:item) }
+ describe '#whats_new_version_digest' do
+ let(:digest) { 'digest' }
- before do
- allow(ReleaseHighlight).to receive(:versions).and_return([84.0])
- end
-
- it { is_expected.to eq('display-whats-new-notification-84.0') }
- end
+ it 'calls ReleaseHighlight.most_recent_version_digest' do
+ expect(ReleaseHighlight).to receive(:most_recent_version_digest).and_return(digest)
- context 'when most recent release highlights do NOT exist' do
- before do
- allow(ReleaseHighlight).to receive(:versions).and_return(nil)
- end
-
- it { is_expected.to be_nil }
+ expect(helper.whats_new_version_digest).to eq(digest)
end
end
@@ -45,13 +35,29 @@ RSpec.describe WhatsNewHelper do
end
end
- describe '#whats_new_versions' do
- let(:versions) { [84.0] }
+ describe '#display_whats_new?' do
+ subject { helper.display_whats_new? }
+
+ it 'returns true when gitlab.com' do
+ allow(Gitlab).to receive(:dev_env_org_or_com?).and_return(true)
- it 'returns ReleaseHighlight.versions' do
- expect(ReleaseHighlight).to receive(:versions).and_return(versions)
+ expect(subject).to be true
+ end
+
+ context 'when self-managed' do
+ before do
+ allow(Gitlab).to receive(:dev_env_org_or_com?).and_return(false)
+ end
- expect(helper.whats_new_versions).to eq(versions)
+ it 'returns true if user is signed in' do
+ sign_in(create(:user))
+
+ expect(subject).to be true
+ end
+
+ it "returns false if user isn't signed in" do
+ expect(subject).to be false
+ end
end
end
end
diff --git a/spec/helpers/wiki_helper_spec.rb b/spec/helpers/wiki_helper_spec.rb
index 45e1859893f..dc76f92db1b 100644
--- a/spec/helpers/wiki_helper_spec.rb
+++ b/spec/helpers/wiki_helper_spec.rb
@@ -76,7 +76,7 @@ RSpec.describe WikiHelper do
describe '#wiki_sort_controls' do
let(:wiki) { create(:project_wiki) }
let(:wiki_link) { helper.wiki_sort_controls(wiki, sort, direction) }
- let(:classes) { "btn btn-default has-tooltip reverse-sort-btn qa-reverse-sort rspec-reverse-sort" }
+ let(:classes) { "gl-button btn btn-default btn-icon has-tooltip reverse-sort-btn qa-reverse-sort rspec-reverse-sort" }
def expected_link(sort, direction, icon_class)
path = "/#{wiki.project.full_path}/-/wikis/pages?direction=#{direction}&sort=#{sort}"
diff --git a/spec/initializers/active_record_locking_spec.rb b/spec/initializers/active_record_locking_spec.rb
index e979fa0b793..735ef7b916b 100644
--- a/spec/initializers/active_record_locking_spec.rb
+++ b/spec/initializers/active_record_locking_spec.rb
@@ -11,13 +11,13 @@ RSpec.describe 'ActiveRecord locking' do
end
it 'can be updated' do
- issue.update(title: "New title")
+ issue.update!(title: "New title")
expect(issue.reload.lock_version).to eq(new_lock_version)
end
it 'can be deleted' do
- expect { issue.destroy }.to change { Issue.count }.by(-1)
+ expect { issue.destroy! }.to change { Issue.count }.by(-1)
end
end
diff --git a/spec/initializers/fog_google_https_private_urls_spec.rb b/spec/initializers/fog_google_https_private_urls_spec.rb
index 4825525a3d8..f7b21bf850e 100644
--- a/spec/initializers/fog_google_https_private_urls_spec.rb
+++ b/spec/initializers/fog_google_https_private_urls_spec.rb
@@ -13,11 +13,13 @@ RSpec.describe 'Fog::Storage::GoogleXML::File', :fog_requests do
end
let(:file) do
+ # rubocop:disable Rails/SaveBang
directory = storage.directories.create(key: 'data')
directory.files.create(
body: 'Hello World!',
key: 'hello_world.txt'
)
+ # rubocop:enable Rails/SaveBang
end
it 'delegates to #get_https_url' do
diff --git a/spec/initializers/json_validator_patch_spec.rb b/spec/initializers/json_validator_patch_spec.rb
deleted file mode 100644
index 5d90364ae92..00000000000
--- a/spec/initializers/json_validator_patch_spec.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require 'rspec-parameterized'
-
-RSpec.describe 'JSON validator patch' do
- using RSpec::Parameterized::TableSyntax
-
- let(:schema) { '{"format": "string"}' }
-
- subject { JSON::Validator.validate(schema, data) }
-
- context 'with invalid JSON' do
- where(:data) do
- [
- 'https://example.com',
- '/tmp/test.txt'
- ]
- end
-
- with_them do
- it 'does not attempt to open a file or URI' do
- allow(File).to receive(:read).and_call_original
- allow(URI).to receive(:open).and_call_original
- expect(File).not_to receive(:read).with(data)
- expect(URI).not_to receive(:open).with(data)
- expect(subject).to be true
- end
- end
- end
-
- context 'with valid JSON' do
- let(:data) { %({ 'somekey': 'value' }) }
-
- it 'validates successfully' do
- expect(subject).to be true
- end
- end
-end
diff --git a/spec/initializers/pages_storage_check_spec.rb b/spec/initializers/pages_storage_check_spec.rb
new file mode 100644
index 00000000000..a76002dbdcf
--- /dev/null
+++ b/spec/initializers/pages_storage_check_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'pages storage check' do
+ let(:main_error_message) { "Please enable at least one of the two Pages storage strategy (local_store or object_store) in your config/gitlab.yml." }
+
+ subject(:initializer) { load Rails.root.join('config/initializers/pages_storage_check.rb') }
+
+ context 'when local store does not exist yet' do
+ before do
+ Settings.pages['local_store'] = nil
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when pages is not enabled' do
+ before do
+ Settings.pages['enabled'] = false
+ end
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when pages is enabled' do
+ before do
+ Settings.pages['enabled'] = true
+ Settings.pages['local_store'] = Settingslogic.new({})
+ end
+
+ context 'when pages object storage is not enabled' do
+ before do
+ Settings.pages['object_store']['enabled'] = false
+ end
+
+ context 'when pages local storage is not enabled' do
+ it 'raises an exception' do
+ Settings.pages['local_store']['enabled'] = false
+
+ expect { subject }.to raise_error(main_error_message)
+ end
+ end
+
+ context 'when pages local storage is enabled' do
+ it 'is true' do
+ Settings.pages['local_store']['enabled'] = true
+
+ expect(subject).to be_truthy
+ end
+ end
+ end
+
+ context 'when pages object storage is enabled' do
+ before do
+ Settings.pages['object_store']['enabled'] = true
+ end
+
+ context 'when pages local storage is not enabled' do
+ it 'is true' do
+ Settings.pages['local_store']['enabled'] = false
+
+ expect(subject).to be_truthy
+ end
+ end
+
+ context 'when pages local storage is enabled' do
+ it 'is true' do
+ Settings.pages['local_store']['enabled'] = true
+
+ expect(subject).to be_truthy
+ end
+ end
+ end
+
+ context 'when using integers instead of booleans' do
+ it 'is true' do
+ Settings.pages['local_store']['enabled'] = 1
+ Settings.pages['object_store']['enabled'] = 0
+
+ expect(subject).to be_truthy
+ end
+ end
+
+ context 'when both enabled attributes are not set' do
+ it 'raises an exception' do
+ Settings.pages['local_store']['enabled'] = nil
+ Settings.pages['object_store']['enabled'] = nil
+
+ expect { subject }.to raise_error(main_error_message)
+ end
+ end
+ end
+end
diff --git a/spec/knapsack_env.rb b/spec/knapsack_env.rb
new file mode 100644
index 00000000000..727d18f32e2
--- /dev/null
+++ b/spec/knapsack_env.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'knapsack'
+
+module KnapsackEnv
+ def self.configure!
+ return unless ENV['CI'] && ENV['KNAPSACK_GENERATE_REPORT'] && !ENV['NO_KNAPSACK']
+
+ Knapsack::Adapters::RSpecAdapter.bind
+ end
+end
diff --git a/spec/lib/api/entities/clusters/agent_spec.rb b/spec/lib/api/entities/clusters/agent_spec.rb
new file mode 100644
index 00000000000..04f7ec28407
--- /dev/null
+++ b/spec/lib/api/entities/clusters/agent_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Entities::Clusters::Agent do
+ let_it_be(:cluster_agent) { create(:cluster_agent) }
+
+ subject { described_class.new(cluster_agent).as_json }
+
+ it 'includes basic fields' do
+ expect(subject).to include(
+ id: cluster_agent.id,
+ config_project: a_hash_including(id: cluster_agent.project_id)
+ )
+ end
+end
diff --git a/spec/lib/api/entities/design_management/design_spec.rb b/spec/lib/api/entities/design_management/design_spec.rb
index fe449e3e9bc..fe2b1dadfa7 100644
--- a/spec/lib/api/entities/design_management/design_spec.rb
+++ b/spec/lib/api/entities/design_management/design_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::Entities::DesignManagement::Design do
let_it_be(:design) { create(:design) }
+
let(:entity) { described_class.new(design, request: double) }
subject { entity.as_json }
diff --git a/spec/lib/api/entities/merge_request_changes_spec.rb b/spec/lib/api/entities/merge_request_changes_spec.rb
index f46d8981328..29bfd1da6cc 100644
--- a/spec/lib/api/entities/merge_request_changes_spec.rb
+++ b/spec/lib/api/entities/merge_request_changes_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe ::API::Entities::MergeRequestChanges do
let_it_be(:user) { create(:user) }
let_it_be(:merge_request) { create(:merge_request) }
+
let(:entity) { described_class.new(merge_request, current_user: user) }
subject(:basic_entity) { entity.as_json }
diff --git a/spec/lib/api/entities/project_import_failed_relation_spec.rb b/spec/lib/api/entities/project_import_failed_relation_spec.rb
index 51a684c4564..d3c24f6fce3 100644
--- a/spec/lib/api/entities/project_import_failed_relation_spec.rb
+++ b/spec/lib/api/entities/project_import_failed_relation_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe API::Entities::ProjectImportFailedRelation do
id: import_failure.id,
created_at: import_failure.created_at,
exception_class: import_failure.exception_class,
- exception_message: import_failure.exception_message,
+ exception_message: nil,
relation_name: import_failure.relation_key,
source: import_failure.source
)
diff --git a/spec/lib/api/entities/release_spec.rb b/spec/lib/api/entities/release_spec.rb
index d57c283c1f4..06062634015 100644
--- a/spec/lib/api/entities/release_spec.rb
+++ b/spec/lib/api/entities/release_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe API::Entities::Release do
let_it_be(:project) { create(:project) }
+
let(:release) { create(:release, project: project) }
let(:evidence) { release.evidences.first }
let(:user) { create(:user) }
diff --git a/spec/lib/api/helpers/authentication_spec.rb b/spec/lib/api/helpers/authentication_spec.rb
index 461b0d2f6f9..eea5c10d4f8 100644
--- a/spec/lib/api/helpers/authentication_spec.rb
+++ b/spec/lib/api/helpers/authentication_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe API::Helpers::Authentication do
let_it_be(:project, reload: true) { create(:project, :public) }
let_it_be(:personal_access_token) { create(:personal_access_token, user: user) }
let_it_be(:deploy_token) { create(:deploy_token, read_package_registry: true, write_package_registry: true) }
+ let_it_be(:ci_build) { create(:ci_build, :running, user: user) }
describe 'class methods' do
subject { Class.new.include(described_class::ClassMethods).new }
@@ -176,6 +177,20 @@ RSpec.describe API::Helpers::Authentication do
end
end
+ describe '#ci_build_from_namespace_inheritable' do
+ subject { object.ci_build_from_namespace_inheritable }
+
+ it 'returns #token_from_namespace_inheritable if it is a ci build' do
+ expect(object).to receive(:token_from_namespace_inheritable).and_return(ci_build)
+ expect(subject).to be(ci_build)
+ end
+
+ it 'returns nil if #token_from_namespace_inheritable is not a ci build' do
+ expect(object).to receive(:token_from_namespace_inheritable).and_return(personal_access_token)
+ expect(subject).to eq(nil)
+ end
+ end
+
describe '#user_from_namespace_inheritable' do
subject { object.user_from_namespace_inheritable }
diff --git a/spec/lib/api/helpers/caching_spec.rb b/spec/lib/api/helpers/caching_spec.rb
new file mode 100644
index 00000000000..a8cd061e123
--- /dev/null
+++ b/spec/lib/api/helpers/caching_spec.rb
@@ -0,0 +1,139 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe API::Helpers::Caching do
+ subject(:instance) { Class.new.include(described_class).new }
+
+ describe "#present_cached" do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+
+ let(:presenter) { API::Entities::Todo }
+
+ let(:kwargs) do
+ {
+ with: presenter,
+ project: project
+ }
+ end
+
+ subject do
+ instance.present_cached(presentable, **kwargs)
+ end
+
+ before do
+ # We have to stub #body as it's a Grape method
+ # unavailable in the module by itself
+ expect(instance).to receive(:body) do |data|
+ data
+ end
+
+ allow(instance).to receive(:current_user) { user }
+ end
+
+ context "single object" do
+ let_it_be(:presentable) { create(:todo, project: project) }
+
+ it { is_expected.to be_a(Gitlab::Json::PrecompiledJson) }
+
+ it "uses the presenter" do
+ expect(presenter).to receive(:represent).with(presentable, project: project)
+
+ subject
+ end
+
+ it "is valid JSON" do
+ parsed = Gitlab::Json.parse(subject.to_s)
+
+ expect(parsed).to be_a(Hash)
+ expect(parsed["id"]).to eq(presentable.id)
+ end
+
+ it "fetches from the cache" do
+ expect(instance.cache).to receive(:fetch).with("#{presentable.cache_key}:#{user.cache_key}", expires_in: described_class::DEFAULT_EXPIRY).once
+
+ subject
+ end
+
+ context "when a cache context is supplied" do
+ before do
+ kwargs[:cache_context] = -> (todo) { todo.project.cache_key }
+ end
+
+ it "uses the context to augment the cache key" do
+ expect(instance.cache).to receive(:fetch).with("#{presentable.cache_key}:#{project.cache_key}", expires_in: described_class::DEFAULT_EXPIRY).once
+
+ subject
+ end
+ end
+
+ context "when expires_in is supplied" do
+ it "sets the expiry when accessing the cache" do
+ kwargs[:expires_in] = 7.days
+
+ expect(instance.cache).to receive(:fetch).with("#{presentable.cache_key}:#{user.cache_key}", expires_in: 7.days).once
+
+ subject
+ end
+ end
+ end
+
+ context "for a collection of objects" do
+ let_it_be(:presentable) { Array.new(5).map { create(:todo, project: project) } }
+
+ it { is_expected.to be_an(Gitlab::Json::PrecompiledJson) }
+
+ it "uses the presenter" do
+ presentable.each do |todo|
+ expect(presenter).to receive(:represent).with(todo, project: project)
+ end
+
+ subject
+ end
+
+ it "is valid JSON" do
+ parsed = Gitlab::Json.parse(subject.to_s)
+
+ expect(parsed).to be_an(Array)
+
+ presentable.each_with_index do |todo, i|
+ expect(parsed[i]["id"]).to eq(todo.id)
+ end
+ end
+
+ it "fetches from the cache" do
+ keys = presentable.map { |todo| "#{todo.cache_key}:#{user.cache_key}" }
+
+ expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: described_class::DEFAULT_EXPIRY).once.and_call_original
+
+ subject
+ end
+
+ context "when a cache context is supplied" do
+ before do
+ kwargs[:cache_context] = -> (todo) { todo.project.cache_key }
+ end
+
+ it "uses the context to augment the cache key" do
+ keys = presentable.map { |todo| "#{todo.cache_key}:#{project.cache_key}" }
+
+ expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: described_class::DEFAULT_EXPIRY).once.and_call_original
+
+ subject
+ end
+ end
+
+ context "expires_in is supplied" do
+ it "sets the expiry when accessing the cache" do
+ keys = presentable.map { |todo| "#{todo.cache_key}:#{user.cache_key}" }
+ kwargs[:expires_in] = 7.days
+
+ expect(instance.cache).to receive(:fetch_multi).with(*keys, expires_in: 7.days).once.and_call_original
+
+ subject
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb b/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb
index 6d06fc3618d..99b52236771 100644
--- a/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb
+++ b/spec/lib/api/helpers/packages/dependency_proxy_helpers_spec.rb
@@ -12,6 +12,10 @@ RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
subject { helper.redirect_registry_request(forward_to_registry, package_type, options) { helper.fallback } }
+ before do
+ allow(helper).to receive(:options).and_return(for: API::NpmInstancePackages)
+ end
+
shared_examples 'executing fallback' do
it 'redirects to package registry' do
expect(helper).to receive(:registry_url).never
@@ -23,13 +27,14 @@ RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
end
shared_examples 'executing redirect' do
- it 'redirects to package registry' do
- expect(helper).to receive(:track_event).with('npm_request_forward').once
+ it 'redirects to package registry', :snowplow do
expect(helper).to receive(:registry_url).once
expect(helper).to receive(:redirect).once
expect(helper).to receive(:fallback).never
subject
+
+ expect_snowplow_event(category: 'API::NpmInstancePackages', action: 'npm_request_forward')
end
end
@@ -64,7 +69,6 @@ RSpec.describe API::Helpers::Packages::DependencyProxyHelpers do
let(:package_type) { pkg_type }
it 'raises an error' do
- allow(helper).to receive(:track_event)
expect { subject }.to raise_error(ArgumentError, "Can't build registry_url for package_type #{package_type}")
end
end
diff --git a/spec/lib/api/helpers/packages_manager_clients_helpers_spec.rb b/spec/lib/api/helpers/packages_manager_clients_helpers_spec.rb
index 3c40859da21..e4c5002aa68 100644
--- a/spec/lib/api/helpers/packages_manager_clients_helpers_spec.rb
+++ b/spec/lib/api/helpers/packages_manager_clients_helpers_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe API::Helpers::PackagesManagerClientsHelpers do
let_it_be(:personal_access_token) { create(:personal_access_token) }
let_it_be(:username) { personal_access_token.user.username }
let_it_be(:helper) { Class.new.include(described_class).new }
+
let(:password) { personal_access_token.token }
let(:env) do
@@ -50,6 +51,7 @@ RSpec.describe API::Helpers::PackagesManagerClientsHelpers do
describe '#find_job_from_http_basic_auth' do
let_it_be(:user) { personal_access_token.user }
+
let(:job) { create(:ci_build, user: user, status: :running) }
let(:password) { job.token }
@@ -74,6 +76,7 @@ RSpec.describe API::Helpers::PackagesManagerClientsHelpers do
describe '#find_deploy_token_from_http_basic_auth' do
let_it_be(:deploy_token) { create(:deploy_token) }
+
let(:token) { deploy_token.token }
let(:username) { deploy_token.username }
let(:password) { token }
diff --git a/spec/lib/api/helpers/variables_helpers_spec.rb b/spec/lib/api/helpers/variables_helpers_spec.rb
new file mode 100644
index 00000000000..de6bebaa827
--- /dev/null
+++ b/spec/lib/api/helpers/variables_helpers_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Helpers::VariablesHelpers do
+ let(:helper) { Class.new.include(described_class).new }
+
+ describe '#filter_variable_parameters' do
+ let(:project) { double }
+ let(:params) { double }
+
+ subject { helper.filter_variable_parameters(project, params) }
+
+ it 'returns unmodified params (overridden in EE)' do
+ expect(subject).to eq(params)
+ end
+ end
+
+ describe '#find_variable' do
+ let(:owner) { double }
+ let(:params) { double }
+ let(:variables) { [double] }
+
+ subject { helper.find_variable(owner, params) }
+
+ before do
+ expect(Ci::VariablesFinder).to receive(:new).with(owner, params)
+ .and_return(double(execute: variables))
+ end
+
+ it { is_expected.to eq(variables.first) }
+
+ context 'there are multiple variables with the supplied key' do
+ let(:variables) { [double, double] }
+
+ it 'raises a conflict!' do
+ expect(helper).to receive(:conflict!).with(/There are multiple variables with provided parameters/)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index bdf04fafaae..15b22fcf25e 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -47,6 +47,58 @@ RSpec.describe API::Helpers do
end
end
+ describe '#find_project!' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+
+ shared_examples 'private project without access' do
+ before do
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value('private'))
+ allow(subject).to receive(:authenticate_non_public?).and_return(false)
+ end
+
+ it 'returns not found' do
+ expect(subject).to receive(:not_found!)
+
+ subject.find_project!(project.id)
+ end
+ end
+
+ context 'when user is authenticated' do
+ before do
+ subject.instance_variable_set(:@current_user, user)
+ subject.instance_variable_set(:@initial_current_user, user)
+ end
+
+ context 'public project' do
+ it 'returns requested project' do
+ expect(subject.find_project!(project.id)).to eq(project)
+ end
+ end
+
+ context 'private project' do
+ it_behaves_like 'private project without access'
+ end
+ end
+
+ context 'when user is not authenticated' do
+ before do
+ subject.instance_variable_set(:@current_user, nil)
+ subject.instance_variable_set(:@initial_current_user, nil)
+ end
+
+ context 'public project' do
+ it 'returns requested project' do
+ expect(subject.find_project!(project.id)).to eq(project)
+ end
+ end
+
+ context 'private project' do
+ it_behaves_like 'private project without access'
+ end
+ end
+ end
+
describe '#find_namespace' do
let(:namespace) { create(:namespace) }
@@ -175,64 +227,27 @@ RSpec.describe API::Helpers do
end
end
- describe '#track_event' do
- it "creates a gitlab tracking event", :snowplow do
- subject.track_event('my_event', category: 'foo')
-
- expect_snowplow_event(category: 'foo', action: 'my_event')
- end
-
- it "logs an exception" do
- expect(Gitlab::AppLogger).to receive(:warn).with(/Tracking event failed/)
-
- subject.track_event('my_event', category: nil)
- end
- end
-
describe '#increment_unique_values' do
let(:value) { '9f302fea-f828-4ca9-aef4-e10bd723c0b3' }
let(:event_name) { 'g_compliance_dashboard' }
let(:unknown_event) { 'unknown' }
- let(:feature) { "usage_data_#{event_name}" }
-
- before do
- skip_feature_flags_yaml_validation
- end
- context 'with feature enabled' do
- before do
- stub_feature_flags(feature => true)
- end
+ it 'tracks redis hll event' do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(event_name, values: value)
- it 'tracks redis hll event' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:track_event).with(event_name, values: value)
-
- subject.increment_unique_values(event_name, value)
- end
-
- it 'logs an exception for unknown event' do
- expect(Gitlab::AppLogger).to receive(:warn).with("Redis tracking event failed for event: #{unknown_event}, message: Unknown event #{unknown_event}")
-
- subject.increment_unique_values(unknown_event, value)
- end
+ subject.increment_unique_values(event_name, value)
+ end
- it 'does not track event for nil values' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+ it 'logs an exception for unknown event' do
+ expect(Gitlab::AppLogger).to receive(:warn).with("Redis tracking event failed for event: #{unknown_event}, message: Unknown event #{unknown_event}")
- subject.increment_unique_values(unknown_event, nil)
- end
+ subject.increment_unique_values(unknown_event, value)
end
- context 'with feature disabled' do
- before do
- stub_feature_flags(feature => false)
- end
-
- it 'does not track event' do
- expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
+ it 'does not track event for nil values' do
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
- subject.increment_unique_values(event_name, value)
- end
+ subject.increment_unique_values(unknown_event, nil)
end
end
diff --git a/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb b/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb
index 872ba1ab43d..6399fc9053b 100644
--- a/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb
+++ b/spec/lib/atlassian/jira_connect/serializers/pull_request_entity_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Atlassian::JiraConnect::Serializers::PullRequestEntity do
end
context 'with user_notes_count option' do
- let(:user_notes_count) { merge_requests.map { |merge_request| [merge_request.id, 1] }.to_h }
+ let(:user_notes_count) { merge_requests.to_h { |merge_request| [merge_request.id, 1] } }
subject { described_class.represent(merge_requests, user_notes_count: user_notes_count).as_json }
diff --git a/spec/lib/banzai/filter/commit_trailers_filter_spec.rb b/spec/lib/banzai/filter/commit_trailers_filter_spec.rb
index 03a6cc34962..f7cb6b92b48 100644
--- a/spec/lib/banzai/filter/commit_trailers_filter_spec.rb
+++ b/spec/lib/banzai/filter/commit_trailers_filter_spec.rb
@@ -139,6 +139,12 @@ RSpec.describe Banzai::Filter::CommitTrailersFilter do
end
context "structure" do
+    it 'starts with two newlines to separate it from the actual commit message' do
+ doc = filter(commit_message_html)
+
+ expect(doc.xpath('pre').text).to start_with("\n\n")
+ end
+
it 'preserves the commit trailer structure' do
doc = filter(commit_message_html)
diff --git a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
index ec17bb26346..23626576c0c 100644
--- a/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
+++ b/spec/lib/banzai/filter/gollum_tags_filter_spec.rb
@@ -16,18 +16,14 @@ RSpec.describe Banzai::Filter::GollumTagsFilter do
context 'linking internal images' do
it 'creates img tag if image exists' do
- gollum_file_double = double('Gollum::File',
- mime_type: 'image/jpeg',
- name: 'images/image.jpg',
- path: 'images/image.jpg',
- raw_data: '')
- wiki_file = Gitlab::Git::WikiFile.new(gollum_file_double)
+ blob = double(mime_type: 'image/jpeg', name: 'images/image.jpg', path: 'images/image.jpg', data: '')
+ wiki_file = Gitlab::Git::WikiFile.new(blob)
expect(wiki).to receive(:find_file).with('images/image.jpg', load_content: false).and_return(wiki_file)
tag = '[[images/image.jpg]]'
doc = filter("See #{tag}", wiki: wiki)
- expect(doc.at_css('img')['data-src']).to eq "#{wiki.wiki_base_path}/images/image.jpg"
+ expect(doc.at_css('img')['src']).to eq 'images/image.jpg'
end
it 'does not create img tag if image does not exist' do
@@ -45,7 +41,7 @@ RSpec.describe Banzai::Filter::GollumTagsFilter do
tag = '[[http://example.com/image.jpg]]'
doc = filter("See #{tag}", wiki: wiki)
- expect(doc.at_css('img')['data-src']).to eq "http://example.com/image.jpg"
+ expect(doc.at_css('img')['src']).to eq "http://example.com/image.jpg"
end
it 'does not create img tag for invalid URL' do
diff --git a/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb b/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
index 3c736b46131..9ccea1cc3e9 100644
--- a/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
+++ b/spec/lib/banzai/filter/inline_metrics_redactor_filter_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Banzai::Filter::InlineMetricsRedactorFilter do
include FilterSpecHelper
let_it_be(:project) { create(:project) }
+
let(:url) { urls.metrics_dashboard_project_environment_url(project, 1, embedded: true) }
let(:input) { %(<a href="#{url}">example</a>) }
let(:doc) { filter(input) }
@@ -38,6 +39,7 @@ RSpec.describe Banzai::Filter::InlineMetricsRedactorFilter do
context 'for a cluster metric embed' do
let_it_be(:cluster) { create(:cluster, :provided_by_gcp, :project, projects: [project]) }
+
let(:params) { [project.namespace.path, project.path, cluster.id] }
let(:query_params) { { group: 'Cluster Health', title: 'CPU Usage', y_label: 'CPU (cores)' } }
let(:url) { urls.metrics_dashboard_namespace_project_cluster_url(*params, **query_params, format: :json) }
@@ -84,6 +86,7 @@ RSpec.describe Banzai::Filter::InlineMetricsRedactorFilter do
context 'for an alert embed' do
let_it_be(:alert) { create(:prometheus_alert, project: project) }
+
let(:url) do
urls.metrics_dashboard_project_prometheus_alert_url(
project,
diff --git a/spec/lib/banzai/filter/math_filter_spec.rb b/spec/lib/banzai/filter/math_filter_spec.rb
index 9f6688f4f7d..6d22fa3a001 100644
--- a/spec/lib/banzai/filter/math_filter_spec.rb
+++ b/spec/lib/banzai/filter/math_filter_spec.rb
@@ -91,35 +91,35 @@ RSpec.describe Banzai::Filter::MathFilter do
# Display math
it 'adds data-math-style display attribute to display math' do
- doc = filter('<pre class="code highlight js-syntax-highlight math" v-pre="true"><code>2+2</code></pre>')
+ doc = filter('<pre class="code highlight js-syntax-highlight language-math" v-pre="true"><code>2+2</code></pre>')
pre = doc.xpath('descendant-or-self::pre').first
expect(pre['data-math-style']).to eq 'display'
end
it 'adds js-render-math class to display math' do
- doc = filter('<pre class="code highlight js-syntax-highlight math" v-pre="true"><code>2+2</code></pre>')
+ doc = filter('<pre class="code highlight js-syntax-highlight language-math" v-pre="true"><code>2+2</code></pre>')
pre = doc.xpath('descendant-or-self::pre').first
expect(pre[:class]).to include("js-render-math")
end
it 'ignores code blocks that are not math' do
- input = '<pre class="code highlight js-syntax-highlight plaintext" v-pre="true"><code>2+2</code></pre>'
+ input = '<pre class="code highlight js-syntax-highlight language-plaintext" v-pre="true"><code>2+2</code></pre>'
doc = filter(input)
expect(doc.to_s).to eq input
end
it 'requires the pre to contain both code and math' do
- input = '<pre class="highlight js-syntax-highlight plaintext math" v-pre="true"><code>2+2</code></pre>'
+ input = '<pre class="highlight js-syntax-highlight language-plaintext language-math" v-pre="true"><code>2+2</code></pre>'
doc = filter(input)
expect(doc.to_s).to eq input
end
it 'handles dollar signs around display math' do
- doc = filter('$<pre class="code highlight js-syntax-highlight math" v-pre="true"><code>2+2</code></pre>$')
+ doc = filter('$<pre class="code highlight js-syntax-highlight language-math" v-pre="true"><code>2+2</code></pre>$')
before = doc.xpath('descendant-or-self::text()[1]').first
after = doc.xpath('descendant-or-self::text()[3]').first
diff --git a/spec/lib/banzai/filter/abstract_reference_filter_spec.rb b/spec/lib/banzai/filter/references/abstract_reference_filter_spec.rb
index 797f1c8d52f..076c112ac87 100644
--- a/spec/lib/banzai/filter/abstract_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/abstract_reference_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::AbstractReferenceFilter do
+RSpec.describe Banzai::Filter::References::AbstractReferenceFilter do
let_it_be(:project) { create(:project) }
let(:doc) { Nokogiri::HTML.fragment('') }
diff --git a/spec/lib/banzai/filter/alert_reference_filter_spec.rb b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
index c57a8a7321c..7c6b0cac24b 100644
--- a/spec/lib/banzai/filter/alert_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/alert_reference_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::AlertReferenceFilter do
+RSpec.describe Banzai::Filter::References::AlertReferenceFilter do
include FilterSpecHelper
let_it_be(:project) { create(:project, :public) }
diff --git a/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb b/spec/lib/banzai/filter/references/commit_range_reference_filter_spec.rb
index f04d3212437..b235de06b30 100644
--- a/spec/lib/banzai/filter/commit_range_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/commit_range_reference_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::CommitRangeReferenceFilter do
+RSpec.describe Banzai::Filter::References::CommitRangeReferenceFilter do
include FilterSpecHelper
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/lib/banzai/filter/commit_reference_filter_spec.rb b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
index 925fd031d95..bee8e42d12e 100644
--- a/spec/lib/banzai/filter/commit_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/commit_reference_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::CommitReferenceFilter do
+RSpec.describe Banzai::Filter::References::CommitReferenceFilter do
include FilterSpecHelper
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/lib/banzai/filter/design_reference_filter_spec.rb b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb
index 847c398964a..52514ad17fc 100644
--- a/spec/lib/banzai/filter/design_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/design_reference_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::DesignReferenceFilter do
+RSpec.describe Banzai::Filter::References::DesignReferenceFilter do
include FilterSpecHelper
include DesignManagementTestHelpers
diff --git a/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
index 35ef2abfa63..3b274f98020 100644
--- a/spec/lib/banzai/filter/external_issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/external_issue_reference_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::ExternalIssueReferenceFilter do
+RSpec.describe Banzai::Filter::References::ExternalIssueReferenceFilter do
include FilterSpecHelper
let_it_be_with_refind(:project) { create(:project) }
@@ -184,6 +184,7 @@ RSpec.describe Banzai::Filter::ExternalIssueReferenceFilter do
context "jira project" do
let_it_be(:service) { create(:jira_service, project: project) }
+
let(:reference) { issue.to_reference }
context "with right markdown" do
diff --git a/spec/lib/banzai/filter/feature_flag_reference_filter_spec.rb b/spec/lib/banzai/filter/references/feature_flag_reference_filter_spec.rb
index 2d7089853cf..c64b66f746e 100644
--- a/spec/lib/banzai/filter/feature_flag_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/feature_flag_reference_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::FeatureFlagReferenceFilter do
+RSpec.describe Banzai::Filter::References::FeatureFlagReferenceFilter do
include FilterSpecHelper
let_it_be(:project) { create(:project, :public) }
diff --git a/spec/lib/banzai/filter/issue_reference_filter_spec.rb b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
index 4b8b575c1f0..b849355f6db 100644
--- a/spec/lib/banzai/filter/issue_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/issue_reference_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::IssueReferenceFilter do
+RSpec.describe Banzai::Filter::References::IssueReferenceFilter do
include FilterSpecHelper
include DesignManagementTestHelpers
diff --git a/spec/lib/banzai/filter/label_reference_filter_spec.rb b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
index 726ef8c57ab..db7dda96cad 100644
--- a/spec/lib/banzai/filter/label_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/label_reference_filter_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require 'html/pipeline'
-RSpec.describe Banzai::Filter::LabelReferenceFilter do
+RSpec.describe Banzai::Filter::References::LabelReferenceFilter do
include FilterSpecHelper
let(:project) { create(:project, :public, name: 'sample-project') }
diff --git a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
index 811c2aca342..7a634b0b513 100644
--- a/spec/lib/banzai/filter/merge_request_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/merge_request_reference_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::MergeRequestReferenceFilter do
+RSpec.describe Banzai::Filter::References::MergeRequestReferenceFilter do
include FilterSpecHelper
let(:project) { create(:project, :public) }
diff --git a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
index 276fa7952be..dafdc71ce64 100644
--- a/spec/lib/banzai/filter/milestone_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/milestone_reference_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::MilestoneReferenceFilter do
+RSpec.describe Banzai::Filter::References::MilestoneReferenceFilter do
include FilterSpecHelper
let_it_be(:parent_group) { create(:group, :public) }
diff --git a/spec/lib/banzai/filter/project_reference_filter_spec.rb b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
index ac7a90a5893..7a77d57cd42 100644
--- a/spec/lib/banzai/filter/project_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/project_reference_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::ProjectReferenceFilter do
+RSpec.describe Banzai::Filter::References::ProjectReferenceFilter do
include FilterSpecHelper
def invalidate_reference(reference)
diff --git a/spec/lib/banzai/filter/reference_filter_spec.rb b/spec/lib/banzai/filter/references/reference_filter_spec.rb
index 2888965dbc4..4bcb41ef2a9 100644
--- a/spec/lib/banzai/filter/reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/reference_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::ReferenceFilter do
+RSpec.describe Banzai::Filter::References::ReferenceFilter do
let(:project) { build_stubbed(:project) }
describe '#each_node' do
diff --git a/spec/lib/banzai/filter/snippet_reference_filter_spec.rb b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
index f23fbc5be88..32a706925ba 100644
--- a/spec/lib/banzai/filter/snippet_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/snippet_reference_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::SnippetReferenceFilter do
+RSpec.describe Banzai::Filter::References::SnippetReferenceFilter do
include FilterSpecHelper
let(:project) { create(:project, :public) }
diff --git a/spec/lib/banzai/filter/user_reference_filter_spec.rb b/spec/lib/banzai/filter/references/user_reference_filter_spec.rb
index b8baccf6658..e4703606b47 100644
--- a/spec/lib/banzai/filter/user_reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/user_reference_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::UserReferenceFilter do
+RSpec.describe Banzai::Filter::References::UserReferenceFilter do
include FilterSpecHelper
def get_reference(user)
diff --git a/spec/lib/banzai/filter/suggestion_filter_spec.rb b/spec/lib/banzai/filter/suggestion_filter_spec.rb
index 7d6092e21e9..d74bac4898e 100644
--- a/spec/lib/banzai/filter/suggestion_filter_spec.rb
+++ b/spec/lib/banzai/filter/suggestion_filter_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe Banzai::Filter::SuggestionFilter do
include FilterSpecHelper
- let(:input) { %(<pre class="code highlight js-syntax-highlight suggestion"><code>foo\n</code></pre>) }
+ let(:input) { %(<pre class="code highlight js-syntax-highlight language-suggestion"><code>foo\n</code></pre>) }
let(:default_context) do
{ suggestions_filter_enabled: true }
end
@@ -26,7 +26,7 @@ RSpec.describe Banzai::Filter::SuggestionFilter do
context 'multi-line suggestions' do
let(:data_attr) { Banzai::Filter::SyntaxHighlightFilter::LANG_PARAMS_ATTR }
- let(:input) { %(<pre class="code highlight js-syntax-highlight suggestion" #{data_attr}="-3+2"><code>foo\n</code></pre>) }
+ let(:input) { %(<pre class="code highlight js-syntax-highlight language-suggestion" #{data_attr}="-3+2"><code>foo\n</code></pre>) }
it 'element has correct data-lang-params' do
doc = filter(input, default_context)
diff --git a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
index 78f84ee44f7..16e30604c99 100644
--- a/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
+++ b/spec/lib/banzai/filter/syntax_highlight_filter_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "highlights as plaintext" do
result = filter('<pre><code>def fun end</code></pre>')
- expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre>')
+ expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">def fun end</span></code></pre>')
end
include_examples "XSS prevention", ""
@@ -38,7 +38,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "highlights as that language" do
result = filter('<pre><code lang="ruby">def fun end</code></pre>')
- expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">fun</span> <span class="k">end</span></span></code></pre>')
+ expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight language-ruby" lang="ruby" v-pre="true"><code><span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">fun</span> <span class="k">end</span></span></code></pre>')
end
include_examples "XSS prevention", "ruby"
@@ -48,7 +48,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "highlights as plaintext" do
result = filter('<pre><code lang="gnuplot">This is a test</code></pre>')
- expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>')
+ expect(result.to_html).to eq('<pre class="code highlight js-syntax-highlight language-plaintext" lang="plaintext" v-pre="true"><code><span id="LC1" class="line" lang="plaintext">This is a test</span></code></pre>')
end
include_examples "XSS prevention", "gnuplot"
@@ -63,7 +63,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "highlights as plaintext but with the correct language attribute and class" do
result = filter(%{<pre><code lang="#{lang}">This is a test</code></pre>})
- expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight #{lang}" lang="#{lang}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>})
+ expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>})
end
include_examples "XSS prevention", lang
@@ -75,7 +75,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "includes data-lang-params tag with extra information" do
result = filter(%{<pre><code lang="#{lang}#{delimiter}#{lang_params}">This is a test</code></pre>})
- expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight #{lang}" lang="#{lang}" #{data_attr}="#{lang_params}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>})
+ expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>})
end
include_examples "XSS prevention", lang
@@ -93,7 +93,7 @@ RSpec.describe Banzai::Filter::SyntaxHighlightFilter do
it "delimits on the first appearance" do
result = filter(%{<pre><code lang="#{lang}#{delimiter}#{lang_params}#{delimiter}more-things">This is a test</code></pre>})
- expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight #{lang}" lang="#{lang}" #{data_attr}="#{lang_params}#{delimiter}more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>})
+ expect(result.to_html).to eq(%{<pre class="code highlight js-syntax-highlight language-#{lang}" lang="#{lang}" #{data_attr}="#{lang_params}#{delimiter}more-things" v-pre="true"><code><span id="LC1" class="line" lang="#{lang}">This is a test</span></code></pre>})
end
end
end
diff --git a/spec/lib/banzai/filter/upload_link_filter_spec.rb b/spec/lib/banzai/filter/upload_link_filter_spec.rb
index 0f8c773c68d..9ca499be665 100644
--- a/spec/lib/banzai/filter/upload_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/upload_link_filter_spec.rb
@@ -35,6 +35,7 @@ RSpec.describe Banzai::Filter::UploadLinkFilter do
let_it_be(:project) { create(:project, :public) }
let_it_be(:user) { create(:user) }
+
let(:group) { nil }
let(:project_path) { project.full_path }
let(:only_path) { true }
@@ -114,6 +115,7 @@ RSpec.describe Banzai::Filter::UploadLinkFilter do
context 'to a group upload' do
let(:upload_link) { link('/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg') }
let_it_be(:group) { create(:group) }
+
let(:project) { nil }
let(:relative_path) { "/groups/#{group.full_path}/-/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg" }
diff --git a/spec/lib/banzai/filter/wiki_link_filter_spec.rb b/spec/lib/banzai/filter/wiki_link_filter_spec.rb
index d1f6ee49260..b5b5349946b 100644
--- a/spec/lib/banzai/filter/wiki_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/wiki_link_filter_spec.rb
@@ -22,6 +22,15 @@ RSpec.describe Banzai::Filter::WikiLinkFilter do
expect(filtered_link.attribute('href').value).to eq('/uploads/a.test')
end
+ describe 'when links point to the relative wiki path' do
+ it 'does not rewrite links' do
+ path = "#{wiki.wiki_base_path}/#{repository_upload_folder}/a.jpg"
+ filtered_link = filter("<a href='#{path}'>Link</a>", wiki: wiki, page_slug: 'home').children[0]
+
+ expect(filtered_link.attribute('href').value).to eq(path)
+ end
+ end
+
describe "when links point to the #{Wikis::CreateAttachmentService::ATTACHMENT_PATH} folder" do
context 'with an "a" html tag' do
it 'rewrites links' do
diff --git a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
index 31047b9494a..e24177a7043 100644
--- a/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/gfm_pipeline_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe Banzai::Pipeline::GfmPipeline do
issue = create(:issue, project: project)
markdown = "text #{issue.to_reference(project, full: true)}"
- expect_any_instance_of(Banzai::Filter::ReferenceFilter).to receive(:each_node).once
+ expect_any_instance_of(Banzai::Filter::References::ReferenceFilter).to receive(:each_node).once
described_class.call(markdown, project: project)
end
@@ -145,6 +145,7 @@ RSpec.describe Banzai::Pipeline::GfmPipeline do
describe 'emoji in references' do
let_it_be(:project) { create(:project, :public) }
+
let(:emoji) { '💯' }
it 'renders a label reference with emoji inside' do
diff --git a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
index b102de24041..007d310247b 100644
--- a/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/wiki_pipeline_spec.rb
@@ -289,4 +289,29 @@ RSpec.describe Banzai::Pipeline::WikiPipeline do
expect(output).to include('<audio src="/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/audio%20file%20name.wav"')
end
end
+
+ describe 'gollum tag filters' do
+ context 'when local image file exists' do
+ it 'sets the proper attributes for the image' do
+ gollum_file_double = double('Gollum::File',
+ mime_type: 'image/jpeg',
+ name: 'images/image.jpg',
+ path: 'images/image.jpg',
+ data: '')
+
+ wiki_file = Gitlab::Git::WikiFile.new(gollum_file_double)
+ markdown = "[[#{wiki_file.path}]]"
+
+ expect(wiki).to receive(:find_file).with(wiki_file.path, load_content: false).and_return(wiki_file)
+
+ output = described_class.to_html(markdown, project: project, wiki: wiki, page_slug: page.slug)
+ doc = Nokogiri::HTML::DocumentFragment.parse(output)
+
+ full_path = "/wiki_link_ns/wiki_link_project/-/wikis/nested/twice/#{wiki_file.path}"
+ expect(doc.css('a')[0].attr('href')).to eq(full_path)
+ expect(doc.css('img')[0].attr('class')).to eq('gfm lazy')
+ expect(doc.css('img')[0].attr('data-src')).to eq(full_path)
+ end
+ end
+ end
end
diff --git a/spec/lib/banzai/reference_parser/external_issue_parser_spec.rb b/spec/lib/banzai/reference_parser/external_issue_parser_spec.rb
index 5f92eb42e74..0c1b98e5ec3 100644
--- a/spec/lib/banzai/reference_parser/external_issue_parser_spec.rb
+++ b/spec/lib/banzai/reference_parser/external_issue_parser_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Banzai::ReferenceParser::ExternalIssueParser do
levels.each do |level|
it "creates reference when the feature is #{level}" do
- project.project_feature.update(issues_access_level: level)
+ project.project_feature.update!(issues_access_level: level)
visible_nodes = subject.nodes_visible_to_user(user, [link])
diff --git a/spec/lib/banzai/reference_redactor_spec.rb b/spec/lib/banzai/reference_redactor_spec.rb
index 668e427cfa2..78cceedd0e5 100644
--- a/spec/lib/banzai/reference_redactor_spec.rb
+++ b/spec/lib/banzai/reference_redactor_spec.rb
@@ -64,7 +64,7 @@ RSpec.describe Banzai::ReferenceRedactor do
let(:redactor) { described_class.new(Banzai::RenderContext.new(project, user)) }
before do
- project.update(pending_delete: true)
+ project.update!(pending_delete: true)
end
it 'redacts an issue attached' do
diff --git a/spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb b/spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb
new file mode 100644
index 00000000000..721dacbe3f4
--- /dev/null
+++ b/spec/lib/bulk_imports/common/extractors/rest_extractor_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Common::Extractors::RestExtractor do
+ let(:http_client) { instance_double(BulkImports::Clients::Http) }
+ let(:options) { { query: double(to_h: { resource: nil, query: nil }) } }
+ let(:response) { double(parsed_response: { 'data' => { 'foo' => 'bar' } }, headers: { 'x-next-page' => '2' }) }
+
+ subject { described_class.new(options) }
+
+ describe '#extract' do
+ before do
+ allow(subject).to receive(:http_client).and_return(http_client)
+ allow(http_client).to receive(:get).and_return(response)
+ end
+
+ it 'returns an instance of ExtractedData' do
+ entity = create(:bulk_import_entity)
+ tracker = create(:bulk_import_tracker, entity: entity)
+ context = BulkImports::Pipeline::Context.new(tracker)
+
+ extracted_data = subject.extract(context)
+
+ expect(extracted_data).to be_instance_of(BulkImports::Pipeline::ExtractedData)
+ expect(extracted_data.data).to contain_exactly(response.parsed_response)
+ expect(extracted_data.next_page).to eq(response.headers['x-next-page'])
+ expect(extracted_data.has_next_page?).to eq(true)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/common/transformers/user_reference_transformer_spec.rb b/spec/lib/bulk_imports/common/transformers/user_reference_transformer_spec.rb
index ff11a10bfe9..ba74c173794 100644
--- a/spec/lib/bulk_imports/common/transformers/user_reference_transformer_spec.rb
+++ b/spec/lib/bulk_imports/common/transformers/user_reference_transformer_spec.rb
@@ -8,7 +8,8 @@ RSpec.describe BulkImports::Common::Transformers::UserReferenceTransformer do
let_it_be(:group) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import) }
let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import, group: group) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:hash) do
{
@@ -51,19 +52,26 @@ RSpec.describe BulkImports::Common::Transformers::UserReferenceTransformer do
end
context 'when custom reference is provided' do
- it 'updates provided reference' do
- hash = {
- 'author' => {
- 'public_email' => user.email
+ shared_examples 'updates provided reference' do |reference|
+ let(:hash) do
+ {
+ 'author' => {
+ 'public_email' => user.email
+ }
}
- }
+ end
- transformer = described_class.new(reference: 'author')
- result = transformer.transform(context, hash)
+ it 'updates provided reference' do
+ transformer = described_class.new(reference: reference)
+ result = transformer.transform(context, hash)
- expect(result['author']).to be_nil
- expect(result['author_id']).to eq(user.id)
+ expect(result['author']).to be_nil
+ expect(result['author_id']).to eq(user.id)
+ end
end
+
+ include_examples 'updates provided reference', 'author'
+ include_examples 'updates provided reference', :author
end
end
end
diff --git a/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb b/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb
index 627247c04ab..ac8786440e9 100644
--- a/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb
+++ b/spec/lib/bulk_imports/groups/extractors/subgroups_extractor_spec.rb
@@ -8,8 +8,9 @@ RSpec.describe BulkImports::Groups::Extractors::SubgroupsExtractor do
bulk_import = create(:bulk_import)
create(:bulk_import_configuration, bulk_import: bulk_import)
entity = create(:bulk_import_entity, bulk_import: bulk_import)
+ tracker = create(:bulk_import_tracker, entity: entity)
response = [{ 'test' => 'group' }]
- context = BulkImports::Pipeline::Context.new(entity)
+ context = BulkImports::Pipeline::Context.new(tracker)
allow_next_instance_of(BulkImports::Clients::Http) do |client|
allow(client).to receive(:each_page).and_return(response)
diff --git a/spec/lib/bulk_imports/groups/graphql/get_group_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_group_query_spec.rb
index ef46da7062b..b0f8f74783b 100644
--- a/spec/lib/bulk_imports/groups/graphql/get_group_query_spec.rb
+++ b/spec/lib/bulk_imports/groups/graphql/get_group_query_spec.rb
@@ -4,10 +4,10 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Graphql::GetGroupQuery do
describe '#variables' do
- let(:entity) { double(source_full_path: 'test', bulk_import: nil) }
- let(:context) { BulkImports::Pipeline::Context.new(entity) }
-
it 'returns query variables based on entity information' do
+ entity = double(source_full_path: 'test', bulk_import: nil)
+ tracker = double(entity: entity)
+ context = BulkImports::Pipeline::Context.new(tracker)
expected = { full_path: entity.source_full_path }
expect(described_class.variables(context)).to eq(expected)
diff --git a/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb
index 85f82be7d18..61db644a372 100644
--- a/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb
+++ b/spec/lib/bulk_imports/groups/graphql/get_labels_query_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Graphql::GetLabelsQuery do
it 'has a valid query' do
- entity = create(:bulk_import_entity)
- context = BulkImports::Pipeline::Context.new(entity)
+ tracker = create(:bulk_import_tracker)
+ context = BulkImports::Pipeline::Context.new(tracker)
query = GraphQL::Query.new(
GitlabSchema,
diff --git a/spec/lib/bulk_imports/groups/graphql/get_members_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_members_query_spec.rb
index 5d05f5a2d30..d0c4bb817b2 100644
--- a/spec/lib/bulk_imports/groups/graphql/get_members_query_spec.rb
+++ b/spec/lib/bulk_imports/groups/graphql/get_members_query_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Graphql::GetMembersQuery do
it 'has a valid query' do
- entity = create(:bulk_import_entity)
- context = BulkImports::Pipeline::Context.new(entity)
+ tracker = create(:bulk_import_tracker)
+ context = BulkImports::Pipeline::Context.new(tracker)
query = GraphQL::Query.new(
GitlabSchema,
diff --git a/spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb b/spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb
index a38505fbf85..7a0f964c5f3 100644
--- a/spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb
+++ b/spec/lib/bulk_imports/groups/graphql/get_milestones_query_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Graphql::GetMilestonesQuery do
it 'has a valid query' do
- entity = create(:bulk_import_entity)
- context = BulkImports::Pipeline::Context.new(entity)
+ tracker = create(:bulk_import_tracker)
+ context = BulkImports::Pipeline::Context.new(tracker)
query = GraphQL::Query.new(
GitlabSchema,
diff --git a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
index 183292722d2..533955b057c 100644
--- a/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
+++ b/spec/lib/bulk_imports/groups/loaders/group_loader_spec.rb
@@ -4,12 +4,13 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Loaders::GroupLoader do
describe '#load' do
- let(:user) { create(:user) }
- let(:data) { { foo: :bar } }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+ let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:service_double) { instance_double(::Groups::CreateService) }
- let(:bulk_import) { create(:bulk_import, user: user) }
- let(:entity) { create(:bulk_import_entity, bulk_import: bulk_import) }
- let(:context) { BulkImports::Pipeline::Context.new(entity) }
+ let(:data) { { foo: :bar } }
subject { described_class.new }
diff --git a/spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb
new file mode 100644
index 00000000000..9fa35c4707d
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/pipelines/badges_pipeline_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Pipelines::BadgesPipeline do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Group',
+ destination_namespace: group.full_path,
+ group: group
+ )
+ end
+
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
+
+ subject { described_class.new(context) }
+
+ describe '#run' do
+ it 'imports a group badge' do
+ first_page = extracted_data(has_next_page: true)
+ last_page = extracted_data(name: 'badge2')
+
+ allow_next_instance_of(BulkImports::Common::Extractors::RestExtractor) do |extractor|
+ allow(extractor)
+ .to receive(:extract)
+ .and_return(first_page, last_page)
+ end
+
+ expect { subject.run }.to change(Badge, :count).by(2)
+
+ badge = group.badges.last
+
+ expect(badge.name).to eq('badge2')
+ expect(badge.link_url).to eq(badge_data['link_url'])
+ expect(badge.image_url).to eq(badge_data['image_url'])
+ end
+
+ describe '#load' do
+ it 'creates a badge' do
+ expect { subject.load(context, badge_data) }.to change(Badge, :count).by(1)
+
+ badge = group.badges.first
+
+ badge_data.each do |key, value|
+ expect(badge[key]).to eq(value)
+ end
+ end
+
+ it 'does nothing when the data is blank' do
+ expect { subject.load(context, nil) }.not_to change(Badge, :count)
+ end
+ end
+
+ describe '#transform' do
+ it 'returns a transformed badge hash' do
+ badge = subject.transform(context, badge_data)
+
+ expect(badge[:name]).to eq('badge')
+ expect(badge[:link_url]).to eq(badge_data['link_url'])
+ expect(badge[:image_url]).to eq(badge_data['image_url'])
+ expect(badge.keys).to contain_exactly(:name, :link_url, :image_url)
+ end
+
+ context 'when data is blank' do
+ it 'does nothing when the data is blank' do
+ expect(subject.transform(context, nil)).to be_nil
+ end
+ end
+ end
+
+ describe 'pipeline parts' do
+ it { expect(described_class).to include_module(BulkImports::Pipeline) }
+ it { expect(described_class).to include_module(BulkImports::Pipeline::Runner) }
+
+ it 'has extractors' do
+ expect(described_class.get_extractor)
+ .to eq(
+ klass: BulkImports::Common::Extractors::RestExtractor,
+ options: {
+ query: BulkImports::Groups::Rest::GetBadgesQuery
+ }
+ )
+ end
+
+ it 'has transformers' do
+ expect(described_class.transformers)
+ .to contain_exactly(
+ { klass: BulkImports::Common::Transformers::ProhibitedAttributesTransformer, options: nil }
+ )
+ end
+ end
+
+ def badge_data(name = 'badge')
+ {
+ 'name' => name,
+ 'link_url' => 'https://gitlab.example.com',
+ 'image_url' => 'https://gitlab.example.com/image.png'
+ }
+ end
+
+ def extracted_data(name: 'badge', has_next_page: false)
+ page_info = {
+ 'has_next_page' => has_next_page,
+ 'next_page' => has_next_page ? '2' : nil
+ }
+
+ BulkImports::Pipeline::ExtractedData.new(data: [badge_data(name)], page_info: page_info)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb b/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb
new file mode 100644
index 00000000000..8276349c5f4
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/pipelines/entity_finisher_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Pipelines::EntityFinisher do
+ it 'updates the entity status to finished' do
+ entity = create(:bulk_import_entity, :started)
+ pipeline_tracker = create(:bulk_import_tracker, entity: entity)
+ context = BulkImports::Pipeline::Context.new(pipeline_tracker)
+ subject = described_class.new(context)
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ bulk_import_id: entity.bulk_import.id,
+ bulk_import_entity_id: entity.id,
+ bulk_import_entity_type: entity.source_type,
+ pipeline_class: described_class.name,
+ message: 'Entity finished'
+ )
+ end
+
+ expect { subject.run }
+ .to change(entity, :status_name).to(:finished)
+ end
+
+ it 'does nothing when the entity is already finished' do
+ entity = create(:bulk_import_entity, :finished)
+ pipeline_tracker = create(:bulk_import_tracker, entity: entity)
+ context = BulkImports::Pipeline::Context.new(pipeline_tracker)
+ subject = described_class.new(context)
+
+ expect { subject.run }
+ .not_to change(entity, :status_name)
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
index 61950cdd9b0..39e782dc093 100644
--- a/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/group_pipeline_spec.rb
@@ -4,10 +4,11 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
describe '#run' do
- let(:user) { create(:user) }
- let(:parent) { create(:group) }
- let(:bulk_import) { create(:bulk_import, user: user) }
- let(:entity) do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:parent) { create(:group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+
+ let_it_be(:entity) do
create(
:bulk_import_entity,
bulk_import: bulk_import,
@@ -17,7 +18,8 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
)
end
- let(:context) { BulkImports::Pipeline::Context.new(entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:group_data) do
{
@@ -37,7 +39,7 @@ RSpec.describe BulkImports::Groups::Pipelines::GroupPipeline do
before do
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
- allow(extractor).to receive(:extract).and_return([group_data])
+ allow(extractor).to receive(:extract).and_return(BulkImports::Pipeline::ExtractedData.new(data: group_data))
end
parent.add_owner(user)
diff --git a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
index 3327a30f1d5..8af646d1101 100644
--- a/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/labels_pipeline_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:cursor) { 'cursor' }
- let(:timestamp) { Time.new(2020, 01, 01).utc }
- let(:entity) do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:timestamp) { Time.new(2020, 01, 01).utc }
+
+ let_it_be(:entity) do
create(
:bulk_import_entity,
source_full_path: 'source/full/path',
@@ -17,33 +17,15 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
)
end
- let(:context) { BulkImports::Pipeline::Context.new(entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject { described_class.new(context) }
- def label_data(title)
- {
- 'title' => title,
- 'description' => 'desc',
- 'color' => '#428BCA',
- 'created_at' => timestamp.to_s,
- 'updated_at' => timestamp.to_s
- }
- end
-
- def extractor_data(title:, has_next_page:, cursor: nil)
- page_info = {
- 'end_cursor' => cursor,
- 'has_next_page' => has_next_page
- }
-
- BulkImports::Pipeline::ExtractedData.new(data: [label_data(title)], page_info: page_info)
- end
-
describe '#run' do
it 'imports group labels' do
- first_page = extractor_data(title: 'label1', has_next_page: true, cursor: cursor)
- last_page = extractor_data(title: 'label2', has_next_page: false)
+ first_page = extracted_data(title: 'label1', has_next_page: true)
+ last_page = extracted_data(title: 'label2')
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor)
@@ -63,38 +45,6 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
end
end
- describe '#after_run' do
- context 'when extracted data has next page' do
- it 'updates tracker information and runs pipeline again' do
- data = extractor_data(title: 'label', has_next_page: true, cursor: cursor)
-
- expect(subject).to receive(:run)
-
- subject.after_run(data)
-
- tracker = entity.trackers.find_by(relation: :labels)
-
- expect(tracker.has_next_page).to eq(true)
- expect(tracker.next_page).to eq(cursor)
- end
- end
-
- context 'when extracted data has no next page' do
- it 'updates tracker information and does not run pipeline' do
- data = extractor_data(title: 'label', has_next_page: false)
-
- expect(subject).not_to receive(:run)
-
- subject.after_run(data)
-
- tracker = entity.trackers.find_by(relation: :labels)
-
- expect(tracker.has_next_page).to eq(false)
- expect(tracker.next_page).to be_nil
- end
- end
- end
-
describe '#load' do
it 'creates the label' do
data = label_data('label')
@@ -130,4 +80,23 @@ RSpec.describe BulkImports::Groups::Pipelines::LabelsPipeline do
)
end
end
+
+ def label_data(title)
+ {
+ 'title' => title,
+ 'description' => 'desc',
+ 'color' => '#428BCA',
+ 'created_at' => timestamp.to_s,
+ 'updated_at' => timestamp.to_s
+ }
+ end
+
+ def extracted_data(title:, has_next_page: false)
+ page_info = {
+ 'has_next_page' => has_next_page,
+ 'next_page' => has_next_page ? 'cursor' : nil
+ }
+
+ BulkImports::Pipeline::ExtractedData.new(data: [label_data(title)], page_info: page_info)
+ end
end
diff --git a/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
index 74d3e09d263..d8a667ec92a 100644
--- a/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/members_pipeline_spec.rb
@@ -8,17 +8,17 @@ RSpec.describe BulkImports::Groups::Pipelines::MembersPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
- let_it_be(:cursor) { 'cursor' }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import, group: group) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject { described_class.new(context) }
describe '#run' do
it 'maps existing users to the imported group' do
- first_page = member_data(email: member_user1.email, has_next_page: true, cursor: cursor)
- last_page = member_data(email: member_user2.email, has_next_page: false)
+ first_page = extracted_data(email: member_user1.email, has_next_page: true)
+ last_page = extracted_data(email: member_user2.email)
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor)
@@ -88,7 +88,7 @@ RSpec.describe BulkImports::Groups::Pipelines::MembersPipeline do
end
end
- def member_data(email:, has_next_page:, cursor: nil)
+ def extracted_data(email:, has_next_page: false)
data = {
'created_at' => '2020-01-01T00:00:00Z',
'updated_at' => '2020-01-01T00:00:00Z',
@@ -102,8 +102,8 @@ RSpec.describe BulkImports::Groups::Pipelines::MembersPipeline do
}
page_info = {
- 'end_cursor' => cursor,
- 'has_next_page' => has_next_page
+ 'has_next_page' => has_next_page,
+ 'next_page' => has_next_page ? 'cursor' : nil
}
BulkImports::Pipeline::ExtractedData.new(data: data, page_info: page_info)
diff --git a/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb
index f0c34c65257..e5cf75c566b 100644
--- a/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/milestones_pipeline_spec.rb
@@ -5,11 +5,10 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
- let_it_be(:cursor) { 'cursor' }
let_it_be(:timestamp) { Time.new(2020, 01, 01).utc }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
- let(:entity) do
+ let_it_be(:entity) do
create(
:bulk_import_entity,
bulk_import: bulk_import,
@@ -20,39 +19,19 @@ RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do
)
end
- let(:context) { BulkImports::Pipeline::Context.new(entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject { described_class.new(context) }
- def milestone_data(title)
- {
- 'title' => title,
- 'description' => 'desc',
- 'state' => 'closed',
- 'start_date' => '2020-10-21',
- 'due_date' => '2020-10-22',
- 'created_at' => timestamp.to_s,
- 'updated_at' => timestamp.to_s
- }
- end
-
- def extracted_data(title:, has_next_page:, cursor: nil)
- page_info = {
- 'end_cursor' => cursor,
- 'has_next_page' => has_next_page
- }
-
- BulkImports::Pipeline::ExtractedData.new(data: [milestone_data(title)], page_info: page_info)
- end
-
before do
group.add_owner(user)
end
describe '#run' do
it 'imports group milestones' do
- first_page = extracted_data(title: 'milestone1', has_next_page: true, cursor: cursor)
- last_page = extracted_data(title: 'milestone2', has_next_page: false)
+ first_page = extracted_data(title: 'milestone1', iid: 1, has_next_page: true)
+ last_page = extracted_data(title: 'milestone2', iid: 2)
allow_next_instance_of(BulkImports::Common::Extractors::GraphqlExtractor) do |extractor|
allow(extractor)
@@ -75,38 +54,6 @@ RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do
end
end
- describe '#after_run' do
- context 'when extracted data has next page' do
- it 'updates tracker information and runs pipeline again' do
- data = extracted_data(title: 'milestone', has_next_page: true, cursor: cursor)
-
- expect(subject).to receive(:run)
-
- subject.after_run(data)
-
- tracker = entity.trackers.find_by(relation: :milestones)
-
- expect(tracker.has_next_page).to eq(true)
- expect(tracker.next_page).to eq(cursor)
- end
- end
-
- context 'when extracted data has no next page' do
- it 'updates tracker information and does not run pipeline' do
- data = extracted_data(title: 'milestone', has_next_page: false)
-
- expect(subject).not_to receive(:run)
-
- subject.after_run(data)
-
- tracker = entity.trackers.find_by(relation: :milestones)
-
- expect(tracker.has_next_page).to eq(false)
- expect(tracker.next_page).to be_nil
- end
- end
- end
-
describe '#load' do
it 'creates the milestone' do
data = milestone_data('milestone')
@@ -120,7 +67,7 @@ RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do
end
it 'raises NotAllowedError' do
- data = extracted_data(title: 'milestone', has_next_page: false)
+ data = extracted_data(title: 'milestone')
expect { subject.load(context, data) }.to raise_error(::BulkImports::Pipeline::NotAllowedError)
end
@@ -148,4 +95,29 @@ RSpec.describe BulkImports::Groups::Pipelines::MilestonesPipeline do
)
end
end
+
+ def milestone_data(title, iid: 1)
+ {
+ 'title' => title,
+ 'description' => 'desc',
+ 'iid' => iid,
+ 'state' => 'closed',
+ 'start_date' => '2020-10-21',
+ 'due_date' => '2020-10-22',
+ 'created_at' => timestamp.to_s,
+ 'updated_at' => timestamp.to_s
+ }
+ end
+
+ def extracted_data(title:, iid: 1, has_next_page: false)
+ page_info = {
+ 'has_next_page' => has_next_page,
+ 'next_page' => has_next_page ? 'cursor' : nil
+ }
+
+ BulkImports::Pipeline::ExtractedData.new(
+ data: milestone_data(title, iid: iid),
+ page_info: page_info
+ )
+ end
end
diff --git a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
index 2a99646bb4a..e4a41428dd2 100644
--- a/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/groups/pipelines/subgroup_entities_pipeline_spec.rb
@@ -6,31 +6,23 @@ RSpec.describe BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group, path: 'group') }
let_it_be(:parent) { create(:group, name: 'imported-group', path: 'imported-group') }
- let(:context) { BulkImports::Pipeline::Context.new(parent_entity) }
+ let_it_be(:parent_entity) { create(:bulk_import_entity, destination_namespace: parent.full_path, group: parent) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: parent_entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
subject { described_class.new(context) }
- describe '#run' do
- let!(:parent_entity) do
- create(
- :bulk_import_entity,
- destination_namespace: parent.full_path,
- group: parent
- )
- end
-
- let(:subgroup_data) do
- [
- {
- "name" => "subgroup",
- "full_path" => "parent/subgroup"
- }
- ]
- end
+ let(:extracted_data) do
+ BulkImports::Pipeline::ExtractedData.new(data: {
+ 'name' => 'subgroup',
+ 'full_path' => 'parent/subgroup'
+ })
+ end
+ describe '#run' do
before do
allow_next_instance_of(BulkImports::Groups::Extractors::SubgroupsExtractor) do |extractor|
- allow(extractor).to receive(:extract).and_return(subgroup_data)
+ allow(extractor).to receive(:extract).and_return(extracted_data)
end
parent.add_owner(user)
diff --git a/spec/lib/bulk_imports/groups/rest/get_badges_query_spec.rb b/spec/lib/bulk_imports/groups/rest/get_badges_query_spec.rb
new file mode 100644
index 00000000000..eef6848e118
--- /dev/null
+++ b/spec/lib/bulk_imports/groups/rest/get_badges_query_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Groups::Rest::GetBadgesQuery do
+ describe '.to_h' do
+ it 'returns query resource and page info' do
+ entity = create(:bulk_import_entity)
+ tracker = create(:bulk_import_tracker, entity: entity)
+ context = BulkImports::Pipeline::Context.new(tracker)
+ encoded_full_path = ERB::Util.url_encode(entity.source_full_path)
+ expected = {
+ resource: ['groups', encoded_full_path, 'badges'].join('/'),
+ query: {
+ page: context.tracker.next_page
+ }
+ }
+
+ expect(described_class.to_h(context)).to eq(expected)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
index b3fe8a2ba25..75d8c15088a 100644
--- a/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/groups/transformers/group_attributes_transformer_spec.rb
@@ -4,11 +4,12 @@ require 'spec_helper'
RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do
describe '#transform' do
- let(:user) { create(:user) }
- let(:parent) { create(:group) }
- let(:group) { create(:group, name: 'My Source Group', parent: parent) }
- let(:bulk_import) { create(:bulk_import, user: user) }
- let(:entity) do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:parent) { create(:group) }
+ let_it_be(:group) { create(:group, name: 'My Source Group', parent: parent) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+
+ let_it_be(:entity) do
create(
:bulk_import_entity,
bulk_import: bulk_import,
@@ -18,7 +19,8 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do
)
end
- let(:context) { BulkImports::Pipeline::Context.new(entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
let(:data) do
{
@@ -82,14 +84,7 @@ RSpec.describe BulkImports::Groups::Transformers::GroupAttributesTransformer do
context 'when destination namespace is empty' do
it 'does not set parent id' do
- entity = create(
- :bulk_import_entity,
- bulk_import: bulk_import,
- source_full_path: 'source/full/path',
- destination_name: group.name,
- destination_namespace: ''
- )
- context = BulkImports::Pipeline::Context.new(entity)
+ entity.update!(destination_namespace: '')
transformed_data = subject.transform(context, data)
diff --git a/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb
index f66c67fc6a2..f3905a4b6e4 100644
--- a/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb
+++ b/spec/lib/bulk_imports/groups/transformers/member_attributes_transformer_spec.rb
@@ -8,7 +8,8 @@ RSpec.describe BulkImports::Groups::Transformers::MemberAttributesTransformer do
let_it_be(:group) { create(:group) }
let_it_be(:bulk_import) { create(:bulk_import, user: user) }
let_it_be(:entity) { create(:bulk_import_entity, bulk_import: bulk_import, group: group) }
- let_it_be(:context) { BulkImports::Pipeline::Context.new(entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker) }
it 'returns nil when receives no data' do
expect(subject.transform(context, nil)).to eq(nil)
diff --git a/spec/lib/bulk_imports/importers/group_importer_spec.rb b/spec/lib/bulk_imports/importers/group_importer_spec.rb
deleted file mode 100644
index 5d501b49e41..00000000000
--- a/spec/lib/bulk_imports/importers/group_importer_spec.rb
+++ /dev/null
@@ -1,57 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe BulkImports::Importers::GroupImporter do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:bulk_import) { create(:bulk_import) }
- let(:bulk_import_entity) { create(:bulk_import_entity, :started, bulk_import: bulk_import, group: group) }
- let(:bulk_import_configuration) { create(:bulk_import_configuration, bulk_import: bulk_import) }
- let(:context) { BulkImports::Pipeline::Context.new(bulk_import_entity) }
-
- before do
- allow(BulkImports::Pipeline::Context).to receive(:new).and_return(context)
- end
-
- subject { described_class.new(bulk_import_entity) }
-
- describe '#execute' do
- it 'starts the entity and run its pipelines' do
- expect_to_run_pipeline BulkImports::Groups::Pipelines::GroupPipeline, context: context
- expect_to_run_pipeline BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline, context: context
- expect_to_run_pipeline BulkImports::Groups::Pipelines::MembersPipeline, context: context
- expect_to_run_pipeline BulkImports::Groups::Pipelines::LabelsPipeline, context: context
- expect_to_run_pipeline BulkImports::Groups::Pipelines::MilestonesPipeline, context: context
-
- if Gitlab.ee?
- expect_to_run_pipeline('EE::BulkImports::Groups::Pipelines::EpicsPipeline'.constantize, context: context)
- expect_to_run_pipeline('EE::BulkImports::Groups::Pipelines::EpicAwardEmojiPipeline'.constantize, context: context)
- expect_to_run_pipeline('EE::BulkImports::Groups::Pipelines::EpicEventsPipeline'.constantize, context: context)
- expect_to_run_pipeline('EE::BulkImports::Groups::Pipelines::IterationsPipeline'.constantize, context: context)
- end
-
- subject.execute
-
- expect(bulk_import_entity.reload).to be_finished
- end
-
- context 'when failed' do
- let(:bulk_import_entity) { create(:bulk_import_entity, :failed, bulk_import: bulk_import, group: group) }
-
- it 'does not transition entity to finished state' do
- allow(bulk_import_entity).to receive(:start!)
-
- subject.execute
-
- expect(bulk_import_entity.reload).to be_failed
- end
- end
- end
-
- def expect_to_run_pipeline(klass, context:)
- expect_next_instance_of(klass, context) do |pipeline|
- expect(pipeline).to receive(:run)
- end
- end
-end
diff --git a/spec/lib/bulk_imports/pipeline/context_spec.rb b/spec/lib/bulk_imports/pipeline/context_spec.rb
index c8c3fe3a861..5b7711ad5d7 100644
--- a/spec/lib/bulk_imports/pipeline/context_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/context_spec.rb
@@ -3,29 +3,52 @@
require 'spec_helper'
RSpec.describe BulkImports::Pipeline::Context do
- let(:group) { instance_double(Group) }
- let(:user) { instance_double(User) }
- let(:bulk_import) { instance_double(BulkImport, user: user, configuration: :config) }
-
- let(:entity) do
- instance_double(
- BulkImports::Entity,
- bulk_import: bulk_import,
- group: group
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:bulk_import) { create(:bulk_import, user: user) }
+
+ let_it_be(:entity) do
+ create(
+ :bulk_import_entity,
+ source_full_path: 'source/full/path',
+ destination_name: 'My Destination Group',
+ destination_namespace: group.full_path,
+ group: group,
+ bulk_import: bulk_import
+ )
+ end
+
+ let_it_be(:tracker) do
+ create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: described_class.name
)
end
- subject { described_class.new(entity) }
+ subject { described_class.new(tracker, extra: :data) }
+
+ describe '#entity' do
+ it { expect(subject.entity).to eq(entity) }
+ end
describe '#group' do
it { expect(subject.group).to eq(group) }
end
+ describe '#bulk_import' do
+ it { expect(subject.bulk_import).to eq(bulk_import) }
+ end
+
describe '#current_user' do
it { expect(subject.current_user).to eq(user) }
end
- describe '#current_user' do
+ describe '#configuration' do
it { expect(subject.configuration).to eq(bulk_import.configuration) }
end
+
+ describe '#extra' do
+ it { expect(subject.extra).to eq(extra: :data) }
+ end
end
diff --git a/spec/lib/bulk_imports/pipeline/extracted_data_spec.rb b/spec/lib/bulk_imports/pipeline/extracted_data_spec.rb
index 25c5178227a..9c79b3f4c9e 100644
--- a/spec/lib/bulk_imports/pipeline/extracted_data_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/extracted_data_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe BulkImports::Pipeline::ExtractedData do
let(:page_info) do
{
'has_next_page' => has_next_page,
- 'end_cursor' => cursor
+ 'next_page' => cursor
}
end
diff --git a/spec/lib/bulk_imports/pipeline/runner_spec.rb b/spec/lib/bulk_imports/pipeline/runner_spec.rb
index 59f01c9caaa..7235b7c95cd 100644
--- a/spec/lib/bulk_imports/pipeline/runner_spec.rb
+++ b/spec/lib/bulk_imports/pipeline/runner_spec.rb
@@ -38,23 +38,20 @@ RSpec.describe BulkImports::Pipeline::Runner do
extractor BulkImports::Extractor
transformer BulkImports::Transformer
loader BulkImports::Loader
-
- def after_run(_); end
end
stub_const('BulkImports::MyPipeline', pipeline)
end
- let_it_be_with_refind(:entity) { create(:bulk_import_entity) }
- let(:context) { BulkImports::Pipeline::Context.new(entity, extra: :data) }
+ let_it_be_with_reload(:entity) { create(:bulk_import_entity) }
+ let_it_be(:tracker) { create(:bulk_import_tracker, entity: entity) }
+ let_it_be(:context) { BulkImports::Pipeline::Context.new(tracker, extra: :data) }
subject { BulkImports::MyPipeline.new(context) }
describe 'pipeline runner' do
context 'when entity is not marked as failed' do
it 'runs pipeline extractor, transformer, loader' do
- extracted_data = BulkImports::Pipeline::ExtractedData.new(data: { foo: :bar })
-
expect_next_instance_of(BulkImports::Extractor) do |extractor|
expect(extractor)
.to receive(:extract)
@@ -132,6 +129,22 @@ RSpec.describe BulkImports::Pipeline::Runner do
subject.run
end
+ context 'when extracted data has multiple pages' do
+ it 'updates tracker information and runs pipeline again' do
+ first_page = extracted_data(has_next_page: true)
+ last_page = extracted_data
+
+ expect_next_instance_of(BulkImports::Extractor) do |extractor|
+ expect(extractor)
+ .to receive(:extract)
+ .with(context)
+ .and_return(first_page, last_page)
+ end
+
+ subject.run
+ end
+ end
+
context 'when exception is raised' do
before do
allow_next_instance_of(BulkImports::Extractor) do |extractor|
@@ -170,12 +183,7 @@ RSpec.describe BulkImports::Pipeline::Runner do
BulkImports::MyPipeline.abort_on_failure!
end
- it 'marks entity as failed' do
- expect { subject.run }
- .to change(entity, :status_name).to(:failed)
- end
-
- it 'logs warn message' do
+ it 'logs a warn message and marks entity as failed' do
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
expect(logger).to receive(:warn)
.with(
@@ -188,6 +196,9 @@ RSpec.describe BulkImports::Pipeline::Runner do
end
subject.run
+
+ expect(entity.status_name).to eq(:failed)
+ expect(tracker.status_name).to eq(:failed)
end
end
@@ -206,11 +217,11 @@ RSpec.describe BulkImports::Pipeline::Runner do
entity.fail_op!
expect_next_instance_of(Gitlab::Import::Logger) do |logger|
- expect(logger).to receive(:info)
+ expect(logger).to receive(:warn)
.with(
log_params(
context,
- message: 'Skipping due to failed pipeline status',
+ message: 'Skipping pipeline due to failed entity',
pipeline_class: 'BulkImports::MyPipeline'
)
)
@@ -219,14 +230,24 @@ RSpec.describe BulkImports::Pipeline::Runner do
subject.run
end
end
- end
- def log_params(context, extra = {})
- {
- bulk_import_id: context.bulk_import.id,
- bulk_import_entity_id: context.entity.id,
- bulk_import_entity_type: context.entity.source_type,
- context_extra: context.extra
- }.merge(extra)
+ def log_params(context, extra = {})
+ {
+ bulk_import_id: context.bulk_import.id,
+ bulk_import_entity_id: context.entity.id,
+ bulk_import_entity_type: context.entity.source_type,
+ context_extra: context.extra
+ }.merge(extra)
+ end
+
+ def extracted_data(has_next_page: false)
+ BulkImports::Pipeline::ExtractedData.new(
+ data: { foo: :bar },
+ page_info: {
+ 'has_next_page' => has_next_page,
+ 'next_page' => has_next_page ? 'cursor' : nil
+ }
+ )
+ end
end
end
diff --git a/spec/lib/bulk_imports/pipeline_spec.rb b/spec/lib/bulk_imports/pipeline_spec.rb
index c882e3d26ea..dda2e41f06c 100644
--- a/spec/lib/bulk_imports/pipeline_spec.rb
+++ b/spec/lib/bulk_imports/pipeline_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe BulkImports::Pipeline do
+ let(:context) { instance_double(BulkImports::Pipeline::Context, tracker: nil) }
+
before do
stub_const('BulkImports::Extractor', Class.new)
stub_const('BulkImports::Transformer', Class.new)
@@ -44,7 +46,7 @@ RSpec.describe BulkImports::Pipeline do
end
it 'returns itself when retrieving extractor & loader' do
- pipeline = BulkImports::AnotherPipeline.new(nil)
+ pipeline = BulkImports::AnotherPipeline.new(context)
expect(pipeline.send(:extractor)).to eq(pipeline)
expect(pipeline.send(:loader)).to eq(pipeline)
@@ -83,7 +85,7 @@ RSpec.describe BulkImports::Pipeline do
expect(BulkImports::Transformer).to receive(:new).with(foo: :bar)
expect(BulkImports::Loader).to receive(:new).with(foo: :bar)
- pipeline = BulkImports::MyPipeline.new(nil)
+ pipeline = BulkImports::MyPipeline.new(context)
pipeline.send(:extractor)
pipeline.send(:transformers)
@@ -109,7 +111,7 @@ RSpec.describe BulkImports::Pipeline do
expect(BulkImports::Transformer).to receive(:new).with(no_args)
expect(BulkImports::Loader).to receive(:new).with(no_args)
- pipeline = BulkImports::NoOptionsPipeline.new(nil)
+ pipeline = BulkImports::NoOptionsPipeline.new(context)
pipeline.send(:extractor)
pipeline.send(:transformers)
@@ -135,7 +137,7 @@ RSpec.describe BulkImports::Pipeline do
transformer = double
allow(BulkImports::Transformer).to receive(:new).and_return(transformer)
- pipeline = BulkImports::TransformersPipeline.new(nil)
+ pipeline = BulkImports::TransformersPipeline.new(context)
expect(pipeline.send(:transformers)).to eq([pipeline, transformer])
end
diff --git a/spec/lib/constraints/admin_constrainer_spec.rb b/spec/lib/constraints/admin_constrainer_spec.rb
index ac6ad31120e..6e8909ca129 100644
--- a/spec/lib/constraints/admin_constrainer_spec.rb
+++ b/spec/lib/constraints/admin_constrainer_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Constraints::AdminConstrainer do
end
describe '#matches' do
- context 'feature flag :user_mode_in_session is enabled' do
+ context 'application setting :admin_mode is enabled' do
context 'when user is a regular user' do
it 'forbids access' do
expect(subject.matches?(request)).to be(false)
@@ -46,9 +46,9 @@ RSpec.describe Constraints::AdminConstrainer do
end
end
- context 'feature flag :user_mode_in_session is disabled' do
+ context 'application setting :admin_mode is disabled' do
before do
- stub_feature_flags(user_mode_in_session: false)
+ stub_application_setting(admin_mode: false)
end
context 'when user is a regular user' do
diff --git a/spec/lib/feature_spec.rb b/spec/lib/feature_spec.rb
index 3e158391d7f..dc8fd0de313 100644
--- a/spec/lib/feature_spec.rb
+++ b/spec/lib/feature_spec.rb
@@ -487,6 +487,98 @@ RSpec.describe Feature, stub_feature_flags: false do
end
end
+ context 'caching with stale reads from the database', :use_clean_rails_redis_caching, :request_store, :aggregate_failures do
+ let(:actor) { stub_feature_flag_gate('CustomActor:5') }
+ let(:another_actor) { stub_feature_flag_gate('CustomActor:10') }
+
+ # This is a bit unpleasant. For these tests we want to simulate stale reads
+ # from the database (due to database load balancing). A simple way to do
+ # that is to stub the response on the adapter Flipper uses for reading from
+ # the database. However, there isn't a convenient API for this. We know that
+ # the ActiveRecord adapter is always at the 'bottom' of the chain, so we can
+ # find it that way.
+ let(:active_record_adapter) do
+ adapter = described_class.flipper
+
+ loop do
+ break adapter unless adapter.instance_variable_get(:@adapter)
+
+ adapter = adapter.instance_variable_get(:@adapter)
+ end
+ end
+
+ it 'gives the correct value when enabling for an additional actor' do
+ described_class.enable(:enabled_feature_flag, actor)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+
+ # This should only be enabled for `actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+
+ # Enable for `another_actor` and simulate a stale read
+ described_class.enable(:enabled_feature_flag, another_actor)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+
+ # Should read from the cache and be enabled for both of these actors
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ end
+
+ it 'gives the correct value when enabling for percentage of time' do
+ described_class.enable_percentage_of_time(:enabled_feature_flag, 10)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+
+      # Test against `gate_values` directly as otherwise it would be non-deterministic
+ expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(10)
+
+ # Enable 50% of time and simulate a stale read
+ described_class.enable_percentage_of_time(:enabled_feature_flag, 50)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+
+ # Should read from the cache and be enabled 50% of the time
+ expect(described_class.get(:enabled_feature_flag).gate_values.percentage_of_time).to eq(50)
+ end
+
+ it 'gives the correct value when disabling the flag' do
+ described_class.enable(:enabled_feature_flag, actor)
+ described_class.enable(:enabled_feature_flag, another_actor)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+
+      # This should be enabled for `actor` and `another_actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+
+ # Disable for `another_actor` and simulate a stale read
+ described_class.disable(:enabled_feature_flag, another_actor)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+
+ # Should read from the cache and be enabled only for `actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag, another_actor)).to be(false)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ end
+
+ it 'gives the correct value when deleting the flag' do
+ described_class.enable(:enabled_feature_flag, actor)
+ initial_gate_values = active_record_adapter.get(described_class.get(:enabled_feature_flag))
+
+ # This should only be enabled for `actor`
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(true)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+
+ # Remove and simulate a stale read
+ described_class.remove(:enabled_feature_flag)
+ allow(active_record_adapter).to receive(:get).once.and_return(initial_gate_values)
+
+ # Should read from the cache and be disabled everywhere
+ expect(described_class.enabled?(:enabled_feature_flag, actor)).to be(false)
+ expect(described_class.enabled?(:enabled_feature_flag)).to be(false)
+ end
+ end
+
describe Feature::Target do
describe '#targets' do
let(:project) { create(:project) }
diff --git a/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb
new file mode 100644
index 00000000000..021fb8f5f58
--- /dev/null
+++ b/spec/lib/generators/gitlab/usage_metric_definition/redis_hll_generator_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require 'generator_helper'
+
+RSpec.describe Gitlab::UsageMetricDefinition::RedisHllGenerator do
+ include UsageDataHelpers
+
+ let(:category) { 'test_category' }
+ let(:event) { 'i_test_event' }
+ let(:args) { [category, event] }
+ let(:temp_dir) { Dir.mktmpdir }
+
+  # Interpolating the class name preloads (autoloads) the class before stubbing the constant
+ # See https://github.com/rspec/rspec-mocks/issues/1079
+ before do
+ stub_const("#{Gitlab::UsageMetricDefinitionGenerator}::TOP_LEVEL_DIR", temp_dir)
+ # Stub Prometheus requests from Gitlab::Utils::UsageData
+ stub_prometheus_queries
+ end
+
+ it 'creates metric definition files' do
+ described_class.new(args).invoke_all
+
+ weekly_metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*i_test_event_weekly.yml')).first
+ monthly_metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_28d/*i_test_event_monthly.yml')).first
+
+ expect(YAML.safe_load(File.read(weekly_metric_definition_path))).to include("key_path" => "redis_hll_counters.test_category.i_test_event_weekly")
+ expect(YAML.safe_load(File.read(monthly_metric_definition_path))).to include("key_path" => "redis_hll_counters.test_category.i_test_event_monthly")
+ end
+end
diff --git a/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb
index b62eac14e3e..f8c055ae111 100644
--- a/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb
+++ b/spec/lib/generators/gitlab/usage_metric_definition_generator_spec.rb
@@ -3,10 +3,42 @@
require 'generator_helper'
RSpec.describe Gitlab::UsageMetricDefinitionGenerator do
+ include UsageDataHelpers
+
+ let(:key_path) { 'counts_weekly.test_metric' }
+ let(:dir) { '7d' }
+ let(:temp_dir) { Dir.mktmpdir }
+
+ before do
+ stub_const("#{described_class}::TOP_LEVEL_DIR", temp_dir)
+ # Stub Prometheus requests from Gitlab::Utils::UsageData
+ stub_prometheus_queries
+ end
+
+ after do
+ FileUtils.rm_rf(temp_dir)
+ end
+
+ describe 'Creating metric definition file' do
+ # Stub version so that `milestone` key remains constant between releases to prevent flakiness.
+ before do
+ stub_const('Gitlab::VERSION', '13.9.0')
+ allow(::Gitlab::Usage::Metrics::NamesSuggestions::Generator).to receive(:generate).and_return('test metric name')
+ end
+
+ let(:sample_metric) { load_sample_metric_definition(filename: 'sample_metric_with_name_suggestions.yml') }
+
+ it 'creates a metric definition file using the template' do
+ described_class.new([key_path], { 'dir' => dir }).invoke_all
+
+ metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_test_metric.yml')).first
+
+ expect(YAML.safe_load(File.read(metric_definition_path))).to eq(sample_metric)
+ end
+ end
+
describe 'Validation' do
- let(:key_path) { 'counter.category.event' }
- let(:dir) { '7d' }
- let(:options) { [key_path, '--dir', dir, '--pretend'] }
+ let(:options) { [key_path, '--dir', dir] }
subject { described_class.start(options) }
@@ -42,34 +74,12 @@ RSpec.describe Gitlab::UsageMetricDefinitionGenerator do
end
describe 'Name suggestions' do
- let(:temp_dir) { Dir.mktmpdir }
-
- before do
- stub_const("#{described_class}::TOP_LEVEL_DIR", temp_dir)
- end
-
- context 'with product_intelligence_metrics_names_suggestions feature ON' do
- it 'adds name key to metric definition' do
- stub_feature_flags(product_intelligence_metrics_names_suggestions: true)
-
- expect(::Gitlab::Usage::Metrics::NamesSuggestions::Generator).to receive(:generate).and_return('some name')
- described_class.new(['counts_weekly.test_metric'], { 'dir' => '7d' }).invoke_all
- metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_test_metric.yml')).first
+ it 'adds name key to metric definition' do
+ expect(::Gitlab::Usage::Metrics::NamesSuggestions::Generator).to receive(:generate).and_return('some name')
+ described_class.new([key_path], { 'dir' => dir }).invoke_all
+ metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_test_metric.yml')).first
- expect(YAML.safe_load(File.read(metric_definition_path))).to include("name" => "some name")
- end
- end
-
- context 'with product_intelligence_metrics_names_suggestions feature OFF' do
- it 'adds name key to metric definition' do
- stub_feature_flags(product_intelligence_metrics_names_suggestions: false)
-
- expect(::Gitlab::Usage::Metrics::NamesSuggestions::Generator).not_to receive(:generate)
- described_class.new(['counts_weekly.test_metric'], { 'dir' => '7d' }).invoke_all
- metric_definition_path = Dir.glob(File.join(temp_dir, 'metrics/counts_7d/*_test_metric.yml')).first
-
- expect(YAML.safe_load(File.read(metric_definition_path)).keys).not_to include(:name)
- end
+ expect(YAML.safe_load(File.read(metric_definition_path))).to include("name" => "some name")
end
end
end
diff --git a/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb b/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb
index fceda763717..1ed43145aa6 100644
--- a/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb
+++ b/spec/lib/gitlab/alert_management/alert_status_counts_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Gitlab::AlertManagement::AlertStatusCounts do
let_it_be(:alert_resolved) { create(:alert_management_alert, :resolved, project: project) }
let_it_be(:alert_ignored) { create(:alert_management_alert, :ignored, project: project) }
let_it_be(:alert_triggered) { create(:alert_management_alert) }
+
let(:params) { {} }
describe '#execute' do
diff --git a/spec/lib/gitlab/alert_management/payload/base_spec.rb b/spec/lib/gitlab/alert_management/payload/base_spec.rb
index 0c26e94e596..e093b3587c2 100644
--- a/spec/lib/gitlab/alert_management/payload/base_spec.rb
+++ b/spec/lib/gitlab/alert_management/payload/base_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::AlertManagement::Payload::Base do
let_it_be(:project) { create(:project) }
+
let(:raw_payload) { {} }
let(:payload_class) { described_class }
diff --git a/spec/lib/gitlab/alert_management/payload/generic_spec.rb b/spec/lib/gitlab/alert_management/payload/generic_spec.rb
index b0c238c62c8..59933f7459d 100644
--- a/spec/lib/gitlab/alert_management/payload/generic_spec.rb
+++ b/spec/lib/gitlab/alert_management/payload/generic_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::AlertManagement::Payload::Generic do
let_it_be(:project) { build_stubbed(:project) }
+
let(:raw_payload) { {} }
let(:parsed_payload) { described_class.new(project: project, payload: raw_payload) }
diff --git a/spec/lib/gitlab/alert_management/payload/managed_prometheus_spec.rb b/spec/lib/gitlab/alert_management/payload/managed_prometheus_spec.rb
index 862b5b2bdc3..fa8afd47c53 100644
--- a/spec/lib/gitlab/alert_management/payload/managed_prometheus_spec.rb
+++ b/spec/lib/gitlab/alert_management/payload/managed_prometheus_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::AlertManagement::Payload::ManagedPrometheus do
let_it_be(:project) { create(:project) }
+
let(:raw_payload) { {} }
let(:parsed_payload) { described_class.new(project: project, payload: raw_payload) }
@@ -136,6 +137,7 @@ RSpec.describe Gitlab::AlertManagement::Payload::ManagedPrometheus do
context 'with sufficient fallback info' do
let_it_be(:environment) { create(:environment, project: project, name: 'production') }
+
let(:raw_payload) do
{
'labels' => {
diff --git a/spec/lib/gitlab/alert_management/payload/prometheus_spec.rb b/spec/lib/gitlab/alert_management/payload/prometheus_spec.rb
index f574f5ba6a3..6a4f35c01e3 100644
--- a/spec/lib/gitlab/alert_management/payload/prometheus_spec.rb
+++ b/spec/lib/gitlab/alert_management/payload/prometheus_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::AlertManagement::Payload::Prometheus do
let_it_be(:project) { create(:project) }
+
let(:raw_payload) { {} }
let(:parsed_payload) { described_class.new(project: project, payload: raw_payload) }
diff --git a/spec/lib/gitlab/alert_management/payload_spec.rb b/spec/lib/gitlab/alert_management/payload_spec.rb
index 7c129a8a48e..efde7ed3772 100644
--- a/spec/lib/gitlab/alert_management/payload_spec.rb
+++ b/spec/lib/gitlab/alert_management/payload_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::AlertManagement::Payload do
describe '#parse' do
let_it_be(:project) { build_stubbed(:project) }
+
let(:payload) { {} }
context 'without a monitoring_tool specified by caller' do
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
index 80d3f82b404..0a333965f68 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/base_query_builder_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::BaseQueryBuilder do
let_it_be(:mr1) { create(:merge_request, target_project: project, source_project: project, allow_broken: true, created_at: 3.months.ago) }
let_it_be(:mr2) { create(:merge_request, target_project: project, source_project: project, allow_broken: true, created_at: 1.month.ago) }
let_it_be(:user) { create(:user) }
+
let(:params) { { current_user: user } }
let(:records) do
stage = build(:cycle_analytics_project_stage, {
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
index c1ea000eb7b..14768025932 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/median_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Analytics::CycleAnalytics::Median do
let_it_be(:project) { create(:project, :repository) }
+
let(:query) { Project.joins(merge_requests: :metrics) }
let(:stage) do
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
index b8f9dde4291..ebc5ae2a632 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/records_fetcher_spec.rb
@@ -7,16 +7,15 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
Timecop.freeze { example.run }
end
+ let(:params) { { from: 1.year.ago, current_user: user } }
+
let_it_be(:project) { create(:project, :empty_repo) }
let_it_be(:user) { create(:user) }
subject do
Gitlab::Analytics::CycleAnalytics::DataCollector.new(
stage: stage,
- params: {
- from: 1.year.ago,
- current_user: user
- }
+ params: params
).records_fetcher.serialized_records
end
@@ -34,6 +33,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
describe 'for issue based stage' do
let_it_be(:issue1) { create(:issue, project: project) }
let_it_be(:issue2) { create(:issue, project: project, confidential: true) }
+
let(:stage) do
build(:cycle_analytics_project_stage, {
start_event_identifier: :plan_stage_start,
@@ -130,4 +130,40 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::RecordsFetcher do
end
end
end
+
+ describe 'pagination' do
+ let_it_be(:issue1) { create(:issue, project: project) }
+ let_it_be(:issue2) { create(:issue, project: project) }
+ let_it_be(:issue3) { create(:issue, project: project) }
+
+ let(:stage) do
+ build(:cycle_analytics_project_stage, {
+ start_event_identifier: :plan_stage_start,
+ end_event_identifier: :issue_first_mentioned_in_commit,
+ project: project
+ })
+ end
+
+ before(:all) do
+ issue1.metrics.update(first_added_to_board_at: 3.days.ago, first_mentioned_in_commit_at: 2.days.ago)
+ issue2.metrics.update(first_added_to_board_at: 3.days.ago, first_mentioned_in_commit_at: 2.days.ago)
+ issue3.metrics.update(first_added_to_board_at: 3.days.ago, first_mentioned_in_commit_at: 2.days.ago)
+ end
+
+ before do
+ project.add_user(user, Gitlab::Access::DEVELOPER)
+
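+      # Shrink the page size to 2 so the three issues above span two pages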
+ stub_const('Gitlab::Analytics::CycleAnalytics::RecordsFetcher::MAX_RECORDS', 2)
+ end
+
+ it 'limits the results' do
+ expect(subject.size).to eq(2)
+ end
+
+ it 'loads the record for the next page' do
+ params[:page] = 2
+
+ expect(subject.size).to eq(1)
+ end
+ end
end
diff --git a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start_spec.rb b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start_spec.rb
index 52e9f2d9846..b6f9c8106c9 100644
--- a/spec/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start_spec.rb
+++ b/spec/lib/gitlab/analytics/cycle_analytics/stage_events/code_stage_start_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::Analytics::CycleAnalytics::StageEvents::CodeStageStart do
other_merge_request = create(:merge_request, source_project: project, source_branch: 'a', target_branch: 'master')
- records = subject.apply_query_customization(MergeRequest.all).where('merge_requests_closing_issues.issue_id IS NOT NULL')
+ records = subject.apply_query_customization(MergeRequest.all).where.not('merge_requests_closing_issues.issue_id' => nil)
expect(records).to eq([merge_request])
expect(records).not_to include(other_merge_request)
end
diff --git a/spec/lib/gitlab/analytics/unique_visits_spec.rb b/spec/lib/gitlab/analytics/unique_visits_spec.rb
index 6ac58e13f4c..f4d5c0b1eca 100644
--- a/spec/lib/gitlab/analytics/unique_visits_spec.rb
+++ b/spec/lib/gitlab/analytics/unique_visits_spec.rb
@@ -24,18 +24,18 @@ RSpec.describe Gitlab::Analytics::UniqueVisits, :clean_gitlab_redis_shared_state
describe '#track_visit' do
it 'tracks the unique weekly visits for targets' do
- unique_visits.track_visit(visitor1_id, target1_id, 7.days.ago)
- unique_visits.track_visit(visitor1_id, target1_id, 7.days.ago)
- unique_visits.track_visit(visitor2_id, target1_id, 7.days.ago)
+ unique_visits.track_visit(target1_id, values: visitor1_id, time: 7.days.ago)
+ unique_visits.track_visit(target1_id, values: visitor1_id, time: 7.days.ago)
+ unique_visits.track_visit(target1_id, values: visitor2_id, time: 7.days.ago)
- unique_visits.track_visit(visitor2_id, target2_id, 7.days.ago)
- unique_visits.track_visit(visitor1_id, target2_id, 8.days.ago)
- unique_visits.track_visit(visitor1_id, target2_id, 15.days.ago)
+ unique_visits.track_visit(target2_id, values: visitor2_id, time: 7.days.ago)
+ unique_visits.track_visit(target2_id, values: visitor1_id, time: 8.days.ago)
+ unique_visits.track_visit(target2_id, values: visitor1_id, time: 15.days.ago)
- unique_visits.track_visit(visitor3_id, target4_id, 7.days.ago)
+ unique_visits.track_visit(target4_id, values: visitor3_id, time: 7.days.ago)
- unique_visits.track_visit(visitor3_id, target5_id, 15.days.ago)
- unique_visits.track_visit(visitor2_id, target5_id, 15.days.ago)
+ unique_visits.track_visit(target5_id, values: visitor3_id, time: 15.days.ago)
+ unique_visits.track_visit(target5_id, values: visitor2_id, time: 15.days.ago)
expect(unique_visits.unique_visits_for(targets: target1_id)).to eq(2)
expect(unique_visits.unique_visits_for(targets: target2_id)).to eq(1)
@@ -61,7 +61,7 @@ RSpec.describe Gitlab::Analytics::UniqueVisits, :clean_gitlab_redis_shared_state
end
it 'sets the keys in Redis to expire automatically after 12 weeks' do
- unique_visits.track_visit(visitor1_id, target1_id)
+ unique_visits.track_visit(target1_id, values: visitor1_id)
Gitlab::Redis::SharedState.with do |redis|
redis.scan_each(match: "{#{target1_id}}-*").each do |key|
@@ -74,7 +74,7 @@ RSpec.describe Gitlab::Analytics::UniqueVisits, :clean_gitlab_redis_shared_state
invalid_target_id = "x_invalid"
expect do
- unique_visits.track_visit(visitor1_id, invalid_target_id)
+ unique_visits.track_visit(invalid_target_id, values: visitor1_id)
end.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
end
diff --git a/spec/lib/gitlab/application_context_spec.rb b/spec/lib/gitlab/application_context_spec.rb
index 0fbbc67ef6a..c4fe2ebaba9 100644
--- a/spec/lib/gitlab/application_context_spec.rb
+++ b/spec/lib/gitlab/application_context_spec.rb
@@ -27,6 +27,20 @@ RSpec.describe Gitlab::ApplicationContext do
end
end
+ describe '.with_raw_context' do
+ it 'yields the block' do
+ expect { |b| described_class.with_raw_context({}, &b) }.to yield_control
+ end
+
+ it 'passes the attributes unaltered on to labkit' do
+ attrs = { foo: :bar }
+
+ expect(Labkit::Context).to receive(:with_context).with(attrs)
+
+ described_class.with_raw_context(attrs) {}
+ end
+ end
+
describe '.push' do
it 'passes the expected context on to labkit' do
fake_proc = duck_type(:call)
@@ -138,7 +152,7 @@ RSpec.describe Gitlab::ApplicationContext do
it 'does not cause queries' do
context = described_class.new(project: create(:project), namespace: create(:group, :nested), user: create(:user))
- expect { context.use { Labkit::Context.current.to_h } }.not_to exceed_query_limit(0)
+ expect { context.use { Gitlab::ApplicationContext.current } }.not_to exceed_query_limit(0)
end
end
end
diff --git a/spec/lib/gitlab/asciidoc_spec.rb b/spec/lib/gitlab/asciidoc_spec.rb
index 3eb015a5a22..f3799c58fed 100644
--- a/spec/lib/gitlab/asciidoc_spec.rb
+++ b/spec/lib/gitlab/asciidoc_spec.rb
@@ -83,7 +83,7 @@ module Gitlab
},
'fenced code with inline script' => {
input: '```mypre"><script>alert(3)</script>',
- output: "<div>\n<div>\n<pre class=\"code highlight js-syntax-highlight plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">\"&gt;</span></code></pre>\n</div>\n</div>"
+ output: "<div>\n<div>\n<pre class=\"code highlight js-syntax-highlight language-plaintext\" lang=\"plaintext\" v-pre=\"true\"><code><span id=\"LC1\" class=\"line\" lang=\"plaintext\">\"&gt;</span></code></pre>\n</div>\n</div>"
}
}
@@ -353,7 +353,7 @@ module Gitlab
output = <<~HTML
<div>
<div>
- <pre class="code highlight js-syntax-highlight javascript" lang="javascript" v-pre="true"><code><span id="LC1" class="line" lang="javascript"><span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre>
+ <pre class="code highlight js-syntax-highlight language-javascript" lang="javascript" v-pre="true"><code><span id="LC1" class="line" lang="javascript"><span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre>
</div>
</div>
HTML
@@ -380,7 +380,7 @@ module Gitlab
<div>
<div>class.cpp</div>
<div>
- <pre class="code highlight js-syntax-highlight cpp" lang="cpp" v-pre="true"><code><span id="LC1" class="line" lang="cpp"><span class="cp">#include &lt;stdio.h&gt;</span></span>
+ <pre class="code highlight js-syntax-highlight language-cpp" lang="cpp" v-pre="true"><code><span id="LC1" class="line" lang="cpp"><span class="cp">#include &lt;stdio.h&gt;</span></span>
<span id="LC2" class="line" lang="cpp"></span>
<span id="LC3" class="line" lang="cpp"><span class="k">for</span> <span class="p">(</span><span class="kt">int</span> <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span><span class="p">;</span> <span class="n">i</span> <span class="o">&lt;</span> <span class="mi">5</span><span class="p">;</span> <span class="n">i</span><span class="o">++</span><span class="p">)</span> <span class="p">{</span></span>
<span id="LC4" class="line" lang="cpp"> <span class="n">std</span><span class="o">::</span><span class="n">cout</span><span class="o">&lt;&lt;</span><span class="s">"*"</span><span class="o">&lt;&lt;</span><span class="n">std</span><span class="o">::</span><span class="n">endl</span><span class="p">;</span></span>
diff --git a/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb b/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb
index 67ffdee0c4a..69068883096 100644
--- a/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/auth_hash_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Auth::OAuth::AuthHash do
- let(:provider) { 'ldap'.freeze }
+ let(:provider) { 'ldap' }
let(:auth_hash) do
described_class.new(
OmniAuth::AuthHash.new(
diff --git a/spec/lib/gitlab/auth/otp/strategies/devise_spec.rb b/spec/lib/gitlab/auth/otp/strategies/devise_spec.rb
index 0c88421d456..e51705bdb9c 100644
--- a/spec/lib/gitlab/auth/otp/strategies/devise_spec.rb
+++ b/spec/lib/gitlab/auth/otp/strategies/devise_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Auth::Otp::Strategies::Devise do
let_it_be(:user) { create(:user) }
+
let(:otp_code) { 42 }
subject(:validate) { described_class.new(user).validate(otp_code) }
diff --git a/spec/lib/gitlab/auth/otp/strategies/forti_authenticator_spec.rb b/spec/lib/gitlab/auth/otp/strategies/forti_authenticator_spec.rb
index 88a245b6b10..dc20df98185 100644
--- a/spec/lib/gitlab/auth/otp/strategies/forti_authenticator_spec.rb
+++ b/spec/lib/gitlab/auth/otp/strategies/forti_authenticator_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Auth::Otp::Strategies::FortiAuthenticator do
let_it_be(:user) { create(:user) }
+
let(:otp_code) { 42 }
let(:host) { 'forti_authenticator.example.com' }
diff --git a/spec/lib/gitlab/auth/otp/strategies/forti_token_cloud_spec.rb b/spec/lib/gitlab/auth/otp/strategies/forti_token_cloud_spec.rb
index 368cf98dfec..57ee53a452e 100644
--- a/spec/lib/gitlab/auth/otp/strategies/forti_token_cloud_spec.rb
+++ b/spec/lib/gitlab/auth/otp/strategies/forti_token_cloud_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Auth::Otp::Strategies::FortiTokenCloud do
let_it_be(:user) { create(:user) }
+
let(:otp_code) { 42 }
let(:url) { 'https://ftc.example.com:9696/api/v1' }
diff --git a/spec/lib/gitlab/auth_spec.rb b/spec/lib/gitlab/auth_spec.rb
index 4e4bbd1bb60..7a578ad3c90 100644
--- a/spec/lib/gitlab/auth_spec.rb
+++ b/spec/lib/gitlab/auth_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
let_it_be(:project) { create(:project) }
+
let(:gl_auth) { described_class }
describe 'constants' do
@@ -543,6 +544,7 @@ RSpec.describe Gitlab::Auth, :use_clean_rails_memory_store_caching do
context 'and belong to different projects' do
let_it_be(:other_project) { create(:project) }
+
let!(:read_registry) { create(:deploy_token, username: 'deployer', read_repository: false, projects: [project]) }
let!(:read_repository) { create(:deploy_token, username: read_registry.username, read_registry: false, projects: [other_project]) }
let(:auth_success) { Gitlab::Auth::Result.new(read_repository, other_project, :deploy_token, [:download_code]) }
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
index 50e799908c6..dbf74bd9333 100644
--- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 2020_04_20_094444 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 2021_03_13_045845 do
let(:gitlab_shell) { Gitlab::Shell.new }
let(:users) { table(:users) }
let(:snippets) { table(:snippets) }
diff --git a/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
index 7ad93c3124a..c4c0247ad3e 100644
--- a/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
+++ b/spec/lib/gitlab/background_migration/copy_column_using_background_migration_job_spec.rb
@@ -64,5 +64,13 @@ RSpec.describe Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJo
expect(test_table.where('name is NULL and name_convert_to_text is NULL').pluck(:id)).to contain_exactly(15)
expect(test_table.where("name_convert_to_text = 'no name'").count).to eq(0)
end
+
+ it 'tracks timings of queries' do
+ expect(subject.batch_metrics.timings).to be_empty
+
+ subject.perform(10, 20, table_name, 'id', sub_batch_size, 'name', 'name_convert_to_text')
+
+ expect(subject.batch_metrics.timings[:update_all]).not_to be_empty
+ end
end
end
diff --git a/spec/lib/gitlab/background_migration/migrate_pages_to_zip_storage_spec.rb b/spec/lib/gitlab/background_migration/migrate_pages_to_zip_storage_spec.rb
new file mode 100644
index 00000000000..557dd8ddee6
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/migrate_pages_to_zip_storage_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::MigratePagesToZipStorage do
+ let(:namespace) { create(:group) } # rubocop: disable RSpec/FactoriesInMigrationSpecs
+ let(:migration) { described_class.new }
+
+ describe '#perform' do
+    context 'when there is a project to migrate' do
+ let!(:project) { create_project('project') }
+
+ after do
+ FileUtils.rm_rf(project.pages_path)
+ end
+
+ it 'migrates project to zip storage' do
+ expect_next_instance_of(::Pages::MigrateFromLegacyStorageService,
+ anything,
+ ignore_invalid_entries: false,
+ mark_projects_as_not_deployed: false) do |service|
+ expect(service).to receive(:execute_for_batch).with(project.id..project.id).and_call_original
+ end
+
+ migration.perform(project.id, project.id)
+
+ expect(project.reload.pages_metadatum.pages_deployment.file.filename).to eq("_migrated.zip")
+ end
+ end
+ end
+
+ def create_project(path)
+ project = create(:project) # rubocop: disable RSpec/FactoriesInMigrationSpecs
+ project.mark_pages_as_deployed
+
+ FileUtils.mkdir_p File.join(project.pages_path, "public")
+ File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
+ f.write("Hello!")
+ end
+
+ project
+ end
+end
diff --git a/spec/lib/gitlab/bullet/exclusions_spec.rb b/spec/lib/gitlab/bullet/exclusions_spec.rb
new file mode 100644
index 00000000000..ba42156b0c4
--- /dev/null
+++ b/spec/lib/gitlab/bullet/exclusions_spec.rb
@@ -0,0 +1,155 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Bullet::Exclusions do
+ let(:config_file) do
+ file = Tempfile.new('bullet.yml')
+ File.basename(file)
+ end
+
+ let(:exclude) { [] }
+ let(:config) do
+ {
+ exclusions: {
+ abc: {
+ merge_request: '_mr_',
+ path_with_method: true,
+ exclude: exclude
+ }
+ }
+ }
+ end
+
+ before do
+ File.write(config_file, config.deep_stringify_keys.to_yaml)
+ end
+
+ after do
+ FileUtils.rm_f(config_file)
+ end
+
+ describe '#execute' do
+ subject(:executor) { described_class.new(config_file).execute }
+
+ shared_examples_for 'loads exclusion results' do
+ let(:config) { { exclusions: { abc: { exclude: exclude } } } }
+ let(:results) { [exclude] }
+
+ specify do
+ expect(executor).to match(results)
+ end
+ end
+
+ context 'with preferred method of path and method name' do
+ it_behaves_like 'loads exclusion results' do
+ let(:exclude) { %w[_path_ _method_] }
+ end
+ end
+
+ context 'with file pattern' do
+ it_behaves_like 'loads exclusion results' do
+ let(:exclude) { ['_file_pattern_'] }
+ end
+ end
+
+ context 'with file name and line range' do
+ it_behaves_like 'loads exclusion results' do
+ let(:exclude) { ['file_name.rb', 5..10] }
+ end
+ end
+
+ context 'without exclusions' do
+ it_behaves_like 'loads exclusion results' do
+ let(:exclude) { [] }
+ end
+ end
+
+ context 'without exclusions key in config' do
+ it_behaves_like 'loads exclusion results' do
+ let(:config) { {} }
+ let(:results) { [] }
+ end
+ end
+
+ context 'when config file does not exist' do
+ it 'provides an empty array for exclusions' do
+ expect(described_class.new('_some_bogus_file_').execute).to match([])
+ end
+ end
+ end
+
+ describe '#validate_paths!' do
+ context 'when validating scenarios' do
+ let(:source_file) do
+ file = Tempfile.new('bullet_test_source_file.rb')
+ File.basename(file)
+ end
+
+ subject { described_class.new(config_file).validate_paths! }
+
+ before do
+ FileUtils.touch(source_file)
+ end
+
+ after do
+ FileUtils.rm_f(source_file)
+ end
+
+ context 'when using paths with method name' do
+ let(:exclude) { [source_file, '_method_'] }
+
+ context 'when source file for exclusion exists' do
+ specify do
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ context 'when source file for exclusion does not exist' do
+ let(:exclude) { %w[_bogus_file_ _method_] }
+
+ specify do
+ expect { subject }.to raise_error(RuntimeError)
+ end
+ end
+ end
+
+ context 'when using path only' do
+ let(:exclude) { [source_file] }
+
+ context 'when source file for exclusion exists' do
+ specify do
+ expect { subject }.not_to raise_error
+ end
+ end
+
+ context 'when source file for exclusion does not exist' do
+ let(:exclude) { '_bogus_file_' }
+
+ specify do
+ expect { subject }.to raise_error(RuntimeError)
+ end
+ end
+ end
+
+ context 'when path_with_method is false for a file pattern' do
+ let(:exclude) { ['_file_pattern_'] }
+ let(:config) do
+ {
+ exclusions: {
+ abc: {
+ merge_request: '_mr_',
+ path_with_method: false,
+ exclude: exclude
+ }
+ }
+ }
+ end
+
+ specify do
+ expect { subject }.not_to raise_error
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/bullet_spec.rb b/spec/lib/gitlab/bullet_spec.rb
new file mode 100644
index 00000000000..1262a0b8bde
--- /dev/null
+++ b/spec/lib/gitlab/bullet_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Bullet do
+ describe '#enabled?' do
+ it 'is enabled' do
+ stub_env('ENABLE_BULLET', true)
+
+ expect(described_class.enabled?).to be(true)
+ end
+
+ it 'is not enabled' do
+ stub_env('ENABLE_BULLET', nil)
+
+ expect(described_class.enabled?).to be(false)
+ end
+
+ it 'is correctly aliased for #extra_logging_enabled?' do
+ expect(described_class.method(:extra_logging_enabled?).original_name).to eq(:enabled?)
+ end
+ end
+
+ describe '#configure_bullet?' do
+ context 'with ENABLE_BULLET true' do
+ before do
+ stub_env('ENABLE_BULLET', true)
+ end
+
+ it 'is configurable' do
+ expect(described_class.configure_bullet?).to be(true)
+ end
+ end
+
+ context 'with ENABLE_BULLET falsey' do
+ before do
+ stub_env('ENABLE_BULLET', nil)
+ end
+
+ it 'is not configurable' do
+ expect(described_class.configure_bullet?).to be(false)
+ end
+
+ it 'is configurable in development' do
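+        # Even with ENABLE_BULLET unset, a development Rails env allows Bullet to be configured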
+ allow(Rails).to receive_message_chain(:env, :development?).and_return(true)
+
+ expect(described_class.configure_bullet?).to be(true)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/changelog/config_spec.rb b/spec/lib/gitlab/changelog/config_spec.rb
index 51988acf3d1..2809843b832 100644
--- a/spec/lib/gitlab/changelog/config_spec.rb
+++ b/spec/lib/gitlab/changelog/config_spec.rb
@@ -37,7 +37,8 @@ RSpec.describe Gitlab::Changelog::Config do
project,
'date_format' => 'foo',
'template' => 'bar',
- 'categories' => { 'foo' => 'bar' }
+ 'categories' => { 'foo' => 'bar' },
+ 'tag_regex' => 'foo'
)
expect(config.date_format).to eq('foo')
@@ -45,6 +46,7 @@ RSpec.describe Gitlab::Changelog::Config do
.to be_instance_of(Gitlab::Changelog::AST::Expressions)
expect(config.categories).to eq({ 'foo' => 'bar' })
+ expect(config.tag_regex).to eq('foo')
end
it 'raises Error when the categories are not a Hash' do
diff --git a/spec/lib/gitlab/checks/project_created_spec.rb b/spec/lib/gitlab/checks/project_created_spec.rb
index f099f19b061..74e43b04b6b 100644
--- a/spec/lib/gitlab/checks/project_created_spec.rb
+++ b/spec/lib/gitlab/checks/project_created_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Checks::ProjectCreated, :clean_gitlab_redis_shared_state do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, namespace: user.namespace) }
+
let(:protocol) { 'http' }
let(:git_user) { user }
let(:repository) { project.repository }
diff --git a/spec/lib/gitlab/checks/project_moved_spec.rb b/spec/lib/gitlab/checks/project_moved_spec.rb
index c7dad0a91d4..469aea8d093 100644
--- a/spec/lib/gitlab/checks/project_moved_spec.rb
+++ b/spec/lib/gitlab/checks/project_moved_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :repository, :wiki_repo, namespace: user.namespace) }
+
let(:repository) { project.repository }
let(:protocol) { 'http' }
let(:git_user) { user }
@@ -101,6 +102,7 @@ RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
context 'with project snippet' do
let_it_be(:snippet) { create(:project_snippet, :repository, project: project, author: user) }
+
let(:repository) { snippet.repository }
it_behaves_like 'errors per protocol' do
@@ -111,6 +113,7 @@ RSpec.describe Gitlab::Checks::ProjectMoved, :clean_gitlab_redis_shared_state do
context 'with personal snippet' do
let_it_be(:snippet) { create(:personal_snippet, :repository, author: user) }
+
let(:repository) { snippet.repository }
it 'returns nil' do
diff --git a/spec/lib/gitlab/ci/ansi2json/style_spec.rb b/spec/lib/gitlab/ci/ansi2json/style_spec.rb
index ff70ff69aaa..87085950a9f 100644
--- a/spec/lib/gitlab/ci/ansi2json/style_spec.rb
+++ b/spec/lib/gitlab/ci/ansi2json/style_spec.rb
@@ -160,9 +160,9 @@ RSpec.describe Gitlab::Ci::Ansi2json::Style do
with_them do
it 'change the style' do
style = described_class.new
- style.update(initial_state)
+ style.update(initial_state) # rubocop:disable Rails/SaveBang
- style.update(ansi_commands)
+ style.update(ansi_commands) # rubocop:disable Rails/SaveBang
expect(style.to_s).to eq(result)
end
diff --git a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
index 179578fe0a8..d294eca7f15 100644
--- a/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/bridge_spec.rb
@@ -107,6 +107,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
stage: 'test',
only: { refs: %w[branches tags] },
variables: {},
+ job_variables: {},
+ root_variables_inheritance: true,
scheduling_type: :stage)
end
end
@@ -130,6 +132,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
stage: 'test',
only: { refs: %w[branches tags] },
variables: {},
+ job_variables: {},
+ root_variables_inheritance: true,
scheduling_type: :stage)
end
end
@@ -284,6 +288,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Bridge do
parallel: { matrix: [{ 'PROVIDER' => ['aws'], 'STACK' => %w(monitoring app1) },
{ 'PROVIDER' => ['gcp'], 'STACK' => %w(data) }] },
variables: {},
+ job_variables: {},
+ root_variables_inheritance: true,
scheduling_type: :stage
)
end
diff --git a/spec/lib/gitlab/ci/config/entry/cache_spec.rb b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
index 064990667d5..cec1c97085b 100644
--- a/spec/lib/gitlab/ci/config/entry/cache_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
@@ -13,6 +13,14 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
end
describe '#valid?' do
+ context 'with an empty hash as cache' do
+ let(:config) { {} }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
context 'when configuration is valid with a single cache' do
let(:config) { { key: 'key', paths: ["logs/"], untracked: true } }
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index a4167003987..ffcd029172a 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -663,6 +663,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
after_script: %w[cleanup],
only: { refs: %w[branches tags] },
variables: {},
+ job_variables: {},
+ root_variables_inheritance: true,
scheduling_type: :stage)
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/jobs_spec.rb b/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
index ac6b589ec6b..cb73044b62b 100644
--- a/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/jobs_spec.rb
@@ -100,6 +100,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Jobs do
stage: 'test',
trigger: { project: 'my/project' },
variables: {},
+ job_variables: {},
+ root_variables_inheritance: true,
scheduling_type: :stage
},
regular_job: {
@@ -109,6 +111,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Jobs do
script: ['something'],
stage: 'test',
variables: {},
+ job_variables: {},
+ root_variables_inheritance: true,
scheduling_type: :stage
})
end
diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
index 04e80450263..016d59e98b9 100644
--- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
@@ -382,7 +382,9 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
context 'with only job variables' do
it 'does return defined variables' do
expect(entry.value).to include(
- variables: { 'A' => 'job', 'B' => 'job' }
+ variables: { 'A' => 'job', 'B' => 'job' },
+ job_variables: { 'A' => 'job', 'B' => 'job' },
+ root_variables_inheritance: true
)
end
end
@@ -394,9 +396,11 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
).value
end
- it 'does return all variables and overwrite them' do
+ it 'does return job and root variables' do
expect(entry.value).to include(
- variables: { 'A' => 'job', 'B' => 'job', 'C' => 'root', 'D' => 'root' }
+ variables: { 'A' => 'job', 'B' => 'job', 'C' => 'root', 'D' => 'root' },
+ job_variables: { 'A' => 'job', 'B' => 'job' },
+ root_variables_inheritance: true
)
end
@@ -408,9 +412,11 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
}
end
- it 'does return only job variables' do
+ it 'does return job and root variables' do
expect(entry.value).to include(
- variables: { 'A' => 'job', 'B' => 'job' }
+ variables: { 'A' => 'job', 'B' => 'job' },
+ job_variables: { 'A' => 'job', 'B' => 'job' },
+ root_variables_inheritance: false
)
end
end
@@ -423,9 +429,11 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
}
end
- it 'does return only job variables' do
+ it 'does return job and root variables' do
expect(entry.value).to include(
- variables: { 'A' => 'job', 'B' => 'job', 'D' => 'root' }
+ variables: { 'A' => 'job', 'B' => 'job', 'D' => 'root' },
+ job_variables: { 'A' => 'job', 'B' => 'job' },
+ root_variables_inheritance: ['D']
)
end
end
@@ -493,7 +501,9 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
name: :rspec,
stage: 'test',
only: { refs: %w[branches tags] },
- variables: {}
+ variables: {},
+ job_variables: {},
+ root_variables_inheritance: true
)
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index 7b38c21788f..041eb748fc9 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -133,6 +133,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
stage: 'test',
cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
+ job_variables: {},
+ root_variables_inheritance: true,
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
@@ -147,6 +149,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
stage: 'test',
cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
+ job_variables: {},
+ root_variables_inheritance: true,
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
@@ -163,6 +167,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
cache: [{ key: "k", untracked: true, paths: ["public/"], policy: "pull-push", when: 'on_success' }],
only: { refs: %w(branches tags) },
variables: { 'VAR' => 'job', 'VAR2' => 'val 2' },
+ job_variables: { 'VAR' => 'job' },
+ root_variables_inheritance: true,
after_script: [],
ignore: false,
scheduling_type: :stage }
@@ -188,6 +194,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
stage: 'test',
cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
+ job_variables: {},
+ root_variables_inheritance: true,
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
@@ -202,6 +210,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
stage: 'test',
cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
variables: { 'VAR' => 'root', 'VAR2' => 'val 2' },
+ job_variables: {},
+ root_variables_inheritance: true,
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
@@ -218,6 +228,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
cache: { key: "k", untracked: true, paths: ["public/"], policy: "pull-push", when: 'on_success' },
only: { refs: %w(branches tags) },
variables: { 'VAR' => 'job', 'VAR2' => 'val 2' },
+ job_variables: { 'VAR' => 'job' },
+ root_variables_inheritance: true,
after_script: [],
ignore: false,
scheduling_type: :stage }
@@ -267,6 +279,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
stage: 'test',
cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
variables: { 'VAR' => 'root' },
+ job_variables: {},
+ root_variables_inheritance: true,
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
@@ -279,6 +293,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
stage: 'test',
cache: { key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' },
variables: { 'VAR' => 'job' },
+ job_variables: { 'VAR' => 'job' },
+ root_variables_inheritance: true,
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
@@ -311,6 +327,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
stage: 'test',
cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
variables: { 'VAR' => 'root' },
+ job_variables: {},
+ root_variables_inheritance: true,
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
@@ -323,6 +341,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
stage: 'test',
cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
variables: { 'VAR' => 'job' },
+ job_variables: { 'VAR' => 'job' },
+ root_variables_inheritance: true,
ignore: false,
after_script: ['make clean'],
only: { refs: %w[branches tags] },
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index 99f546ceb37..e5b008a482e 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -324,5 +324,39 @@ RSpec.describe Gitlab::Ci::Config::External::Mapper do
end
end
end
+
+ context 'when local file path has wildcard' do
+ let(:project) { create(:project, :repository) }
+
+ let(:values) do
+ { include: 'myfolder/*.yml' }
+ end
+
+ before do
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:search_files_by_wildcard_path).with('myfolder/*.yml', '123456') do
+ ['myfolder/file1.yml', 'myfolder/file2.yml']
+ end
+ end
+ end
+
+ it 'includes the matched local files' do
+ expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Local),
+ an_instance_of(Gitlab::Ci::Config::External::File::Local))
+
+ expect(subject.map(&:location)).to contain_exactly('myfolder/file1.yml', 'myfolder/file2.yml')
+ end
+
+ context 'when the FF ci_wildcard_file_paths is disabled' do
+ before do
+ stub_feature_flags(ci_wildcard_file_paths: false)
+ end
+
+      it 'returns an error message when it cannot find any file' do
+ expect(subject).to contain_exactly(an_instance_of(Gitlab::Ci::Config::External::File::Local))
+ expect(subject[0].errors).to eq(['Local file `myfolder/*.yml` does not exist!'])
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/external/processor_spec.rb b/spec/lib/gitlab/ci/config/external/processor_spec.rb
index d2d7116bb12..d657c3e943f 100644
--- a/spec/lib/gitlab/ci/config/external/processor_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/processor_spec.rb
@@ -366,5 +366,40 @@ RSpec.describe Gitlab::Ci::Config::External::Processor do
expect(output.keys).to match_array([:image, :my_build, :my_test])
end
end
+
+ context 'when local file path has wildcard' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:values) do
+ { include: 'myfolder/*.yml', image: 'ruby:2.7' }
+ end
+
+ before do
+ allow_next_instance_of(Repository) do |repository|
+ allow(repository).to receive(:search_files_by_wildcard_path).with('myfolder/*.yml', sha) do
+ ['myfolder/file1.yml', 'myfolder/file2.yml']
+ end
+
+ allow(repository).to receive(:blob_data_at).with(sha, 'myfolder/file1.yml') do
+ <<~HEREDOC
+ my_build:
+ script: echo Hello World
+ HEREDOC
+ end
+
+ allow(repository).to receive(:blob_data_at).with(sha, 'myfolder/file2.yml') do
+ <<~HEREDOC
+ my_test:
+ script: echo Hello World
+ HEREDOC
+ end
+ end
+ end
+
+ it 'fetches the matched files' do
+ output = processor.perform
+ expect(output.keys).to match_array([:image, :my_build, :my_test])
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb b/spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb
index fbf86927bd9..e5f0341c5fe 100644
--- a/spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb
+++ b/spec/lib/gitlab/ci/config/normalizer/matrix_strategy_spec.rb
@@ -1,8 +1,12 @@
# frozen_string_literal: true
require 'fast_spec_helper'
+require 'support/helpers/stubbed_feature'
+require 'support/helpers/stub_feature_flags'
RSpec.describe Gitlab::Ci::Config::Normalizer::MatrixStrategy do
+ include StubFeatureFlags
+
describe '.applies_to?' do
subject { described_class.applies_to?(config) }
@@ -49,6 +53,10 @@ RSpec.describe Gitlab::Ci::Config::Normalizer::MatrixStrategy do
variables: {
'PROVIDER' => 'aws',
'STACK' => 'app1'
+ },
+ job_variables: {
+ 'PROVIDER' => 'aws',
+ 'STACK' => 'app1'
}
},
{
@@ -58,6 +66,10 @@ RSpec.describe Gitlab::Ci::Config::Normalizer::MatrixStrategy do
variables: {
'PROVIDER' => 'aws',
'STACK' => 'app2'
+ },
+ job_variables: {
+ 'PROVIDER' => 'aws',
+ 'STACK' => 'app2'
}
},
{
@@ -67,6 +79,10 @@ RSpec.describe Gitlab::Ci::Config::Normalizer::MatrixStrategy do
variables: {
'PROVIDER' => 'ovh',
'STACK' => 'app'
+ },
+ job_variables: {
+ 'PROVIDER' => 'ovh',
+ 'STACK' => 'app'
}
},
{
@@ -76,6 +92,10 @@ RSpec.describe Gitlab::Ci::Config::Normalizer::MatrixStrategy do
variables: {
'PROVIDER' => 'gcp',
'STACK' => 'app'
+ },
+ job_variables: {
+ 'PROVIDER' => 'gcp',
+ 'STACK' => 'app'
}
}
]
diff --git a/spec/lib/gitlab/ci/lint_spec.rb b/spec/lib/gitlab/ci/lint_spec.rb
index 67324c09d86..aaa3a7a8b9d 100644
--- a/spec/lib/gitlab/ci/lint_spec.rb
+++ b/spec/lib/gitlab/ci/lint_spec.rb
@@ -92,7 +92,7 @@ RSpec.describe Gitlab::Ci::Lint do
it 'sets merged_config' do
root_config = YAML.safe_load(content, [Symbol])
included_config = YAML.safe_load(included_content, [Symbol])
- expected_config = included_config.merge(root_config).except(:include)
+ expected_config = included_config.merge(root_config).except(:include).deep_stringify_keys
expect(subject.merged_yaml).to eq(expected_config.to_yaml)
end
diff --git a/spec/lib/gitlab/ci/parsers/codequality/code_climate_spec.rb b/spec/lib/gitlab/ci/parsers/codequality/code_climate_spec.rb
index c6b8cf2a985..6a08e8f0b7f 100644
--- a/spec/lib/gitlab/ci/parsers/codequality/code_climate_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/codequality/code_climate_spec.rb
@@ -131,7 +131,6 @@ RSpec.describe Gitlab::Ci::Parsers::Codequality::CodeClimate do
expect { parse }.not_to raise_error
expect(codequality_report.degradations_count).to eq(0)
- expect(codequality_report.error_message).to eq("Invalid degradation format: The property '#/' did not contain a required property of 'location'")
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
index 9ca5aeeea58..900dfec38e2 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/command_spec.rb
@@ -321,4 +321,25 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Command do
it { is_expected.to be_falsey }
end
end
+
+ describe '#increment_pipeline_failure_reason_counter' do
+ let(:command) { described_class.new }
+ let(:reason) { :size_limit_exceeded }
+
+ subject { command.increment_pipeline_failure_reason_counter(reason) }
+
+ it 'increments the error metric' do
+ counter = Gitlab::Metrics.counter(:gitlab_ci_pipeline_failure_reasons, 'desc')
+ expect { subject }.to change { counter.get(reason: reason.to_s) }.by(1)
+ end
+
+ context 'when the reason is nil' do
+ let(:reason) { nil }
+
+ it 'increments the error metric with unknown_failure' do
+ counter = Gitlab::Metrics.counter(:gitlab_ci_pipeline_failure_reasons, 'desc')
+ expect { subject }.to change { counter.get(reason: 'unknown_failure') }.by(1)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
index 4ae51ac8bf9..e30a78546af 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/evaluate_workflow_rules_spec.rb
@@ -16,8 +16,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
describe '#perform!' do
context 'when pipeline has been skipped by workflow configuration' do
before do
- allow(step).to receive(:workflow_passed?)
- .and_return(false)
+ allow(step).to receive(:workflow_rules_result)
+ .and_return(
+ double(pass?: false, variables: {})
+ )
step.perform!
end
@@ -33,12 +35,18 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
it 'attaches an error to the pipeline' do
expect(pipeline.errors[:base]).to include('Pipeline filtered out by workflow rules.')
end
+
+ it 'saves workflow_rules_result' do
+ expect(command.workflow_rules_result.variables).to eq({})
+ end
end
context 'when pipeline has not been skipped by workflow configuration' do
before do
- allow(step).to receive(:workflow_passed?)
- .and_return(true)
+ allow(step).to receive(:workflow_rules_result)
+ .and_return(
+ double(pass?: true, variables: { 'VAR1' => 'val2' })
+ )
step.perform!
end
@@ -55,6 +63,10 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules do
it 'attaches no errors' do
expect(pipeline.errors).to be_empty
end
+
+ it 'saves workflow_rules_result' do
+ expect(command.workflow_rules_result.variables).to eq({ 'VAR1' => 'val2' })
+ end
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb
new file mode 100644
index 00000000000..bcea6462790
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/helpers_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Pipeline::Chain::Helpers do
+ let(:helper_class) do
+ Class.new do
+ include Gitlab::Ci::Pipeline::Chain::Helpers
+
+ attr_accessor :pipeline, :command
+
+ def initialize(pipeline, command)
+ self.pipeline = pipeline
+ self.command = command
+ end
+ end
+ end
+
+ subject(:helper) { helper_class.new(pipeline, command) }
+
+ let(:pipeline) { build(:ci_empty_pipeline) }
+ let(:command) { double(save_incompleted: true) }
+ let(:message) { 'message' }
+
+ describe '.error' do
+ shared_examples 'error function' do
+ specify do
+ expect(pipeline).to receive(:drop!).with(drop_reason).and_call_original
+ expect(pipeline).to receive(:add_error_message).with(message).and_call_original
+ expect(pipeline).to receive(:ensure_project_iid!).twice.and_call_original
+
+ subject.error(message, config_error: config_error, drop_reason: drop_reason)
+
+ expect(pipeline.yaml_errors).to eq(yaml_error)
+ expect(pipeline.errors[:base]).to include(message)
+ end
+ end
+
+ context 'when given a drop reason' do
+ context 'when config error is true' do
+ context 'sets the yaml error and overrides the drop reason' do
+ let(:drop_reason) { :config_error }
+ let(:config_error) { true }
+ let(:yaml_error) { message }
+
+ it_behaves_like "error function"
+ end
+ end
+
+ context 'when config error is false' do
+ context 'does not set the yaml error or override the drop reason' do
+ let(:drop_reason) { :size_limit_exceeded }
+ let(:config_error) { false }
+ let(:yaml_error) { nil }
+
+ it_behaves_like "error function"
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb
index 78363be7f36..23cdec61bb3 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/limit/deployments_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::Deployments do
let(:save_incompleted) { false }
let(:command) do
- double(:command,
+ Gitlab::Ci::Pipeline::Chain::Command.new(
project: project,
pipeline_seed: pipeline_seed,
save_incompleted: save_incompleted
@@ -49,6 +49,11 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::Deployments do
expect(pipeline.deployments_limit_exceeded?).to be true
end
+
+    it 'increments the error metric' do
+ counter = Gitlab::Metrics.counter(:gitlab_ci_pipeline_failure_reasons, 'desc')
+ expect { perform }.to change { counter.get(reason: 'deployments_limit_exceeded') }.by(1)
+ end
end
context 'when not saving incomplete pipelines' do
@@ -71,6 +76,12 @@ RSpec.describe ::Gitlab::Ci::Pipeline::Chain::Limit::Deployments do
expect(pipeline.errors.messages).to include(base: ['Pipeline has too many deployments! Requested 2, but the limit is 1.'])
end
+
+      it 'calls increment_pipeline_failure_reason_counter' do
+        expect(command).to receive(:increment_pipeline_failure_reason_counter).with(:deployments_limit_exceeded)
+
+ perform
+ end
end
it 'logs the error' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/pipeline/process_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/pipeline/process_spec.rb
new file mode 100644
index 00000000000..3885cea2d1b
--- /dev/null
+++ b/spec/lib/gitlab/ci/pipeline/chain/pipeline/process_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Ci::Pipeline::Chain::Pipeline::Process do
+ let_it_be(:project) { build(:project) }
+ let_it_be(:user) { build(:user) }
+ let_it_be(:pipeline) { build(:ci_pipeline, project: project, id: 42) }
+
+ let_it_be(:command) do
+ Gitlab::Ci::Pipeline::Chain::Command.new(project: project, current_user: user)
+ end
+
+ let(:step) { described_class.new(pipeline, command) }
+
+ describe '#perform!' do
+ subject(:perform) { step.perform! }
+
+ it 'schedules a job to process the pipeline' do
+ expect(Ci::InitialPipelineProcessWorker)
+ .to receive(:perform_async)
+ .with(42)
+
+ perform
+ end
+ end
+
+ describe '#break?' do
+ it { expect(step.break?).to be_falsey }
+ end
+end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
index 5506b079d0f..62de4d2e96d 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
@@ -22,6 +22,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate do
[
Gitlab::Ci::Pipeline::Chain::Config::Content.new(pipeline, command),
Gitlab::Ci::Pipeline::Chain::Config::Process.new(pipeline, command),
+ Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules.new(pipeline, command),
Gitlab::Ci::Pipeline::Chain::SeedBlock.new(pipeline, command),
Gitlab::Ci::Pipeline::Chain::Seed.new(pipeline, command)
]
@@ -95,6 +96,11 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate do
it 'wastes pipeline iid' do
expect(InternalId.ci_pipelines.where(project_id: project.id).last.last_value).to be > 0
end
+
+ it 'increments the error metric' do
+ counter = Gitlab::Metrics.counter(:gitlab_ci_pipeline_failure_reasons, 'desc')
+ expect { run_chain }.to change { counter.get(reason: 'unknown_failure') }.by(1)
+ end
end
describe 'pipeline protect' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
index 80013cab6ee..264076859cb 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/seed_spec.rb
@@ -3,24 +3,16 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
- let(:project) { create(:project, :repository) }
- let(:user) { create(:user, developer_projects: [project]) }
- let(:seeds_block) { }
-
- let(:command) do
- Gitlab::Ci::Pipeline::Chain::Command.new(
- project: project,
- current_user: user,
- origin_ref: 'master',
- seeds_block: seeds_block)
- end
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user, developer_projects: [project]) }
+ let(:seeds_block) { }
+ let(:command) { initialize_command }
let(:pipeline) { build(:ci_pipeline, project: project) }
describe '#perform!' do
before do
stub_ci_pipeline_yaml_file(YAML.dump(config))
- run_chain
end
let(:config) do
@@ -28,23 +20,25 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
subject(:run_chain) do
- [
- Gitlab::Ci::Pipeline::Chain::Config::Content.new(pipeline, command),
- Gitlab::Ci::Pipeline::Chain::Config::Process.new(pipeline, command)
- ].map(&:perform!)
-
- described_class.new(pipeline, command).perform!
+ run_previous_chain(pipeline, command)
+ perform_seed(pipeline, command)
end
it 'allocates next IID' do
+ run_chain
+
expect(pipeline.iid).to be_present
end
it 'ensures ci_ref' do
+ run_chain
+
expect(pipeline.ci_ref).to be_present
end
it 'sets the seeds in the command object' do
+ run_chain
+
expect(command.pipeline_seed).to be_a(Gitlab::Ci::Pipeline::Seed::Pipeline)
expect(command.pipeline_seed.size).to eq 1
end
@@ -59,6 +53,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
it 'correctly fabricates stages and builds' do
+ run_chain
+
seed = command.pipeline_seed
expect(seed.stages.size).to eq 2
@@ -84,6 +80,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
it 'returns pipeline seed with jobs only assigned to master' do
+ run_chain
+
seed = command.pipeline_seed
expect(seed.size).to eq 1
@@ -103,6 +101,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
it 'returns pipeline seed with jobs only assigned to schedules' do
+ run_chain
+
seed = command.pipeline_seed
expect(seed.size).to eq 1
@@ -130,6 +130,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
let(:pipeline) { build(:ci_pipeline, project: project) }
it 'returns seeds for kubernetes dependent job' do
+ run_chain
+
seed = command.pipeline_seed
expect(seed.size).to eq 2
@@ -141,6 +143,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
context 'when kubernetes is not active' do
it 'does not return seeds for kubernetes dependent job' do
+ run_chain
+
seed = command.pipeline_seed
expect(seed.size).to eq 1
@@ -158,6 +162,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
it 'returns stage seeds only when variables expression is truthy' do
+ run_chain
+
seed = command.pipeline_seed
expect(seed.size).to eq 1
@@ -171,8 +177,125 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Seed do
end
it 'does not execute the block' do
+ run_chain
+
expect(pipeline.variables.size).to eq(0)
end
end
+
+ describe '#root_variables' do
+ let(:config) do
+ {
+ variables: { VAR1: 'var 1' },
+ workflow: {
+ rules: [{ if: '$CI_PIPELINE_SOURCE',
+ variables: { VAR1: 'overridden var 1' } },
+ { when: 'always' }]
+ },
+ rspec: { script: 'rake' }
+ }
+ end
+
+ let(:rspec_variables) { command.pipeline_seed.stages[0].statuses[0].variables.to_hash }
+
+    it 'sends root variables overridden by rules' do
+ run_chain
+
+ expect(rspec_variables['VAR1']).to eq('overridden var 1')
+ end
+
+ context 'when the FF ci_workflow_rules_variables is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_rules_variables: false)
+ end
+
+ it 'sends root variable' do
+ run_chain
+
+ expect(rspec_variables['VAR1']).to eq('var 1')
+ end
+ end
+ end
+
+ context 'N+1 queries' do
+ it 'avoids N+1 queries when calculating variables of jobs' do
+ pipeline1, command1 = prepare_pipeline1
+ pipeline2, command2 = prepare_pipeline2
+
+ control = ActiveRecord::QueryRecorder.new do
+ perform_seed(pipeline1, command1)
+ end
+
+ expect { perform_seed(pipeline2, command2) }.not_to exceed_query_limit(
+ control.count + expected_extra_queries
+ )
+ end
+
+ private
+
+ def prepare_pipeline1
+ config1 = { build: { stage: 'build', script: 'build' } }
+ stub_ci_pipeline_yaml_file(YAML.dump(config1))
+ pipeline1 = build(:ci_pipeline, project: project)
+ command1 = initialize_command
+
+ run_previous_chain(pipeline1, command1)
+
+ [pipeline1, command1]
+ end
+
+ def prepare_pipeline2
+ config2 = { build1: { stage: 'build', script: 'build1' },
+ build2: { stage: 'build', script: 'build2' },
+ test: { stage: 'build', script: 'test' } }
+ stub_ci_pipeline_yaml_file(YAML.dump(config2))
+ pipeline2 = build(:ci_pipeline, project: project)
+ command2 = initialize_command
+
+ run_previous_chain(pipeline2, command2)
+
+ [pipeline2, command2]
+ end
+
+ def expected_extra_queries
+ extra_jobs = 2
+ non_handled_sql_queries = 3
+
+ # 1. Ci::Build Load () SELECT "ci_builds".* FROM "ci_builds"
+ # WHERE "ci_builds"."type" = 'Ci::Build'
+ # AND "ci_builds"."commit_id" IS NULL
+ # AND ("ci_builds"."retried" = FALSE OR "ci_builds"."retried" IS NULL)
+ # AND (stage_idx < 1)
+ # 2. Ci::InstanceVariable Load => `Ci::InstanceVariable#cached_data` => already cached with `fetch_memory_cache`
+ # 3. Ci::Variable Load => `Project#ci_variables_for` => already cached with `Gitlab::SafeRequestStore`
+
+ extra_jobs * non_handled_sql_queries
+ end
+ end
+
+ private
+
+ def run_previous_chain(pipeline, command)
+ [
+ Gitlab::Ci::Pipeline::Chain::Config::Content.new(pipeline, command),
+ Gitlab::Ci::Pipeline::Chain::Config::Process.new(pipeline, command),
+ Gitlab::Ci::Pipeline::Chain::EvaluateWorkflowRules.new(pipeline, command)
+ ].map(&:perform!)
+ end
+
+ def perform_seed(pipeline, command)
+ described_class.new(pipeline, command).perform!
+ end
+ end
+
+ private
+
+ def initialize_command
+ Gitlab::Ci::Pipeline::Chain::Command.new(
+ project: project,
+ current_user: user,
+ origin_ref: 'master',
+ seeds_block: seeds_block
+ )
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
index e55281f9705..caf3a053c4e 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/validate/external_spec.rb
@@ -3,8 +3,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
let(:pipeline) { build(:ci_empty_pipeline, user: user, project: project) }
let!(:step) { described_class.new(pipeline, command) }
@@ -42,6 +42,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
end
let(:save_incompleted) { true }
+ let(:dot_com) { true }
let(:command) do
Gitlab::Ci::Pipeline::Chain::Command.new(
project: project, current_user: user, yaml_processor_result: yaml_processor_result, save_incompleted: save_incompleted
@@ -51,11 +52,79 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
describe '#perform!' do
subject(:perform!) { step.perform! }
- context 'when validation returns true' do
+ let(:validation_service_url) { 'https://validation-service.external/' }
+
+ before do
+ stub_env('EXTERNAL_VALIDATION_SERVICE_URL', validation_service_url)
+ allow(Gitlab).to receive(:com?).and_return(dot_com)
+ allow(Labkit::Correlation::CorrelationId).to receive(:current_id).and_return('correlation-id')
+ end
+
+ context 'with configuration values in ApplicationSetting' do
+ let(:alternate_validation_service_url) { 'https://alternate-validation-service.external/' }
+ let(:validation_service_token) { 'SECURE_TOKEN' }
+ let(:shorter_timeout) { described_class::DEFAULT_VALIDATION_REQUEST_TIMEOUT - 1 }
+
before do
- allow(step).to receive(:validate_external).and_return(true)
+ stub_env('EXTERNAL_VALIDATION_SERVICE_TOKEN', 'TOKEN_IN_ENV')
+ allow(Gitlab::CurrentSettings.current_application_settings).to receive(:external_pipeline_validation_service_timeout).and_return(shorter_timeout)
+ allow(Gitlab::CurrentSettings.current_application_settings).to receive(:external_pipeline_validation_service_token).and_return(validation_service_token)
+ allow(Gitlab::CurrentSettings.current_application_settings).to receive(:external_pipeline_validation_service_url).and_return(alternate_validation_service_url)
+ end
+
+ it 'uses those values rather than env vars or defaults' do
+ expect(::Gitlab::HTTP).to receive(:post) do |url, params|
+ expect(url).to eq(alternate_validation_service_url)
+ expect(params[:timeout]).to eq(shorter_timeout)
+ expect(params[:headers]).to include('X-Gitlab-Token' => validation_service_token)
+ end
+
+ perform!
+ end
+ end
+
+ it 'respects the defined payload schema' do
+ expect(::Gitlab::HTTP).to receive(:post) do |_url, params|
+ expect(params[:body]).to match_schema('/external_validation')
+ expect(params[:timeout]).to eq(described_class::DEFAULT_VALIDATION_REQUEST_TIMEOUT)
+ expect(params[:headers]).to eq({ 'X-Gitlab-Correlation-id' => 'correlation-id' })
+ end
+
+ perform!
+ end
+
+ context 'with EXTERNAL_VALIDATION_SERVICE_TIMEOUT defined' do
+ before do
+ stub_env('EXTERNAL_VALIDATION_SERVICE_TIMEOUT', validation_service_timeout)
+ end
+
+ context 'with valid value' do
+ let(:validation_service_timeout) { '1' }
+
+ it 'uses defined timeout' do
+ expect(::Gitlab::HTTP).to receive(:post) do |_url, params|
+ expect(params[:timeout]).to eq(1)
+ end
+
+ perform!
+ end
+ end
+
+ context 'with invalid value' do
+ let(:validation_service_timeout) { '??' }
+
+ it 'uses default timeout' do
+ expect(::Gitlab::HTTP).to receive(:post) do |_url, params|
+ expect(params[:timeout]).to eq(described_class::DEFAULT_VALIDATION_REQUEST_TIMEOUT)
+ end
+
+ perform!
+ end
end
+ end
+ shared_examples 'successful external authorization' do
it 'does not drop the pipeline' do
perform!
@@ -76,9 +145,117 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
end
end
- context 'when validation return false' do
+ context 'when EXTERNAL_VALIDATION_SERVICE_TOKEN is set' do
+ before do
+ stub_env('EXTERNAL_VALIDATION_SERVICE_TOKEN', '123')
+ end
+
+ it 'passes token in X-Gitlab-Token header' do
+ expect(::Gitlab::HTTP).to receive(:post) do |_url, params|
+ expect(params[:headers]).to include({ 'X-Gitlab-Token' => '123' })
+ end
+
+ perform!
+ end
+ end
+
+ context 'when validation returns 200 OK' do
+ before do
+ stub_request(:post, validation_service_url).to_return(status: 200, body: "{}")
+ end
+
+ it_behaves_like 'successful external authorization'
+ end
+
+ context 'when validation returns 404 Not Found' do
before do
- allow(step).to receive(:validate_external).and_return(false)
+ stub_request(:post, validation_service_url).to_return(status: 404, body: "{}")
+ end
+
+ it_behaves_like 'successful external authorization'
+ end
+
+ context 'when validation returns 500 Internal Server Error' do
+ before do
+ stub_request(:post, validation_service_url).to_return(status: 500, body: "{}")
+ end
+
+ it_behaves_like 'successful external authorization'
+ end
+
+ context 'when validation raises exceptions' do
+ before do
+ stub_request(:post, validation_service_url).to_raise(Net::OpenTimeout)
+ end
+
+ it_behaves_like 'successful external authorization'
+
+ it 'logs exceptions' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception)
+ .with(instance_of(Net::OpenTimeout), { project_id: project.id })
+
+ perform!
+ end
+ end
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(ci_external_validation_service: false)
+ stub_request(:post, validation_service_url)
+ end
+
+ it 'does not drop the pipeline' do
+ perform!
+
+ expect(pipeline.status).not_to eq('failed')
+ expect(pipeline.errors).to be_empty
+ end
+
+ it 'does not break the chain' do
+ perform!
+
+ expect(step.break?).to be false
+ end
+
+ it 'does not make requests' do
+ perform!
+
+ expect(WebMock).not_to have_requested(:post, validation_service_url)
+ end
+ end
+
+ context 'when not on .com' do
+ let(:dot_com) { false }
+
+ before do
+ stub_feature_flags(ci_external_validation_service: false)
+ stub_request(:post, validation_service_url).to_return(status: 404, body: "{}")
+ end
+
+ it 'drops the pipeline' do
+ perform!
+
+ expect(pipeline.status).to eq('failed')
+ expect(pipeline).to be_persisted
+ expect(pipeline.errors.to_a).to include('External validation failed')
+ end
+
+ it 'breaks the chain' do
+ perform!
+
+ expect(step.break?).to be true
+ end
+
+ it 'logs the authorization' do
+ expect(Gitlab::AppLogger).to receive(:info).with(message: 'Pipeline not authorized', project_id: project.id, user_id: user.id)
+
+ perform!
+ end
+ end
+
+ context 'when validation returns 406 Not Acceptable' do
+ before do
+ stub_request(:post, validation_service_url).to_return(status: 406, body: "{}")
end
it 'drops the pipeline' do
@@ -126,16 +303,4 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Validate::External do
end
end
end
-
- describe '#validation_service_payload' do
- subject(:validation_service_payload) { step.send(:validation_service_payload, pipeline, command.yaml_processor_result.stages_attributes) }
-
- it 'respects the defined schema' do
- expect(validation_service_payload).to match_schema('/external_validation')
- end
-
- it 'does not fire sql queries' do
- expect { validation_service_payload }.not_to exceed_query_limit(1)
- end
- end
end
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
index 7ec6949f852..f97935feb86 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build_spec.rb
@@ -6,10 +6,12 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:head_sha) { project.repository.head_commit.id }
let(:pipeline) { build(:ci_empty_pipeline, project: project, sha: head_sha) }
+ let(:root_variables) { [] }
+ let(:seed_context) { double(pipeline: pipeline, root_variables: root_variables) }
let(:attributes) { { name: 'rspec', ref: 'master', scheduling_type: :stage } }
let(:previous_stages) { [] }
- let(:seed_build) { described_class.new(pipeline, attributes, previous_stages) }
+ let(:seed_build) { described_class.new(seed_context, attributes, previous_stages) }
describe '#attributes' do
subject { seed_build.attributes }
@@ -75,8 +77,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
let(:attributes) do
{ name: 'rspec',
ref: 'master',
- yaml_variables: [{ key: 'VAR1', value: 'var 1', public: true },
- { key: 'VAR2', value: 'var 2', public: true }],
+ job_variables: [{ key: 'VAR1', value: 'var 1', public: true },
+ { key: 'VAR2', value: 'var 2', public: true }],
rules: [{ if: '$VAR == null', variables: { VAR1: 'new var 1', VAR3: 'var 3' } }] }
end
@@ -301,6 +303,133 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
it { is_expected.to match a_hash_including(options: { allow_failure_criteria: nil }) }
end
end
+
+ context 'with workflow:rules:[variables:]' do
+ let(:attributes) do
+ { name: 'rspec',
+ ref: 'master',
+ yaml_variables: [{ key: 'VAR2', value: 'var 2', public: true },
+ { key: 'VAR3', value: 'var 3', public: true }],
+ job_variables: [{ key: 'VAR2', value: 'var 2', public: true },
+ { key: 'VAR3', value: 'var 3', public: true }],
+ root_variables_inheritance: root_variables_inheritance }
+ end
+
+ context 'when the pipeline has variables' do
+ let(:root_variables) do
+ [{ key: 'VAR1', value: 'var overridden pipeline 1', public: true },
+ { key: 'VAR2', value: 'var pipeline 2', public: true },
+ { key: 'VAR3', value: 'var pipeline 3', public: true },
+ { key: 'VAR4', value: 'new var pipeline 4', public: true }]
+ end
+
+ context 'when root_variables_inheritance is true' do
+ let(:root_variables_inheritance) { true }
+
+ it 'returns calculated yaml variables' do
+ expect(subject[:yaml_variables]).to match_array(
+ [{ key: 'VAR1', value: 'var overridden pipeline 1', public: true },
+ { key: 'VAR2', value: 'var 2', public: true },
+ { key: 'VAR3', value: 'var 3', public: true },
+ { key: 'VAR4', value: 'new var pipeline 4', public: true }]
+ )
+ end
+
+ context 'when FF ci_workflow_rules_variables is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_rules_variables: false)
+ end
+
+ it 'returns existing yaml variables' do
+ expect(subject[:yaml_variables]).to match_array(
+ [{ key: 'VAR2', value: 'var 2', public: true },
+ { key: 'VAR3', value: 'var 3', public: true }]
+ )
+ end
+ end
+ end
+
+ context 'when root_variables_inheritance is false' do
+ let(:root_variables_inheritance) { false }
+
+ it 'returns job variables' do
+ expect(subject[:yaml_variables]).to match_array(
+ [{ key: 'VAR2', value: 'var 2', public: true },
+ { key: 'VAR3', value: 'var 3', public: true }]
+ )
+ end
+ end
+
+ context 'when root_variables_inheritance is an array' do
+ let(:root_variables_inheritance) { %w(VAR1 VAR2 VAR3) }
+
+ it 'returns calculated yaml variables' do
+ expect(subject[:yaml_variables]).to match_array(
+ [{ key: 'VAR1', value: 'var overridden pipeline 1', public: true },
+ { key: 'VAR2', value: 'var 2', public: true },
+ { key: 'VAR3', value: 'var 3', public: true }]
+ )
+ end
+ end
+ end
+
+      context 'when the pipeline does not have variables' do
+ let(:root_variables_inheritance) { true }
+
+ it 'returns seed yaml variables' do
+ expect(subject[:yaml_variables]).to match_array(
+ [{ key: 'VAR2', value: 'var 2', public: true },
+ { key: 'VAR3', value: 'var 3', public: true }])
+ end
+ end
+ end
+
+ context 'when the job rule depends on variables' do
+ let(:attributes) do
+ { name: 'rspec',
+ ref: 'master',
+ yaml_variables: [{ key: 'VAR1', value: 'var 1', public: true }],
+ job_variables: [{ key: 'VAR1', value: 'var 1', public: true }],
+ root_variables_inheritance: root_variables_inheritance,
+ rules: rules }
+ end
+
+ let(:root_variables_inheritance) { true }
+
+ context 'when the rules use job variables' do
+ let(:rules) do
+ [{ if: '$VAR1 == "var 1"', variables: { VAR1: 'overridden var 1', VAR2: 'new var 2' } }]
+ end
+
+ it 'recalculates the variables' do
+ expect(subject[:yaml_variables]).to contain_exactly({ key: 'VAR1', value: 'overridden var 1', public: true },
+ { key: 'VAR2', value: 'new var 2', public: true })
+ end
+ end
+
+ context 'when the rules use root variables' do
+ let(:root_variables) do
+ [{ key: 'VAR2', value: 'var pipeline 2', public: true }]
+ end
+
+ let(:rules) do
+ [{ if: '$VAR2 == "var pipeline 2"', variables: { VAR1: 'overridden var 1', VAR2: 'overridden var 2' } }]
+ end
+
+ it 'recalculates the variables' do
+ expect(subject[:yaml_variables]).to contain_exactly({ key: 'VAR1', value: 'overridden var 1', public: true },
+ { key: 'VAR2', value: 'overridden var 2', public: true })
+ end
+
+ context 'when the root_variables_inheritance is false' do
+ let(:root_variables_inheritance) { false }
+
+ it 'does not recalculate the variables' do
+ expect(subject[:yaml_variables]).to contain_exactly({ key: 'VAR1', value: 'var 1', public: true })
+ end
+ end
+ end
+ end
end
describe '#bridge?' do
@@ -377,7 +506,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
it 'does not have environment' do
expect(subject).not_to be_has_environment
expect(subject.environment).to be_nil
- expect(subject.metadata).to be_nil
+ expect(subject.metadata&.expanded_environment_name).to be_nil
expect(Environment.exists?(name: expected_environment_name)).to eq(false)
end
end
@@ -1080,7 +1209,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build do
end
let(:stage_seed) do
- Gitlab::Ci::Pipeline::Seed::Stage.new(pipeline, stage_attributes, [])
+ Gitlab::Ci::Pipeline::Seed::Stage.new(seed_context, stage_attributes, [])
end
let(:previous_stages) { [stage_seed] }
diff --git a/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
index 860b07647bd..21be8660def 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb
@@ -6,6 +6,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Pipeline do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let(:seed_context) { double(pipeline: pipeline, root_variables: []) }
+
let(:stages_attributes) do
[
{
@@ -29,7 +31,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Pipeline do
end
subject(:seed) do
- described_class.new(pipeline, stages_attributes)
+ described_class.new(seed_context, stages_attributes)
end
describe '#stages' do
diff --git a/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb
index 4b9db9fa6c6..5b04d2abd88 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Stage do
let(:project) { create(:project, :repository) }
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
let(:previous_stages) { [] }
+ let(:seed_context) { double(pipeline: pipeline, root_variables: []) }
let(:attributes) do
{ name: 'test',
@@ -16,7 +17,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Stage do
end
subject do
- described_class.new(pipeline, attributes, previous_stages)
+ described_class.new(seed_context, attributes, previous_stages)
end
describe '#size' do
diff --git a/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb b/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
index b322e55cb5a..8378d096fcf 100644
--- a/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
+++ b/spec/lib/gitlab/ci/reports/codequality_reports_comparer_spec.rb
@@ -6,15 +6,17 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
let(:comparer) { described_class.new(base_report, head_report) }
let(:base_report) { Gitlab::Ci::Reports::CodequalityReports.new }
let(:head_report) { Gitlab::Ci::Reports::CodequalityReports.new }
- let(:degradation_1) { build(:codequality_degradation_1) }
- let(:degradation_2) { build(:codequality_degradation_2) }
+ let(:major_degradation) { build(:codequality_degradation, :major) }
+  let(:minor_degradation) { build(:codequality_degradation, :minor) }
+ let(:critical_degradation) { build(:codequality_degradation, :critical) }
+ let(:blocker_degradation) { build(:codequality_degradation, :blocker) }
describe '#status' do
subject(:report_status) { comparer.status }
context 'when head report has an error' do
before do
- head_report.add_degradation(degradation_1)
+ head_report.add_degradation(major_degradation)
end
it 'returns status failed' do
@@ -50,7 +52,7 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
context 'when head report has an error' do
before do
- head_report.add_degradation(degradation_1)
+ head_report.add_degradation(major_degradation)
end
it 'returns the number of new errors' do
@@ -70,8 +72,8 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
context 'when base report has an error and head has a different error' do
before do
- base_report.add_degradation(degradation_1)
- head_report.add_degradation(degradation_2)
+ base_report.add_degradation(major_degradation)
+ head_report.add_degradation(minor_degradation)
end
it 'counts the base report error as resolved' do
@@ -81,7 +83,7 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
context 'when base report has errors head has no errors' do
before do
- base_report.add_degradation(degradation_1)
+ base_report.add_degradation(major_degradation)
end
it 'counts the base report errors as resolved' do
@@ -91,8 +93,8 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
context 'when base report has errors and head has the same error' do
before do
- base_report.add_degradation(degradation_1)
- head_report.add_degradation(degradation_1)
+ base_report.add_degradation(major_degradation)
+ head_report.add_degradation(major_degradation)
end
it 'returns zero' do
@@ -102,7 +104,7 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
context 'when base report does not have errors and head has errors' do
before do
- head_report.add_degradation(degradation_1)
+ head_report.add_degradation(major_degradation)
end
it 'returns zero' do
@@ -124,7 +126,7 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
context 'when base report has an error' do
before do
- base_report.add_degradation(degradation_1)
+ base_report.add_degradation(major_degradation)
end
it 'returns zero' do
@@ -134,7 +136,7 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
context 'when head report has an error' do
before do
- head_report.add_degradation(degradation_1)
+ head_report.add_degradation(major_degradation)
end
it 'includes the head report error in the count' do
@@ -144,8 +146,8 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
context 'when base report has errors and head report has errors' do
before do
- base_report.add_degradation(degradation_1)
- head_report.add_degradation(degradation_2)
+ base_report.add_degradation(major_degradation)
+ head_report.add_degradation(minor_degradation)
end
it 'includes errors in the count' do
@@ -155,9 +157,9 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
context 'when base report has errors and head report has the same error' do
before do
- base_report.add_degradation(degradation_1)
- head_report.add_degradation(degradation_1)
- head_report.add_degradation(degradation_2)
+ base_report.add_degradation(major_degradation)
+ head_report.add_degradation(major_degradation)
+ head_report.add_degradation(minor_degradation)
end
it 'includes errors in the count' do
@@ -179,20 +181,28 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
context 'when base report has errors and head has the same error' do
before do
- base_report.add_degradation(degradation_1)
- head_report.add_degradation(degradation_1)
- head_report.add_degradation(degradation_2)
- end
-
- it 'includes the base report errors' do
- expect(existing_errors).to contain_exactly(degradation_1)
+ base_report.add_degradation(major_degradation)
+ base_report.add_degradation(critical_degradation)
+ base_report.add_degradation(blocker_degradation)
+ head_report.add_degradation(critical_degradation)
+ head_report.add_degradation(blocker_degradation)
+ head_report.add_degradation(major_degradation)
+ head_report.add_degradation(minor_degradation)
+ end
+
+ it 'includes the base report errors sorted by severity' do
+ expect(existing_errors).to eq([
+ blocker_degradation,
+ critical_degradation,
+ major_degradation
+ ])
end
end
context 'when base report has errors and head has a different error' do
before do
- base_report.add_degradation(degradation_1)
- head_report.add_degradation(degradation_2)
+ base_report.add_degradation(major_degradation)
+ head_report.add_degradation(minor_degradation)
end
it 'returns an empty array' do
@@ -202,7 +212,7 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
context 'when base report does not have errors and head has errors' do
before do
- head_report.add_degradation(degradation_1)
+ head_report.add_degradation(major_degradation)
end
it 'returns an empty array' do
@@ -224,19 +234,25 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
context 'when base report has errors and head has more errors' do
before do
- base_report.add_degradation(degradation_1)
- head_report.add_degradation(degradation_1)
- head_report.add_degradation(degradation_2)
+ base_report.add_degradation(major_degradation)
+ head_report.add_degradation(critical_degradation)
+ head_report.add_degradation(minor_degradation)
+ head_report.add_degradation(blocker_degradation)
+ head_report.add_degradation(major_degradation)
end
- it 'includes errors not found in the base report' do
- expect(new_errors).to eq([degradation_2])
+ it 'includes errors not found in the base report sorted by severity' do
+ expect(new_errors).to eq([
+ blocker_degradation,
+ critical_degradation,
+ minor_degradation
+ ])
end
end
context 'when base report has an error and head has no errors' do
before do
- base_report.add_degradation(degradation_1)
+ base_report.add_degradation(major_degradation)
end
it 'returns an empty array' do
@@ -246,11 +262,11 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
context 'when base report does not have errors and head has errors' do
before do
- head_report.add_degradation(degradation_1)
+ head_report.add_degradation(major_degradation)
end
it 'returns the head report error' do
- expect(new_errors).to eq([degradation_1])
+ expect(new_errors).to eq([major_degradation])
end
end
@@ -268,9 +284,9 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
context 'when base report errors are still found in the head report' do
before do
- base_report.add_degradation(degradation_1)
- head_report.add_degradation(degradation_1)
- head_report.add_degradation(degradation_2)
+ base_report.add_degradation(major_degradation)
+ head_report.add_degradation(major_degradation)
+ head_report.add_degradation(minor_degradation)
end
it 'returns an empty array' do
@@ -280,18 +296,25 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReportsComparer do
context 'when base report has errors and head has a different error' do
before do
- base_report.add_degradation(degradation_1)
- head_report.add_degradation(degradation_2)
+ base_report.add_degradation(major_degradation)
+ base_report.add_degradation(minor_degradation)
+ base_report.add_degradation(critical_degradation)
+ base_report.add_degradation(blocker_degradation)
+ head_report.add_degradation(major_degradation)
end
- it 'returns the base report error' do
- expect(resolved_errors).to eq([degradation_1])
+ it 'returns the base report errors not found in the head report, sorted by severity' do
+ expect(resolved_errors).to eq([
+ blocker_degradation,
+ critical_degradation,
+ minor_degradation
+ ])
end
end
context 'when base report does not have errors and head has errors' do
before do
- head_report.add_degradation(degradation_1)
+ head_report.add_degradation(major_degradation)
end
it 'returns an empty array' do
diff --git a/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb b/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb
index ae9b2f2c62b..3b0eaffc54e 100644
--- a/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb
+++ b/spec/lib/gitlab/ci/reports/codequality_reports_spec.rb
@@ -34,8 +34,6 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReports do
it 'sets location as an error' do
codequality_report.add_degradation(invalid_degradation)
-
- expect(codequality_report.error_message).to eq("Invalid degradation format: The property '#/' did not contain a required property of 'location'")
end
end
end
@@ -79,4 +77,36 @@ RSpec.describe Gitlab::Ci::Reports::CodequalityReports do
end
end
end
+
+ describe '#sort_degradations!' do
+ let(:major) { build(:codequality_degradation, :major) }
+ let(:minor) { build(:codequality_degradation, :minor) }
+ let(:blocker) { build(:codequality_degradation, :blocker) }
+ let(:info) { build(:codequality_degradation, :info) }
+ let(:major_2) { build(:codequality_degradation, :major) }
+ let(:critical) { build(:codequality_degradation, :critical) }
+ let(:codequality_report) { described_class.new }
+
+ before do
+ codequality_report.add_degradation(major)
+ codequality_report.add_degradation(minor)
+ codequality_report.add_degradation(blocker)
+ codequality_report.add_degradation(major_2)
+ codequality_report.add_degradation(info)
+ codequality_report.add_degradation(critical)
+
+ codequality_report.sort_degradations!
+ end
+
+ it 'sorts degradations based on severity' do
+ expect(codequality_report.degradations.values).to eq([
+ blocker,
+ critical,
+ major,
+ major_2,
+ minor,
+ info
+ ])
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb b/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb
index 831bc5e9f37..9ee55177ca0 100644
--- a/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb
+++ b/spec/lib/gitlab/ci/reports/test_failure_history_spec.rb
@@ -13,9 +13,9 @@ RSpec.describe Gitlab::Ci::Reports::TestFailureHistory, :aggregate_failures do
subject(:load_history) { described_class.new([failed_rspec, failed_java], project).load! }
before do
- allow(Ci::TestCaseFailure)
+ allow(Ci::UnitTestFailure)
.to receive(:recent_failures_count)
- .with(project: project, test_case_keys: [failed_rspec.key, failed_java.key])
+ .with(project: project, unit_test_keys: [failed_rspec.key, failed_java.key])
.and_return(
failed_rspec.key => 2,
failed_java.key => 1
diff --git a/spec/lib/gitlab/ci/runner_instructions_spec.rb b/spec/lib/gitlab/ci/runner_instructions_spec.rb
index d1020026fe6..f872c631a50 100644
--- a/spec/lib/gitlab/ci/runner_instructions_spec.rb
+++ b/spec/lib/gitlab/ci/runner_instructions_spec.rb
@@ -6,7 +6,6 @@ RSpec.describe Gitlab::Ci::RunnerInstructions do
using RSpec::Parameterized::TableSyntax
let(:params) { {} }
- let(:user) { create(:user) }
describe 'OS' do
Gitlab::Ci::RunnerInstructions::OS.each do |name, subject|
@@ -37,7 +36,7 @@ RSpec.describe Gitlab::Ci::RunnerInstructions do
end
describe '#install_script' do
- subject { described_class.new(current_user: user, **params) }
+ subject { described_class.new(**params) }
context 'invalid params' do
where(:current_params, :expected_error_message) do
@@ -106,117 +105,18 @@ RSpec.describe Gitlab::Ci::RunnerInstructions do
end
end
- context 'group' do
- let(:group) { create(:group) }
-
- subject { described_class.new(current_user: user, group: group, **params) }
-
- context 'user is owner' do
- before do
- group.add_owner(user)
- end
-
- with_them do
- let(:params) { { os: commands.each_key.first, arch: 'foo' } }
-
- it 'have correct configurations' do
- result = subject.register_command
-
- expect(result).to include("#{commands[commands.each_key.first]} register")
- expect(result).to include("--registration-token #{group.runners_token}")
- expect(result).to include("--url #{Gitlab::Routing.url_helpers.root_url(only_path: false)}")
- end
- end
- end
-
- context 'user is not owner' do
- where(:user_permission) do
- [:maintainer, :developer, :reporter, :guest]
- end
-
- with_them do
- before do
- create(:group_member, user_permission, group: group, user: user)
- end
-
- it 'raises error' do
- result = subject.register_command
-
- expect(result).to be_nil
- expect(subject.errors).to include("Gitlab::Access::AccessDeniedError")
- end
- end
- end
- end
-
- context 'project' do
- let(:project) { create(:project) }
-
- subject { described_class.new(current_user: user, project: project, **params) }
-
- context 'user is maintainer' do
- before do
- project.add_maintainer(user)
- end
-
- with_them do
- let(:params) { { os: commands.each_key.first, arch: 'foo' } }
-
- it 'have correct configurations' do
- result = subject.register_command
-
- expect(result).to include("#{commands[commands.each_key.first]} register")
- expect(result).to include("--registration-token #{project.runners_token}")
- expect(result).to include("--url #{Gitlab::Routing.url_helpers.root_url(only_path: false)}")
- end
- end
- end
-
- context 'user is not maintainer' do
- where(:user_permission) do
- [:developer, :reporter, :guest]
- end
-
- with_them do
- before do
- create(:project_member, user_permission, project: project, user: user)
- end
-
- it 'raises error' do
- result = subject.register_command
-
- expect(result).to be_nil
- expect(subject.errors).to include("Gitlab::Access::AccessDeniedError")
- end
- end
- end
- end
-
context 'instance' do
- subject { described_class.new(current_user: user, **params) }
-
- context 'user is admin' do
- let(:user) { create(:user, :admin) }
-
- with_them do
- let(:params) { { os: commands.each_key.first, arch: 'foo' } }
+ subject { described_class.new(**params) }
- it 'have correct configurations' do
- result = subject.register_command
-
- expect(result).to include("#{commands[commands.each_key.first]} register")
- expect(result).to include("--registration-token #{Gitlab::CurrentSettings.runners_registration_token}")
- expect(result).to include("--url #{Gitlab::Routing.url_helpers.root_url(only_path: false)}")
- end
- end
- end
+ with_them do
+ let(:params) { { os: commands.each_key.first, arch: 'foo' } }
- context 'user is not admin' do
- it 'raises error' do
+        it 'has correct configurations' do
result = subject.register_command
- expect(result).to be_nil
- expect(subject.errors).to include("Gitlab::Access::AccessDeniedError")
+ expect(result).to include("#{commands[commands.each_key.first]} register")
+ expect(result).to include("--registration-token $REGISTRATION_TOKEN")
+ expect(result).to include("--url #{Gitlab::Routing.url_helpers.root_url(only_path: false)}")
end
end
end
diff --git a/spec/lib/gitlab/ci/status/build/common_spec.rb b/spec/lib/gitlab/ci/status/build/common_spec.rb
index 924ee5ee1a4..c4e83c1796d 100644
--- a/spec/lib/gitlab/ci/status/build/common_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/common_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe Gitlab::Ci::Status::Build::Common do
context 'when user does not have access to read build' do
before do
- project.update(public_builds: false)
+ project.update!(public_builds: false)
end
it { is_expected.not_to have_details }
diff --git a/spec/lib/gitlab/ci/status/composite_spec.rb b/spec/lib/gitlab/ci/status/composite_spec.rb
index 543cfe874ca..2b9523bd83d 100644
--- a/spec/lib/gitlab/ci/status/composite_spec.rb
+++ b/spec/lib/gitlab/ci/status/composite_spec.rb
@@ -6,13 +6,13 @@ RSpec.describe Gitlab::Ci::Status::Composite do
let_it_be(:pipeline) { create(:ci_pipeline) }
before_all do
- @statuses = Ci::HasStatus::STATUSES_ENUM.map do |status, idx|
+ @statuses = Ci::HasStatus::STATUSES_ENUM.to_h do |status, idx|
[status, create(:ci_build, pipeline: pipeline, status: status, importing: true)]
- end.to_h
+ end
- @statuses_with_allow_failure = Ci::HasStatus::STATUSES_ENUM.map do |status, idx|
+ @statuses_with_allow_failure = Ci::HasStatus::STATUSES_ENUM.to_h do |status, idx|
[status, create(:ci_build, pipeline: pipeline, status: status, allow_failure: true, importing: true)]
- end.to_h
+ end
end
describe '#status' do
diff --git a/spec/lib/gitlab/ci/trace_spec.rb b/spec/lib/gitlab/ci/trace_spec.rb
index 597e4ca9b03..0fe7c731f27 100644
--- a/spec/lib/gitlab/ci/trace_spec.rb
+++ b/spec/lib/gitlab/ci/trace_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state, factory_default: :keep do
let_it_be(:project) { create_default(:project).freeze }
- let_it_be_with_reload(:build) { create(:ci_build) }
+ let_it_be_with_reload(:build) { create(:ci_build, :success) }
let(:trace) { described_class.new(build) }
describe "associations" do
@@ -63,9 +63,7 @@ RSpec.describe Gitlab::Ci::Trace, :clean_gitlab_redis_shared_state, factory_defa
describe '#update_interval' do
context 'it is not being watched' do
- it 'returns 30 seconds' do
- expect(trace.update_interval).to eq(30.seconds)
- end
+ it { expect(trace.update_interval).to eq(60.seconds) }
end
context 'it is being watched' do
diff --git a/spec/lib/gitlab/ci/variables/helpers_spec.rb b/spec/lib/gitlab/ci/variables/helpers_spec.rb
index b45abf8c0e1..f13b334c10e 100644
--- a/spec/lib/gitlab/ci/variables/helpers_spec.rb
+++ b/spec/lib/gitlab/ci/variables/helpers_spec.rb
@@ -100,4 +100,50 @@ RSpec.describe Gitlab::Ci::Variables::Helpers do
it { is_expected.to eq(result) }
end
end
+
+ describe '.inherit_yaml_variables' do
+ let(:from) do
+ [{ key: 'key1', value: 'value1' },
+ { key: 'key2', value: 'value2' }]
+ end
+
+ let(:to) do
+ [{ key: 'key2', value: 'value22' },
+ { key: 'key3', value: 'value3' }]
+ end
+
+ let(:inheritance) { true }
+
+ let(:result) do
+ [{ key: 'key1', value: 'value1', public: true },
+ { key: 'key2', value: 'value22', public: true },
+ { key: 'key3', value: 'value3', public: true }]
+ end
+
+ subject { described_class.inherit_yaml_variables(from: from, to: to, inheritance: inheritance) }
+
+ it { is_expected.to eq(result) }
+
+ context 'when inheritance is false' do
+ let(:inheritance) { false }
+
+ let(:result) do
+ [{ key: 'key2', value: 'value22', public: true },
+ { key: 'key3', value: 'value3', public: true }]
+ end
+
+ it { is_expected.to eq(result) }
+ end
+
+ context 'when inheritance is array' do
+ let(:inheritance) { ['key2'] }
+
+ let(:result) do
+ [{ key: 'key2', value: 'value22', public: true },
+ { key: 'key3', value: 'value3', public: true }]
+ end
+
+ it { is_expected.to eq(result) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/ci/yaml_processor/result_spec.rb b/spec/lib/gitlab/ci/yaml_processor/result_spec.rb
index 7e3cd7ec254..e345cd4de9b 100644
--- a/spec/lib/gitlab/ci/yaml_processor/result_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor/result_spec.rb
@@ -24,7 +24,7 @@ module Gitlab
let(:included_yml) do
YAML.dump(
- another_test: { stage: 'test', script: 'echo 2' }
+ { another_test: { stage: 'test', script: 'echo 2' } }.deep_stringify_keys
)
end
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index 5462a587d16..ad94dfc9160 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -43,6 +43,8 @@ module Gitlab
allow_failure: false,
when: "on_success",
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :stage
})
end
@@ -74,6 +76,8 @@ module Gitlab
allow_failure: false,
when: 'on_success',
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :stage
})
end
@@ -111,7 +115,9 @@ module Gitlab
tag_list: %w[A B],
allow_failure: false,
when: "on_success",
- yaml_variables: []
+ yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true
})
end
end
@@ -158,6 +164,8 @@ module Gitlab
allow_failure: false,
when: "on_success",
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :stage
})
end
@@ -347,6 +355,8 @@ module Gitlab
allow_failure: false,
when: "on_success",
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :stage,
options: { script: ["rspec"] },
only: { refs: ["branches"] } }] },
@@ -359,6 +369,8 @@ module Gitlab
allow_failure: false,
when: "on_success",
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :stage,
options: { script: ["cap prod"] },
only: { refs: ["tags"] } }] },
@@ -372,7 +384,7 @@ module Gitlab
end
end
- describe '#workflow_attributes' do
+ describe 'workflow attributes' do
context 'with disallowed workflow:variables' do
let(:config) do
<<-EOYML
@@ -403,11 +415,11 @@ module Gitlab
end
it 'parses the workflow:rules configuration' do
- expect(subject.workflow_attributes[:rules]).to contain_exactly({ if: '$VAR == "value"' })
+ expect(subject.workflow_rules).to contain_exactly({ if: '$VAR == "value"' })
end
- it 'parses the root:variables as yaml_variables:' do
- expect(subject.workflow_attributes[:yaml_variables])
+ it 'parses the root:variables as #root_variables' do
+ expect(subject.root_variables)
.to contain_exactly({ key: 'SUPPORTED', value: 'parsed', public: true })
end
end
@@ -425,11 +437,11 @@ module Gitlab
end
it 'parses the workflow:rules configuration' do
- expect(subject.workflow_attributes[:rules]).to contain_exactly({ if: '$VAR == "value"' })
+ expect(subject.workflow_rules).to contain_exactly({ if: '$VAR == "value"' })
end
- it 'parses the root:variables as yaml_variables:' do
- expect(subject.workflow_attributes[:yaml_variables]).to eq([])
+ it 'parses the root:variables as #root_variables' do
+ expect(subject.root_variables).to eq([])
end
end
@@ -445,11 +457,11 @@ module Gitlab
end
it 'parses the workflow:rules configuration' do
- expect(subject.workflow_attributes[:rules]).to be_nil
+ expect(subject.workflow_rules).to be_nil
end
- it 'parses the root:variables as yaml_variables:' do
- expect(subject.workflow_attributes[:yaml_variables])
+ it 'parses the root:variables as #root_variables' do
+ expect(subject.root_variables)
.to contain_exactly({ key: 'SUPPORTED', value: 'parsed', public: true })
end
end
@@ -463,11 +475,11 @@ module Gitlab
end
it 'parses the workflow:rules configuration' do
- expect(subject.workflow_attributes[:rules]).to be_nil
+ expect(subject.workflow_rules).to be_nil
end
- it 'parses the root:variables as yaml_variables:' do
- expect(subject.workflow_attributes[:yaml_variables]).to eq([])
+ it 'parses the root:variables as #root_variables' do
+ expect(subject.root_variables).to eq([])
end
end
end
@@ -853,6 +865,8 @@ module Gitlab
allow_failure: false,
when: "on_success",
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :stage
})
end
@@ -861,7 +875,7 @@ module Gitlab
config = YAML.dump({ image: "ruby:2.7",
services: ["mysql"],
before_script: ["pwd"],
- rspec: { image: { name: "ruby:2.5", entrypoint: ["/usr/local/bin/init", "run"] },
+ rspec: { image: { name: "ruby:3.0", entrypoint: ["/usr/local/bin/init", "run"] },
services: [{ name: "postgresql", alias: "db-pg",
entrypoint: ["/usr/local/bin/init", "run"],
command: ["/usr/local/bin/init", "run"] }, "docker:dind"],
@@ -878,7 +892,7 @@ module Gitlab
options: {
before_script: ["pwd"],
script: ["rspec"],
- image: { name: "ruby:2.5", entrypoint: ["/usr/local/bin/init", "run"] },
+ image: { name: "ruby:3.0", entrypoint: ["/usr/local/bin/init", "run"] },
services: [{ name: "postgresql", alias: "db-pg", entrypoint: ["/usr/local/bin/init", "run"],
command: ["/usr/local/bin/init", "run"] },
{ name: "docker:dind" }]
@@ -886,6 +900,8 @@ module Gitlab
allow_failure: false,
when: "on_success",
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :stage
})
end
@@ -915,6 +931,8 @@ module Gitlab
allow_failure: false,
when: "on_success",
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :stage
})
end
@@ -923,7 +941,7 @@ module Gitlab
config = YAML.dump({ image: "ruby:2.7",
services: ["mysql"],
before_script: ["pwd"],
- rspec: { image: "ruby:2.5", services: ["postgresql", "docker:dind"], script: "rspec" } })
+ rspec: { image: "ruby:3.0", services: ["postgresql", "docker:dind"], script: "rspec" } })
config_processor = Gitlab::Ci::YamlProcessor.new(config).execute
@@ -936,12 +954,14 @@ module Gitlab
options: {
before_script: ["pwd"],
script: ["rspec"],
- image: { name: "ruby:2.5" },
+ image: { name: "ruby:3.0" },
services: [{ name: "postgresql" }, { name: "docker:dind" }]
},
allow_failure: false,
when: "on_success",
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :stage
})
end
@@ -951,7 +971,10 @@ module Gitlab
describe 'Variables' do
subject { Gitlab::Ci::YamlProcessor.new(YAML.dump(config)).execute }
- let(:build_variables) { subject.builds.first[:yaml_variables] }
+ let(:build) { subject.builds.first }
+ let(:yaml_variables) { build[:yaml_variables] }
+ let(:job_variables) { build[:job_variables] }
+ let(:root_variables_inheritance) { build[:root_variables_inheritance] }
context 'when global variables are defined' do
let(:variables) do
@@ -967,10 +990,12 @@ module Gitlab
end
it 'returns global variables' do
- expect(build_variables).to contain_exactly(
+ expect(yaml_variables).to contain_exactly(
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
+ expect(job_variables).to eq([])
+ expect(root_variables_inheritance).to eq(true)
end
end
@@ -979,7 +1004,7 @@ module Gitlab
{ 'VAR1' => 'global1', 'VAR3' => 'global3', 'VAR4' => 'global4' }
end
- let(:job_variables) do
+ let(:build_variables) do
{ 'VAR1' => 'value1', 'VAR2' => 'value2' }
end
@@ -987,20 +1012,25 @@ module Gitlab
{
before_script: ['pwd'],
variables: global_variables,
- rspec: { script: 'rspec', variables: job_variables, inherit: inherit }
+ rspec: { script: 'rspec', variables: build_variables, inherit: inherit }
}
end
context 'when no inheritance is specified' do
let(:inherit) { }
- it 'returns all unique variables' do
- expect(build_variables).to contain_exactly(
- { key: 'VAR4', value: 'global4', public: true },
+ it 'returns all variables' do
+ expect(yaml_variables).to contain_exactly(
+ { key: 'VAR1', value: 'value1', public: true },
+ { key: 'VAR2', value: 'value2', public: true },
{ key: 'VAR3', value: 'global3', public: true },
+ { key: 'VAR4', value: 'global4', public: true }
+ )
+ expect(job_variables).to contain_exactly(
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
+ expect(root_variables_inheritance).to eq(true)
end
end
@@ -1008,22 +1038,32 @@ module Gitlab
let(:inherit) { { variables: false } }
it 'does not inherit variables' do
- expect(build_variables).to contain_exactly(
+ expect(yaml_variables).to contain_exactly(
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
+ expect(job_variables).to contain_exactly(
+ { key: 'VAR1', value: 'value1', public: true },
+ { key: 'VAR2', value: 'value2', public: true }
+ )
+ expect(root_variables_inheritance).to eq(false)
end
end
context 'when specific variables are to be inherited' do
let(:inherit) { { variables: %w[VAR1 VAR4] } }
- it 'returns all unique variables and inherits only specified variables' do
- expect(build_variables).to contain_exactly(
- { key: 'VAR4', value: 'global4', public: true },
+ it 'returns all variables and inherits only specified variables' do
+ expect(yaml_variables).to contain_exactly(
+ { key: 'VAR1', value: 'value1', public: true },
+ { key: 'VAR2', value: 'value2', public: true },
+ { key: 'VAR4', value: 'global4', public: true }
+ )
+ expect(job_variables).to contain_exactly(
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
+ expect(root_variables_inheritance).to eq(%w[VAR1 VAR4])
end
end
end
@@ -1042,10 +1082,15 @@ module Gitlab
end
it 'returns job variables' do
- expect(build_variables).to contain_exactly(
+ expect(yaml_variables).to contain_exactly(
+ { key: 'VAR1', value: 'value1', public: true },
+ { key: 'VAR2', value: 'value2', public: true }
+ )
+ expect(job_variables).to contain_exactly(
{ key: 'VAR1', value: 'value1', public: true },
{ key: 'VAR2', value: 'value2', public: true }
)
+ expect(root_variables_inheritance).to eq(true)
end
end
@@ -1068,8 +1113,11 @@ module Gitlab
# When variables config is empty, we assume this is a valid
# configuration, see issue #18775
#
- expect(build_variables).to be_an_instance_of(Array)
- expect(build_variables).to be_empty
+ expect(yaml_variables).to be_an_instance_of(Array)
+ expect(yaml_variables).to be_empty
+
+ expect(job_variables).to eq([])
+ expect(root_variables_inheritance).to eq(true)
end
end
end
@@ -1084,8 +1132,11 @@ module Gitlab
end
it 'returns empty array' do
- expect(build_variables).to be_an_instance_of(Array)
- expect(build_variables).to be_empty
+ expect(yaml_variables).to be_an_instance_of(Array)
+ expect(yaml_variables).to be_empty
+
+ expect(job_variables).to eq([])
+ expect(root_variables_inheritance).to eq(true)
end
end
end
@@ -1717,6 +1768,8 @@ module Gitlab
when: "on_success",
allow_failure: false,
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :stage
})
end
@@ -2080,6 +2133,8 @@ module Gitlab
when: "on_success",
allow_failure: false,
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :stage
)
expect(subject.builds[4]).to eq(
@@ -2095,6 +2150,8 @@ module Gitlab
when: "on_success",
allow_failure: false,
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :dag
)
end
@@ -2122,6 +2179,8 @@ module Gitlab
when: "on_success",
allow_failure: false,
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :stage
)
expect(subject.builds[4]).to eq(
@@ -2139,6 +2198,8 @@ module Gitlab
when: "on_success",
allow_failure: false,
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :dag
)
end
@@ -2162,6 +2223,8 @@ module Gitlab
when: "on_success",
allow_failure: false,
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :dag
)
end
@@ -2193,6 +2256,8 @@ module Gitlab
when: "on_success",
allow_failure: false,
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :dag
)
end
@@ -2391,6 +2456,8 @@ module Gitlab
when: "on_success",
allow_failure: false,
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :stage
})
end
@@ -2438,6 +2505,8 @@ module Gitlab
when: "on_success",
allow_failure: false,
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :stage
})
expect(subject.second).to eq({
@@ -2451,6 +2520,8 @@ module Gitlab
when: "on_success",
allow_failure: false,
yaml_variables: [],
+ job_variables: [],
+ root_variables_inheritance: true,
scheduling_type: :stage
})
end
diff --git a/spec/lib/gitlab/composer/version_index_spec.rb b/spec/lib/gitlab/composer/version_index_spec.rb
index 7b0ed703f42..a4d016636aa 100644
--- a/spec/lib/gitlab/composer/version_index_spec.rb
+++ b/spec/lib/gitlab/composer/version_index_spec.rb
@@ -27,6 +27,11 @@ RSpec.describe Gitlab::Composer::VersionIndex do
'type' => 'zip',
'url' => "http://localhost/api/v4/projects/#{project.id}/packages/composer/archives/#{package.name}.zip?sha=#{branch.target}"
},
+ 'source' => {
+ 'reference' => branch.target,
+ 'type' => 'git',
+ 'url' => project.http_url_to_repo
+ },
'name' => package.name,
'uid' => package.id,
'version' => package.version
diff --git a/spec/lib/gitlab/conflict/file_spec.rb b/spec/lib/gitlab/conflict/file_spec.rb
index bb9bee763d8..46e5334cd81 100644
--- a/spec/lib/gitlab/conflict/file_spec.rb
+++ b/spec/lib/gitlab/conflict/file_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe Gitlab::Conflict::File do
let(:section_keys) { conflict_file.sections.map { |section| section[:id] }.compact }
context 'when resolving everything to the same side' do
- let(:resolution_hash) { section_keys.map { |key| [key, 'head'] }.to_h }
+ let(:resolution_hash) { section_keys.to_h { |key| [key, 'head'] } }
let(:resolved_lines) { conflict_file.resolve_lines(resolution_hash) }
let(:expected_lines) { conflict_file.lines.reject { |line| line.type == 'old' } }
@@ -54,8 +54,8 @@ RSpec.describe Gitlab::Conflict::File do
end
it 'raises ResolutionError when passed a hash without resolutions for all sections' do
- empty_hash = section_keys.map { |key| [key, nil] }.to_h
- invalid_hash = section_keys.map { |key| [key, 'invalid'] }.to_h
+ empty_hash = section_keys.to_h { |key| [key, nil] }
+ invalid_hash = section_keys.to_h { |key| [key, 'invalid'] }
expect { conflict_file.resolve_lines({}) }
.to raise_error(Gitlab::Git::Conflict::Resolver::ResolutionError)
diff --git a/spec/lib/gitlab/crypto_helper_spec.rb b/spec/lib/gitlab/crypto_helper_spec.rb
index 024564ea213..616a37a4cb9 100644
--- a/spec/lib/gitlab/crypto_helper_spec.rb
+++ b/spec/lib/gitlab/crypto_helper_spec.rb
@@ -20,22 +20,24 @@ RSpec.describe Gitlab::CryptoHelper do
expect(encrypted).not_to include "\n"
end
- it 'does not save hashed token with iv value in database' do
- expect { described_class.aes256_gcm_encrypt('some-value') }.not_to change { TokenWithIv.count }
- end
-
it 'encrypts using static iv' do
expect(Encryptor).to receive(:encrypt).with(described_class::AES256_GCM_OPTIONS.merge(value: 'some-value', iv: described_class::AES256_GCM_IV_STATIC)).and_return('hashed_value')
described_class.aes256_gcm_encrypt('some-value')
end
- end
- describe '.aes256_gcm_decrypt' do
- before do
- stub_feature_flags(dynamic_nonce_creation: false)
+ context 'with provided iv' do
+ let(:iv) { create_nonce }
+
+ it 'encrypts using provided iv' do
+ expect(Encryptor).to receive(:encrypt).with(described_class::AES256_GCM_OPTIONS.merge(value: 'some-value', iv: iv)).and_return('hashed_value')
+
+ described_class.aes256_gcm_encrypt('some-value', nonce: iv)
+ end
end
+ end
+ describe '.aes256_gcm_decrypt' do
context 'when token was encrypted using static nonce' do
let(:encrypted) { described_class.aes256_gcm_encrypt('some-value', nonce: described_class::AES256_GCM_IV_STATIC) }
@@ -50,54 +52,22 @@ RSpec.describe Gitlab::CryptoHelper do
expect(decrypted).to eq 'some-value'
end
-
- it 'does not save hashed token with iv value in database' do
- expect { described_class.aes256_gcm_decrypt(encrypted) }.not_to change { TokenWithIv.count }
- end
-
- context 'with feature flag switched on' do
- before do
- stub_feature_flags(dynamic_nonce_creation: true)
- end
-
- it 'correctly decrypts encrypted string' do
- decrypted = described_class.aes256_gcm_decrypt(encrypted)
-
- expect(decrypted).to eq 'some-value'
- end
- end
end
context 'when token was encrypted using random nonce' do
let(:value) { 'random-value' }
-
- # for compatibility with tokens encrypted using dynamic nonce
- let!(:encrypted) do
- iv = create_nonce
- encrypted_token = described_class.create_encrypted_token(value, iv)
- TokenWithIv.create!(hashed_token: Digest::SHA256.digest(encrypted_token), hashed_plaintext_token: Digest::SHA256.digest(encrypted_token), iv: iv)
- encrypted_token
- end
-
- before do
- stub_feature_flags(dynamic_nonce_creation: true)
- end
+ let(:iv) { create_nonce }
+ let(:encrypted) { described_class.aes256_gcm_encrypt(value, nonce: iv) }
it 'correctly decrypts encrypted string' do
- decrypted = described_class.aes256_gcm_decrypt(encrypted)
+ decrypted = described_class.aes256_gcm_decrypt(encrypted, nonce: iv)
expect(decrypted).to eq value
end
-
- it 'does not save hashed token with iv value in database' do
- expect { described_class.aes256_gcm_decrypt(encrypted) }.not_to change { TokenWithIv.count }
- end
end
end
def create_nonce
- cipher = OpenSSL::Cipher.new('aes-256-gcm')
- cipher.encrypt # Required before '#random_iv' can be called
- cipher.random_iv # Ensures that the IV is the correct length respective to the algorithm used.
+ ::Digest::SHA256.hexdigest('my-value').bytes.take(TokenAuthenticatableStrategies::EncryptionHelper::NONCE_SIZE).pack('c*')
end
end
diff --git a/spec/lib/gitlab/data_builder/build_spec.rb b/spec/lib/gitlab/data_builder/build_spec.rb
index ab1728414bb..932238f281e 100644
--- a/spec/lib/gitlab/data_builder/build_spec.rb
+++ b/spec/lib/gitlab/data_builder/build_spec.rb
@@ -19,6 +19,9 @@ RSpec.describe Gitlab::DataBuilder::Build do
it { expect(data[:tag]).to eq(build.tag) }
it { expect(data[:build_id]).to eq(build.id) }
it { expect(data[:build_status]).to eq(build.status) }
+ it { expect(data[:build_created_at]).to eq(build.created_at) }
+ it { expect(data[:build_started_at]).to eq(build.started_at) }
+ it { expect(data[:build_finished_at]).to eq(build.finished_at) }
it { expect(data[:build_allow_failure]).to eq(false) }
it { expect(data[:build_failure_reason]).to eq(build.failure_reason) }
it { expect(data[:project_id]).to eq(build.project.id) }
diff --git a/spec/lib/gitlab/data_builder/pipeline_spec.rb b/spec/lib/gitlab/data_builder/pipeline_spec.rb
index cf04f560ceb..bec1e612c02 100644
--- a/spec/lib/gitlab/data_builder/pipeline_spec.rb
+++ b/spec/lib/gitlab/data_builder/pipeline_spec.rb
@@ -59,7 +59,6 @@ RSpec.describe Gitlab::DataBuilder::Pipeline do
expect(runner_data[:id]).to eq(ci_runner.id)
expect(runner_data[:description]).to eq(ci_runner.description)
expect(runner_data[:active]).to eq(ci_runner.active)
- expect(runner_data[:is_shared]).to eq(ci_runner.instance_type?)
expect(runner_data[:tags]).to match_array(tag_names)
end
end
diff --git a/spec/lib/gitlab/database/background_migration/batch_metrics_spec.rb b/spec/lib/gitlab/database/background_migration/batch_metrics_spec.rb
new file mode 100644
index 00000000000..e96862fbc2d
--- /dev/null
+++ b/spec/lib/gitlab/database/background_migration/batch_metrics_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe Gitlab::Database::BackgroundMigration::BatchMetrics do
+ let(:batch_metrics) { described_class.new }
+
+ describe '#time_operation' do
+ it 'tracks the duration of the operation using monotonic time' do
+ expect(batch_metrics.timings).to be_empty
+
+ expect(Gitlab::Metrics::System).to receive(:monotonic_time)
+ .exactly(6).times
+ .and_return(0.0, 111.0, 200.0, 290.0, 300.0, 410.0)
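+ # Each #time_operation call consumes one start/stop pair of the stubbed readings: (0.0, 111.0), (200.0, 290.0), (300.0, 410.0).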
+
+ batch_metrics.time_operation(:my_label) do
+ # some operation
+ end
+
+ batch_metrics.time_operation(:my_other_label) do
+ # some operation
+ end
+
+ batch_metrics.time_operation(:my_label) do
+ # some operation
+ end
+
+ expect(batch_metrics.timings).to eq(my_label: [111.0, 110.0], my_other_label: [90.0])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
new file mode 100644
index 00000000000..7d0e10b62c6
--- /dev/null
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_runner_spec.rb
@@ -0,0 +1,198 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationRunner do
+ let(:migration_wrapper) { double('test wrapper') }
+ let(:runner) { described_class.new(migration_wrapper) }
+
+ describe '#run_migration_job' do
+ shared_examples_for 'it has completed the migration' do
+ it 'does not create and run a migration job' do
+ expect(migration_wrapper).not_to receive(:perform)
+
+ expect do
+ runner.run_migration_job(migration)
+ end.not_to change { Gitlab::Database::BackgroundMigration::BatchedJob.count }
+ end
+
+ it 'marks the migration as finished' do
+ relation = Gitlab::Database::BackgroundMigration::BatchedMigration.finished.where(id: migration.id)
+
+ expect { runner.run_migration_job(migration) }.to change { relation.count }.by(1)
+ end
+ end
+
+ context 'when the migration has no previous jobs' do
+ let(:migration) { create(:batched_background_migration, :active, batch_size: 2) }
+
+ let(:job_relation) do
+ Gitlab::Database::BackgroundMigration::BatchedJob.where(batched_background_migration_id: migration.id)
+ end
+
+ context 'when the migration has batches to process' do
+ let!(:event1) { create(:event) }
+ let!(:event2) { create(:event) }
+ let!(:event3) { create(:event) }
+
+ it 'runs the job for the first batch' do
+ migration.update!(min_value: event1.id, max_value: event2.id)
+
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(job_relation.first)
+ end
+
+ expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(1)
+
+ expect(job_relation.first).to have_attributes(
+ min_value: event1.id,
+ max_value: event2.id,
+ batch_size: migration.batch_size,
+ sub_batch_size: migration.sub_batch_size)
+ end
+ end
+
+ context 'when the batch maximum exceeds the migration maximum' do
+ let!(:events) { create_list(:event, 3) }
+ let(:event1) { events[0] }
+ let(:event2) { events[1] }
+
+ it 'clamps the batch maximum to the migration maximum' do
+ migration.update!(min_value: event1.id, max_value: event2.id, batch_size: 5)
+
+ expect(migration_wrapper).to receive(:perform)
+
+ expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(1)
+
+ expect(job_relation.first).to have_attributes(
+ min_value: event1.id,
+ max_value: event2.id,
+ batch_size: migration.batch_size,
+ sub_batch_size: migration.sub_batch_size)
+ end
+ end
+
+ context 'when the migration has no batches to process' do
+ it_behaves_like 'it has completed the migration'
+ end
+ end
+
+ context 'when the migration has previous jobs' do
+ let!(:event1) { create(:event) }
+ let!(:event2) { create(:event) }
+ let!(:event3) { create(:event) }
+
+ let!(:migration) do
+ create(:batched_background_migration, :active, batch_size: 2, min_value: event1.id, max_value: event3.id)
+ end
+
+ let!(:previous_job) do
+ create(:batched_background_migration_job,
+ batched_migration: migration,
+ min_value: event1.id,
+ max_value: event2.id,
+ batch_size: 2,
+ sub_batch_size: 1)
+ end
+
+ let(:job_relation) do
+ Gitlab::Database::BackgroundMigration::BatchedJob.where(batched_background_migration_id: migration.id)
+ end
+
+ context 'when the migration has batches to process' do
+ it 'runs the migration job for the next batch' do
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(job_relation.last)
+ end
+
+ expect { runner.run_migration_job(migration) }.to change { job_relation.count }.by(1)
+
+ expect(job_relation.last).to have_attributes(
+ min_value: event3.id,
+ max_value: event3.id,
+ batch_size: migration.batch_size,
+ sub_batch_size: migration.sub_batch_size)
+ end
+
+ context 'when the batch minimum exceeds the migration maximum' do
+ before do
+ migration.update!(batch_size: 5, max_value: event2.id)
+ end
+
+ it_behaves_like 'it has completed the migration'
+ end
+ end
+
+ context 'when the migration has no batches remaining' do
+ before do
+ create(:batched_background_migration_job,
+ batched_migration: migration,
+ min_value: event3.id,
+ max_value: event3.id,
+ batch_size: 2,
+ sub_batch_size: 1)
+ end
+
+ it_behaves_like 'it has completed the migration'
+ end
+ end
+ end
+
+ describe '#run_entire_migration' do
+ context 'when not in a development or test environment' do
+ it 'raises an error' do
+ environment = double('environment', development?: false, test?: false)
+ migration = build(:batched_background_migration, :finished)
+
+ allow(Rails).to receive(:env).and_return(environment)
+
+ expect do
+ runner.run_entire_migration(migration)
+ end.to raise_error('this method is not intended for use in real environments')
+ end
+ end
+
+ context 'when the given migration is not active' do
+ it 'does not create and run migration jobs' do
+ migration = build(:batched_background_migration, :finished)
+
+ expect(migration_wrapper).not_to receive(:perform)
+
+ expect do
+ runner.run_entire_migration(migration)
+ end.not_to change { Gitlab::Database::BackgroundMigration::BatchedJob.count }
+ end
+ end
+
+ context 'when the given migration is active' do
+ let!(:event1) { create(:event) }
+ let!(:event2) { create(:event) }
+ let!(:event3) { create(:event) }
+
+ let!(:migration) do
+ create(:batched_background_migration, :active, batch_size: 2, min_value: event1.id, max_value: event3.id)
+ end
+
+ let(:job_relation) do
+ Gitlab::Database::BackgroundMigration::BatchedJob.where(batched_background_migration_id: migration.id)
+ end
+
+ it 'runs all jobs inline until finishing the migration' do
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(job_relation.first)
+ end
+
+ expect(migration_wrapper).to receive(:perform) do |job_record|
+ expect(job_record).to eq(job_relation.last)
+ end
+
+ expect { runner.run_entire_migration(migration) }.to change { job_relation.count }.by(2)
+
+ expect(job_relation.first).to have_attributes(min_value: event1.id, max_value: event2.id)
+ expect(job_relation.last).to have_attributes(min_value: event3.id, max_value: event3.id)
+
+ expect(migration.reload).to be_finished
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
index f4a939e7c1f..261e23d0745 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_spec.rb
@@ -29,6 +29,16 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
+ describe '.active_migration' do
+ let!(:migration1) { create(:batched_background_migration, :finished) }
+ let!(:migration2) { create(:batched_background_migration, :active) }
+ let!(:migration3) { create(:batched_background_migration, :active) }
+
+ it 'returns the first active migration according to queue order' do
+ expect(described_class.active_migration).to eq(migration2)
+ end
+ end
+
describe '#interval_elapsed?' do
context 'when the migration has no last_job' do
let(:batched_migration) { build(:batched_background_migration) }
@@ -77,6 +87,34 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
end
end
end
+
+ context 'when an interval variance is given' do
+ let(:variance) { 2.seconds }
+
+ context 'when the last job is less than an interval with variance old' do
+ it 'returns false' do
+ freeze_time do
+ create(:batched_background_migration_job,
+ batched_migration: batched_migration,
+ created_at: Time.current - 1.minute - 57.seconds)
+
+ expect(batched_migration.interval_elapsed?(variance: variance)).to eq(false)
+ end
+ end
+ end
+
+ context 'when the last job is more than an interval with variance old' do
+ it 'returns true' do
+ freeze_time do
+ create(:batched_background_migration_job,
+ batched_migration: batched_migration,
+ created_at: Time.current - 1.minute - 58.seconds)
+
+ expect(batched_migration.interval_elapsed?(variance: variance)).to eq(true)
+ end
+ end
+ end
+ end
end
end
@@ -157,4 +195,17 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigration, type: :m
describe '#batch_class_name=' do
it_behaves_like 'an attr_writer that demodulizes assigned class names', :batch_class_name
end
+
+ describe '#prometheus_labels' do
+ let(:batched_migration) { create(:batched_background_migration, job_class_name: 'TestMigration', table_name: 'foo', column_name: 'bar') }
+
+ it 'returns a hash with labels for the migration' do
+ labels = {
+ migration_id: batched_migration.id,
+ migration_identifier: 'TestMigration/foo.bar'
+ }
+
+ expect(batched_migration.prometheus_labels).to eq(labels)
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
index 17cceb35ff7..00d13f23d36 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
@@ -3,43 +3,105 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '#perform' do
- let(:migration_wrapper) { described_class.new }
+ subject { described_class.new.perform(job_record) }
+
let(:job_class) { Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob }
let_it_be(:active_migration) { create(:batched_background_migration, :active, job_arguments: [:id, :other_id]) }
let!(:job_record) { create(:batched_background_migration_job, batched_migration: active_migration) }
+ let(:job_instance) { double('job instance', batch_metrics: {}) }
+
+ before do
+ allow(job_class).to receive(:new).and_return(job_instance)
+ end
it 'runs the migration job' do
- expect_next_instance_of(job_class) do |job_instance|
- expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
- end
+ expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
- migration_wrapper.perform(job_record)
+ subject
end
- it 'updates the the tracking record in the database' do
+ it 'updates the tracking record in the database' do
+ test_metrics = { 'my_metrics' => 'some value' }
+
+ expect(job_instance).to receive(:perform)
+ expect(job_instance).to receive(:batch_metrics).and_return(test_metrics)
+
expect(job_record).to receive(:update!).with(hash_including(attempts: 1, status: :running)).and_call_original
freeze_time do
- migration_wrapper.perform(job_record)
+ subject
reloaded_job_record = job_record.reload
expect(reloaded_job_record).not_to be_pending
expect(reloaded_job_record.attempts).to eq(1)
expect(reloaded_job_record.started_at).to eq(Time.current)
+ expect(reloaded_job_record.metrics).to eq(test_metrics)
+ end
+ end
+
+ context 'reporting Prometheus metrics' do
+ let(:labels) { job_record.batched_migration.prometheus_labels }
+
+ before do
+ allow(job_instance).to receive(:perform)
+ end
+
+ it 'reports batch_size' do
+ expect(described_class.metrics[:gauge_batch_size]).to receive(:set).with(labels, job_record.batch_size)
+
+ subject
+ end
+
+ it 'reports sub_batch_size' do
+ expect(described_class.metrics[:gauge_sub_batch_size]).to receive(:set).with(labels, job_record.sub_batch_size)
+
+ subject
+ end
+
+ it 'reports updated tuples (currently based on batch_size)' do
+ expect(described_class.metrics[:counter_updated_tuples]).to receive(:increment).with(labels, job_record.batch_size)
+
+ subject
+ end
+
+ it 'reports summary of query timings' do
+ metrics = { 'timings' => { 'update_all' => [1, 2, 3, 4, 5] } }
+
+ expect(job_instance).to receive(:batch_metrics).and_return(metrics)
+
+ metrics['timings'].each do |key, timings|
+ summary_labels = labels.merge(operation: key)
+ timings.each do |timing|
+ expect(described_class.metrics[:histogram_timings]).to receive(:observe).with(summary_labels, timing)
+ end
+ end
+
+ subject
+ end
+
+ it 'reports time efficiency' do
+ freeze_time do
+ expect(Time).to receive(:current).and_return(Time.zone.now - 5.seconds).ordered
+ expect(Time).to receive(:current).and_return(Time.zone.now).ordered
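+ # The two stubbed readings are five seconds apart, so the observed efficiency is 5 seconds divided by the migration interval.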
+
+ ratio = 5 / job_record.batched_migration.interval.to_f
+
+ expect(described_class.metrics[:histogram_time_efficiency]).to receive(:observe).with(labels, ratio)
+
+ subject
+ end
end
end
context 'when the migration job does not raise an error' do
it 'marks the tracking record as succeeded' do
- expect_next_instance_of(job_class) do |job_instance|
- expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
- end
+ expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, 'id', 'other_id')
freeze_time do
- migration_wrapper.perform(job_record)
+ subject
reloaded_job_record = job_record.reload
@@ -51,14 +113,12 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
context 'when the migration job raises an error' do
it 'marks the tracking record as failed before raising the error' do
- expect_next_instance_of(job_class) do |job_instance|
- expect(job_instance).to receive(:perform)
- .with(1, 10, 'events', 'id', 1, 'id', 'other_id')
- .and_raise(RuntimeError, 'Something broke!')
- end
+ expect(job_instance).to receive(:perform)
+ .with(1, 10, 'events', 'id', 1, 'id', 'other_id')
+ .and_raise(RuntimeError, 'Something broke!')
freeze_time do
- expect { migration_wrapper.perform(job_record) }.to raise_error(RuntimeError, 'Something broke!')
+ expect { subject }.to raise_error(RuntimeError, 'Something broke!')
reloaded_job_record = job_record.reload
diff --git a/spec/lib/gitlab/database/background_migration/scheduler_spec.rb b/spec/lib/gitlab/database/background_migration/scheduler_spec.rb
deleted file mode 100644
index ba745acdf8a..00000000000
--- a/spec/lib/gitlab/database/background_migration/scheduler_spec.rb
+++ /dev/null
@@ -1,182 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::Database::BackgroundMigration::Scheduler, '#perform' do
- let(:scheduler) { described_class.new }
-
- shared_examples_for 'it has no jobs to run' do
- it 'does not create and run a migration job' do
- test_wrapper = double('test wrapper')
-
- expect(test_wrapper).not_to receive(:perform)
-
- expect do
- scheduler.perform(migration_wrapper: test_wrapper)
- end.not_to change { Gitlab::Database::BackgroundMigration::BatchedJob.count }
- end
- end
-
- context 'when there are no active migrations' do
- let!(:migration) { create(:batched_background_migration, :finished) }
-
- it_behaves_like 'it has no jobs to run'
- end
-
- shared_examples_for 'it has completed the migration' do
- it 'marks the migration as finished' do
- relation = Gitlab::Database::BackgroundMigration::BatchedMigration.finished.where(id: first_migration.id)
-
- expect { scheduler.perform }.to change { relation.count }.by(1)
- end
- end
-
- context 'when there are active migrations' do
- let!(:first_migration) { create(:batched_background_migration, :active, batch_size: 2) }
- let!(:last_migration) { create(:batched_background_migration, :active) }
-
- let(:job_relation) do
- Gitlab::Database::BackgroundMigration::BatchedJob.where(batched_background_migration_id: first_migration.id)
- end
-
- context 'when the migration interval has not elapsed' do
- before do
- expect_next_found_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigration) do |migration|
- expect(migration).to receive(:interval_elapsed?).and_return(false)
- end
- end
-
- it_behaves_like 'it has no jobs to run'
- end
-
- context 'when the interval has elapsed' do
- before do
- expect_next_found_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigration) do |migration|
- expect(migration).to receive(:interval_elapsed?).and_return(true)
- end
- end
-
- context 'when the first migration has no previous jobs' do
- context 'when the migration has batches to process' do
- let!(:event1) { create(:event) }
- let!(:event2) { create(:event) }
- let!(:event3) { create(:event) }
-
- it 'runs the job for the first batch' do
- first_migration.update!(min_value: event1.id, max_value: event3.id)
-
- expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper) do |wrapper|
- expect(wrapper).to receive(:perform).and_wrap_original do |_, job_record|
- expect(job_record).to eq(job_relation.first)
- end
- end
-
- expect { scheduler.perform }.to change { job_relation.count }.by(1)
-
- expect(job_relation.first).to have_attributes(
- min_value: event1.id,
- max_value: event2.id,
- batch_size: first_migration.batch_size,
- sub_batch_size: first_migration.sub_batch_size)
- end
- end
-
- context 'when the migration has no batches to process' do
- it_behaves_like 'it has no jobs to run'
- it_behaves_like 'it has completed the migration'
- end
- end
-
- context 'when the first migration has previous jobs' do
- let!(:event1) { create(:event) }
- let!(:event2) { create(:event) }
- let!(:event3) { create(:event) }
-
- let!(:previous_job) do
- create(:batched_background_migration_job,
- batched_migration: first_migration,
- min_value: event1.id,
- max_value: event2.id,
- batch_size: 2,
- sub_batch_size: 1)
- end
-
- context 'when the migration is ready to process another job' do
- it 'runs the migration job for the next batch' do
- first_migration.update!(min_value: event1.id, max_value: event3.id)
-
- expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper) do |wrapper|
- expect(wrapper).to receive(:perform).and_wrap_original do |_, job_record|
- expect(job_record).to eq(job_relation.last)
- end
- end
-
- expect { scheduler.perform }.to change { job_relation.count }.by(1)
-
- expect(job_relation.last).to have_attributes(
- min_value: event3.id,
- max_value: event3.id,
- batch_size: first_migration.batch_size,
- sub_batch_size: first_migration.sub_batch_size)
- end
- end
-
- context 'when the migration has no batches remaining' do
- let!(:final_job) do
- create(:batched_background_migration_job,
- batched_migration: first_migration,
- min_value: event3.id,
- max_value: event3.id,
- batch_size: 2,
- sub_batch_size: 1)
- end
-
- it_behaves_like 'it has no jobs to run'
- it_behaves_like 'it has completed the migration'
- end
- end
-
- context 'when the bounds of the next batch exceed the migration maximum value' do
- let!(:events) { create_list(:event, 3) }
- let(:event1) { events[0] }
- let(:event2) { events[1] }
-
- context 'when the batch maximum exceeds the migration maximum' do
- it 'clamps the batch maximum to the migration maximum' do
- first_migration.update!(batch_size: 5, min_value: event1.id, max_value: event2.id)
-
- expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper) do |wrapper|
- expect(wrapper).to receive(:perform)
- end
-
- expect { scheduler.perform }.to change { job_relation.count }.by(1)
-
- expect(job_relation.first).to have_attributes(
- min_value: event1.id,
- max_value: event2.id,
- batch_size: first_migration.batch_size,
- sub_batch_size: first_migration.sub_batch_size)
- end
- end
-
- context 'when the batch minimum exceeds the migration maximum' do
- let!(:previous_job) do
- create(:batched_background_migration_job,
- batched_migration: first_migration,
- min_value: event1.id,
- max_value: event2.id,
- batch_size: 5,
- sub_batch_size: 1)
- end
-
- before do
- first_migration.update!(batch_size: 5, min_value: 1, max_value: event2.id)
- end
-
- it_behaves_like 'it has no jobs to run'
- it_behaves_like 'it has completed the migration'
- end
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/database/batch_count_spec.rb b/spec/lib/gitlab/database/batch_count_spec.rb
index 29688b18e94..da13bc425d1 100644
--- a/spec/lib/gitlab/database/batch_count_spec.rb
+++ b/spec/lib/gitlab/database/batch_count_spec.rb
@@ -270,6 +270,8 @@ RSpec.describe Gitlab::Database::BatchCount do
end
it "defaults the batch size to #{Gitlab::Database::BatchCounter::DEFAULT_DISTINCT_BATCH_SIZE}" do
+ stub_feature_flags(loose_index_scan_for_distinct_values: false)
+
min_id = model.minimum(:id)
relation = instance_double(ActiveRecord::Relation)
allow(model).to receive_message_chain(:select, public_send: relation)
@@ -315,13 +317,85 @@ RSpec.describe Gitlab::Database::BatchCount do
end
end
- it_behaves_like 'when batch fetch query is canceled' do
+ context 'when the loose_index_scan_for_distinct_values feature flag is off' do
+ it_behaves_like 'when batch fetch query is canceled' do
+ let(:mode) { :distinct }
+ let(:operation) { :count }
+ let(:operation_args) { nil }
+ let(:column) { nil }
+
+ subject { described_class.method(:batch_distinct_count) }
+
+ before do
+ stub_feature_flags(loose_index_scan_for_distinct_values: false)
+ end
+ end
+ end
+
+ context 'when the loose_index_scan_for_distinct_values feature flag is on' do
let(:mode) { :distinct }
let(:operation) { :count }
let(:operation_args) { nil }
let(:column) { nil }
+ let(:batch_size) { 10_000 }
+
subject { described_class.method(:batch_distinct_count) }
+
+ before do
+ stub_feature_flags(loose_index_scan_for_distinct_values: true)
+ end
+
+ it 'reduces the batch size by half and retries the fetch' do
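+ # The query for the full batch range raises QueryCanceled, so the count is retried on the two half-sized ranges stubbed below.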
+ too_big_batch_relation_mock = instance_double(ActiveRecord::Relation)
+
+ count_method = double(send: 1)
+
+ allow(too_big_batch_relation_mock).to receive(:send).and_raise(ActiveRecord::QueryCanceled)
+ allow(Gitlab::Database::LooseIndexScanDistinctCount).to receive_message_chain(:new, :build_query).with(from: 0, to: batch_size).and_return(too_big_batch_relation_mock)
+ allow(Gitlab::Database::LooseIndexScanDistinctCount).to receive_message_chain(:new, :build_query).with(from: 0, to: batch_size / 2).and_return(count_method)
+ allow(Gitlab::Database::LooseIndexScanDistinctCount).to receive_message_chain(:new, :build_query).with(from: batch_size / 2, to: batch_size).and_return(count_method)
+
+ subject.call(model, column, batch_size: batch_size, start: 0, finish: batch_size - 1)
+ end
+
+ context 'when all retries fail' do
+ let(:batch_count_query) { 'SELECT COUNT(id) FROM relation WHERE id BETWEEN 0 and 1' }
+
+ before do
+ relation = instance_double(ActiveRecord::Relation)
+ allow(Gitlab::Database::LooseIndexScanDistinctCount).to receive_message_chain(:new, :build_query).and_return(relation)
+ allow(relation).to receive(:send).and_raise(ActiveRecord::QueryCanceled.new('query timed out'))
+ allow(relation).to receive(:to_sql).and_return(batch_count_query)
+ end
+
+ it 'logs failing query' do
+ expect(Gitlab::AppJsonLogger).to receive(:error).with(
+ event: 'batch_count',
+ relation: model.table_name,
+ operation: operation,
+ operation_args: operation_args,
+ start: 0,
+ mode: mode,
+ query: batch_count_query,
+ message: 'Query has been canceled with message: query timed out'
+ )
+ expect(subject.call(model, column, batch_size: batch_size, start: 0)).to eq(-1)
+ end
+ end
+
+ context 'when LooseIndexScanDistinctCount raises error' do
+ let(:column) { :creator_id }
+ let(:error_class) { Gitlab::Database::LooseIndexScanDistinctCount::ColumnConfigurationError }
+
+ it 'rescues ColumnConfigurationError' do
+ allow(Gitlab::Database::LooseIndexScanDistinctCount).to receive(:new).and_raise(error_class.new('error message'))
+
+ expect(Gitlab::AppJsonLogger).to receive(:error).with(a_hash_including(message: 'LooseIndexScanDistinctCount column error: error message'))
+
+ expect(subject.call(Project, column, batch_size: 10_000, start: 0)).to eq(-1)
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/database/loose_index_scan_distinct_count_spec.rb b/spec/lib/gitlab/database/loose_index_scan_distinct_count_spec.rb
new file mode 100644
index 00000000000..e0eac26e4d9
--- /dev/null
+++ b/spec/lib/gitlab/database/loose_index_scan_distinct_count_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::LooseIndexScanDistinctCount do
+ context 'counting distinct users' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:other_user) { create(:user) }
+
+ let(:column) { :creator_id }
+
+ before_all do
+ create_list(:project, 3, creator: user)
+ create_list(:project, 1, creator: other_user)
+ end
+
+ subject(:count) { described_class.new(Project, :creator_id).count(from: Project.minimum(:creator_id), to: Project.maximum(:creator_id) + 1) }
+
+ it { is_expected.to eq(2) }
+
+ context 'when STI model is queried' do
+ it 'does not raise error' do
+ expect { described_class.new(Group, :owner_id).count(from: 0, to: 1) }.not_to raise_error
+ end
+ end
+
+ context 'when model with default_scope is queried' do
+ it 'does not raise error' do
+ expect { described_class.new(GroupMember, :id).count(from: 0, to: 1) }.not_to raise_error
+ end
+ end
+
+ context 'when the fully qualified column is given' do
+ let(:column) { 'projects.creator_id' }
+
+ it { is_expected.to eq(2) }
+ end
+
+ context 'when AR attribute is given' do
+ let(:column) { Project.arel_table[:creator_id] }
+
+ it { is_expected.to eq(2) }
+ end
+
+ context 'when invalid value is given for the column' do
+ let(:column) { Class.new }
+
+ it { expect { described_class.new(Group, column) }.to raise_error(Gitlab::Database::LooseIndexScanDistinctCount::ColumnConfigurationError) }
+ end
+
+ context 'when null values are present' do
+ before do
+ create_list(:project, 2).each { |p| p.update_column(:creator_id, nil) }
+ end
+
+ it { is_expected.to eq(2) }
+ end
+ end
+
+ context 'counting STI models' do
+ let!(:groups) { create_list(:group, 3) }
+ let!(:namespaces) { create_list(:namespace, 2) }
+
+ let(:max_id) { Namespace.maximum(:id) + 1 }
+
+ it 'counts groups' do
+ count = described_class.new(Group, :id).count(from: 0, to: max_id)
+ expect(count).to eq(3)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 9178707a3d0..44293086e79 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -835,7 +835,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:check_trigger_permissions!).with(:users)
expect(model).to receive(:install_rename_triggers_for_postgresql)
- .with(trigger_name, '"users"', '"old"', '"new"')
+ .with(:users, :old, :new)
expect(model).to receive(:add_column)
.with(:users, :new, :integer,
@@ -860,14 +860,18 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
context 'with existing records and type casting' do
let(:trigger_name) { model.rename_trigger_name(:users, :id, :new) }
let(:user) { create(:user) }
+ let(:copy_trigger) { double('copy trigger') }
+
+ before do
+ expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table)
+ .with(:users).and_return(copy_trigger)
+ end
it 'copies the value to the new column using the type_cast_function', :aggregate_failures do
expect(model).to receive(:copy_indexes).with(:users, :id, :new)
expect(model).to receive(:add_not_null_constraint).with(:users, :new)
expect(model).to receive(:execute).with("UPDATE \"users\" SET \"new\" = cast_to_jsonb_with_default(\"users\".\"id\") WHERE \"users\".\"id\" >= #{user.id}")
- expect(model).to receive(:execute).with("DROP TRIGGER IF EXISTS #{trigger_name}\nON \"users\"\n")
- expect(model).to receive(:execute).with("CREATE TRIGGER #{trigger_name}\nBEFORE INSERT OR UPDATE\nON \"users\"\nFOR EACH ROW\nEXECUTE FUNCTION #{trigger_name}()\n")
- expect(model).to receive(:execute).with("CREATE OR REPLACE FUNCTION #{trigger_name}()\nRETURNS trigger AS\n$BODY$\nBEGIN\n NEW.\"new\" := NEW.\"id\";\n RETURN NEW;\nEND;\n$BODY$\nLANGUAGE 'plpgsql'\nVOLATILE\n")
+ expect(copy_trigger).to receive(:create).with(:id, :new, trigger_name: nil)
model.rename_column_concurrently(:users, :id, :new, type_cast_function: 'cast_to_jsonb_with_default')
end
@@ -996,7 +1000,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:check_trigger_permissions!).with(:users)
expect(model).to receive(:install_rename_triggers_for_postgresql)
- .with(trigger_name, '"users"', '"old"', '"new"')
+ .with(:users, :old, :new)
expect(model).to receive(:add_column)
.with(:users, :old, :integer,
@@ -1156,7 +1160,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
.with(:users, temp_undo_cleanup_column, :old)
expect(model).to receive(:install_rename_triggers_for_postgresql)
- .with(trigger_name, '"users"', '"old"', '"old_for_type_change"')
+ .with(:users, :old, 'old_for_type_change')
model.undo_cleanup_concurrent_column_type_change(:users, :old, :string)
end
@@ -1182,7 +1186,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
.with(:users, temp_undo_cleanup_column, :old)
expect(model).to receive(:install_rename_triggers_for_postgresql)
- .with(trigger_name, '"users"', '"old"', '"old_for_type_change"')
+ .with(:users, :old, 'old_for_type_change')
model.undo_cleanup_concurrent_column_type_change(
:users,
@@ -1204,28 +1208,25 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
describe '#install_rename_triggers_for_postgresql' do
it 'installs the triggers for PostgreSQL' do
- expect(model).to receive(:execute)
- .with(/CREATE OR REPLACE FUNCTION foo()/m)
+ copy_trigger = double('copy trigger')
- expect(model).to receive(:execute)
- .with(/DROP TRIGGER IF EXISTS foo/m)
+ expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table)
+ .with(:users).and_return(copy_trigger)
- expect(model).to receive(:execute)
- .with(/CREATE TRIGGER foo/m)
+ expect(copy_trigger).to receive(:create).with(:old, :new, trigger_name: 'foo')
- model.install_rename_triggers_for_postgresql('foo', :users, :old, :new)
- end
-
- it 'does not fail if trigger already exists' do
- model.install_rename_triggers_for_postgresql('foo', :users, :old, :new)
- model.install_rename_triggers_for_postgresql('foo', :users, :old, :new)
+ model.install_rename_triggers_for_postgresql(:users, :old, :new, trigger_name: 'foo')
end
end
describe '#remove_rename_triggers_for_postgresql' do
it 'removes the function and trigger' do
- expect(model).to receive(:execute).with('DROP TRIGGER IF EXISTS foo ON bar')
- expect(model).to receive(:execute).with('DROP FUNCTION IF EXISTS foo()')
+ copy_trigger = double('copy trigger')
+
+ expect(Gitlab::Database::UnidirectionalCopyTrigger).to receive(:on_table)
+ .with('bar').and_return(copy_trigger)
+
+ expect(copy_trigger).to receive(:drop).with('foo')
model.remove_rename_triggers_for_postgresql('bar', 'foo')
end
@@ -1702,65 +1703,171 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
describe '#initialize_conversion_of_integer_to_bigint' do
- let(:user) { create(:user) }
- let(:project) { create(:project, :repository) }
- let(:issue) { create(:issue, project: project) }
- let!(:event) do
- create(:event, :created, project: project, target: issue, author: user)
+ let(:table) { :test_table }
+ let(:column) { :id }
+ let(:tmp_column) { "#{column}_convert_to_bigint" }
+
+ before do
+ model.create_table table, id: false do |t|
+ t.integer :id, primary_key: true
+ t.integer :non_nullable_column, null: false
+ t.integer :nullable_column
+ t.timestamps
+ end
end
- context 'in a transaction' do
- it 'raises RuntimeError' do
- allow(model).to receive(:transaction_open?).and_return(true)
+ context 'when the target table does not exist' do
+ it 'raises an error' do
+ expect { model.initialize_conversion_of_integer_to_bigint(:this_table_is_not_real, column) }
+ .to raise_error('Table this_table_is_not_real does not exist')
+ end
+ end
- expect { model.initialize_conversion_of_integer_to_bigint(:events, :id) }
- .to raise_error(RuntimeError)
+ context 'when the primary key does not exist' do
+ it 'raises an error' do
+ expect { model.initialize_conversion_of_integer_to_bigint(table, column, primary_key: :foobar) }
+ .to raise_error("Column foobar does not exist on #{table}")
end
end
- context 'outside a transaction' do
- before do
- allow(model).to receive(:transaction_open?).and_return(false)
+ context 'when the column to convert does not exist' do
+ let(:column) { :foobar }
+
+ it 'raises an error' do
+ expect { model.initialize_conversion_of_integer_to_bigint(table, column) }
+ .to raise_error("Column #{column} does not exist on #{table}")
end
+ end
- it 'creates a bigint column and starts backfilling it' do
- expect(model)
- .to receive(:add_column)
- .with(
- :events,
- 'id_convert_to_bigint',
- :bigint,
- default: 0,
- null: false
- )
+ context 'when the column to convert is the primary key' do
+ it 'creates a not-null bigint column and installs triggers' do
+ expect(model).to receive(:add_column).with(table, tmp_column, :bigint, default: 0, null: false)
- expect(model)
- .to receive(:install_rename_triggers)
- .with(:events, :id, 'id_convert_to_bigint')
+ expect(model).to receive(:install_rename_triggers).with(table, column, tmp_column)
- expect(model).to receive(:queue_background_migration_jobs_by_range_at_intervals).and_call_original
+ model.initialize_conversion_of_integer_to_bigint(table, column)
+ end
+ end
- expect(BackgroundMigrationWorker)
- .to receive(:perform_in)
- .ordered
- .with(
- 2.minutes,
- 'CopyColumnUsingBackgroundMigrationJob',
- [event.id, event.id, :events, :id, 100, :id, 'id_convert_to_bigint']
- )
+ context 'when the column to convert is not the primary key, but non-nullable' do
+ let(:column) { :non_nullable_column }
+
+ it 'creates a not-null bigint column and installs triggers' do
+ expect(model).to receive(:add_column).with(table, tmp_column, :bigint, default: 0, null: false)
+
+ expect(model).to receive(:install_rename_triggers).with(table, column, tmp_column)
+
+ model.initialize_conversion_of_integer_to_bigint(table, column)
+ end
+ end
+
+ context 'when the column to convert is not the primary key, but nullable' do
+ let(:column) { :nullable_column }
+
+ it 'creates a nullable bigint column and installs triggers' do
+ expect(model).to receive(:add_column).with(table, tmp_column, :bigint, default: nil)
+
+ expect(model).to receive(:install_rename_triggers).with(table, column, tmp_column)
+
+ model.initialize_conversion_of_integer_to_bigint(table, column)
+ end
+ end
+ end
+
+ describe '#backfill_conversion_of_integer_to_bigint' do
+ let(:table) { :_test_backfill_table }
+ let(:column) { :id }
+ let(:tmp_column) { "#{column}_convert_to_bigint" }
+
+ before do
+ model.create_table table, id: false do |t|
+ t.integer :id, primary_key: true
+ t.text :message, null: false
+ t.timestamps
+ end
- expect(Gitlab::BackgroundMigration)
- .to receive(:steal)
- .ordered
- .with('CopyColumnUsingBackgroundMigrationJob')
+ allow(model).to receive(:perform_background_migration_inline?).and_return(false)
+ end
- model.initialize_conversion_of_integer_to_bigint(
- :events,
- :id,
- batch_size: 300,
- sub_batch_size: 100
+ context 'when the target table does not exist' do
+ it 'raises an error' do
+ expect { model.backfill_conversion_of_integer_to_bigint(:this_table_is_not_real, column) }
+ .to raise_error('Table this_table_is_not_real does not exist')
+ end
+ end
+
+ context 'when the primary key does not exist' do
+ it 'raises an error' do
+ expect { model.backfill_conversion_of_integer_to_bigint(table, column, primary_key: :foobar) }
+ .to raise_error("Column foobar does not exist on #{table}")
+ end
+ end
+
+ context 'when the column to convert does not exist' do
+ let(:column) { :foobar }
+
+ it 'raises an error' do
+ expect { model.backfill_conversion_of_integer_to_bigint(table, column) }
+ .to raise_error("Column #{column} does not exist on #{table}")
+ end
+ end
+
+ context 'when the temporary column does not exist' do
+ it 'raises an error' do
+ expect { model.backfill_conversion_of_integer_to_bigint(table, column) }
+ .to raise_error('The temporary column does not exist, initialize it with `initialize_conversion_of_integer_to_bigint`')
+ end
+ end
+
+ context 'when the conversion is properly initialized' do
+ let(:model_class) do
+ Class.new(ActiveRecord::Base) do
+ self.table_name = :_test_backfill_table
+ end
+ end
+
+ let(:migration_relation) { Gitlab::Database::BackgroundMigration::BatchedMigration.active }
+
+ before do
+ model.initialize_conversion_of_integer_to_bigint(table, column)
+
+ model_class.create!(message: 'hello')
+ model_class.create!(message: 'so long')
+ end
+
+ it 'creates the batched migration tracking record' do
+ last_record = model_class.create!(message: 'goodbye')
+
+ expect do
+ model.backfill_conversion_of_integer_to_bigint(table, column, batch_size: 2, sub_batch_size: 1)
+ end.to change { migration_relation.count }.by(1)
+
+ expect(migration_relation.last).to have_attributes(
+ job_class_name: 'CopyColumnUsingBackgroundMigrationJob',
+ table_name: table.to_s,
+ column_name: column.to_s,
+ min_value: 1,
+ max_value: last_record.id,
+ interval: 120,
+ batch_size: 2,
+ sub_batch_size: 1,
+ job_arguments: [column.to_s, "#{column}_convert_to_bigint"]
)
end
+
+ context 'when the migration should be performed inline' do
+ it 'calls the runner to run the entire migration' do
+ expect(model).to receive(:perform_background_migration_inline?).and_return(true)
+
+ expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |scheduler|
+ expect(scheduler).to receive(:run_entire_migration) do |batched_migration|
+ expect(batched_migration).to eq(migration_relation.last)
+ end
+ end
+
+ model.backfill_conversion_of_integer_to_bigint(table, column, batch_size: 2, sub_batch_size: 1)
+ end
+ end
end
end
@@ -1910,9 +2017,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
def setup
namespace = namespaces.create!(name: 'foo', path: 'foo')
- project = projects.create!(namespace_id: namespace.id)
-
- project
+ projects.create!(namespace_id: namespace.id)
end
it 'generates iids properly for models created after the migration' do
diff --git a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
index e25e4af2e86..c6d456964cf 100644
--- a/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migrations/background_migration_helpers_spec.rb
@@ -263,7 +263,15 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
end
describe '#queue_batched_background_migration' do
+ let(:pgclass_info) { instance_double('Gitlab::Database::PgClass', cardinality_estimate: 42) }
+
+ before do
+ allow(Gitlab::Database::PgClass).to receive(:for_table).and_call_original
+ end
+
it 'creates the database record for the migration' do
+ expect(Gitlab::Database::PgClass).to receive(:for_table).with(:projects).and_return(pgclass_info)
+
expect do
model.queue_batched_background_migration(
'MyJobClass',
@@ -288,7 +296,8 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
batch_size: 100,
sub_batch_size: 10,
job_arguments: %w[],
- status: 'active')
+ status: 'active',
+ total_tuple_count: pgclass_info.cardinality_estimate)
end
context 'when the job interval is lower than the minimum' do
@@ -431,4 +440,21 @@ RSpec.describe Gitlab::Database::Migrations::BackgroundMigrationHelpers do
model.bulk_migrate_in(10.minutes, [%w(Class hello world)])
end
end
+
+ describe '#delete_queued_jobs' do
+ let(:job1) { double }
+ let(:job2) { double }
+
+ it 'deletes all queued jobs for the given background migration' do
+ expect(Gitlab::BackgroundMigration).to receive(:steal).with('BackgroundMigrationClassName') do |&block|
+ expect(block.call(job1)).to be(false)
+ expect(block.call(job2)).to be(false)
+ end
+
+ expect(job1).to receive(:delete)
+ expect(job2).to receive(:delete)
+
+ model.delete_queued_jobs('BackgroundMigrationClassName')
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
index b5d741fc5e9..5b2a29d1d2d 100644
--- a/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_migration_helpers/table_management_helpers_spec.rb
@@ -704,6 +704,72 @@ RSpec.describe Gitlab::Database::PartitioningMigrationHelpers::TableManagementHe
end
end
+ describe '#drop_nonpartitioned_archive_table' do
+ subject { migration.drop_nonpartitioned_archive_table source_table }
+
+ let(:archived_table) { "#{source_table}_archived" }
+
+ before do
+ migration.partition_table_by_date source_table, partition_column, min_date: min_date, max_date: max_date
+ migration.replace_with_partitioned_table source_table
+ end
+
+ it 'drops the archive table' do
+ expect(table_type(archived_table)).to eq('normal')
+
+ subject
+
+ expect(table_type(archived_table)).to eq(nil)
+ end
+
+ it 'drops the trigger on the source table' do
+ expect_valid_function_trigger(source_table, trigger_name, function_name, after: %w[delete insert update])
+
+ subject
+
+ expect_trigger_not_to_exist(source_table, trigger_name)
+ end
+
+ it 'drops the sync function' do
+ expect_function_to_exist(function_name)
+
+ subject
+
+ expect_function_not_to_exist(function_name)
+ end
+ end
+
+ describe '#create_trigger_to_sync_tables' do
+ subject { migration.create_trigger_to_sync_tables(source_table, target_table, :id) }
+
+ let(:target_table) { "#{source_table}_copy" }
+
+ before do
+ migration.create_table target_table do |t|
+ t.string :name, null: false
+ t.integer :age, null: false
+ t.datetime partition_column
+ t.datetime :updated_at
+ end
+ end
+
+ it 'creates the sync function' do
+ expect_function_not_to_exist(function_name)
+
+ subject
+
+ expect_function_to_exist(function_name)
+ end
+
+ it 'installs the trigger' do
+ expect_trigger_not_to_exist(source_table, trigger_name)
+
+ subject
+
+ expect_valid_function_trigger(source_table, trigger_name, function_name, after: %w[delete insert update])
+ end
+ end
+
def filter_columns_by_name(columns, names)
columns.reject { |c| names.include?(c.name) }
end
diff --git a/spec/lib/gitlab/database/pg_class_spec.rb b/spec/lib/gitlab/database/pg_class_spec.rb
new file mode 100644
index 00000000000..83b50415a6c
--- /dev/null
+++ b/spec/lib/gitlab/database/pg_class_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::PgClass, type: :model do
+ describe '#cardinality_estimate' do
+ context 'when no information is available' do
+ subject { described_class.new(reltuples: 0.0).cardinality_estimate }
+
+ it 'returns nil for the estimate' do
+ expect(subject).to be_nil
+ end
+ end
+
+ context 'with reltuples available' do
+ subject { described_class.new(reltuples: 42.0).cardinality_estimate }
+
+ it 'returns the reltuples for the estimate' do
+ expect(subject).to eq(42)
+ end
+ end
+ end
+
+ describe '.for_table' do
+ let(:relname) { :projects }
+
+ subject { described_class.for_table(relname) }
+
+ it 'returns PgClass for this table' do
+ expect(subject).to be_a(described_class)
+ end
+
+ it 'matches the relname' do
+ expect(subject.relname).to eq(relname.to_s)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
index 757da2d9092..1edcd890370 100644
--- a/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
+++ b/spec/lib/gitlab/database/rename_reserved_paths_migration/v1/rename_base_spec.rb
@@ -246,7 +246,8 @@ RSpec.describe Gitlab::Database::RenameReservedPathsMigration::V1::RenameBase, :
subject.track_rename('namespace', 'path/to/namespace', 'path/to/renamed')
- old_path, new_path = [nil, nil]
+ old_path = nil
+ new_path = nil
Gitlab::Redis::SharedState.with do |redis|
rename_info = redis.lpop(key)
old_path, new_path = Gitlab::Json.parse(rename_info)
diff --git a/spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb b/spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb
new file mode 100644
index 00000000000..2955c208f16
--- /dev/null
+++ b/spec/lib/gitlab/database/unidirectional_copy_trigger_spec.rb
@@ -0,0 +1,191 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::UnidirectionalCopyTrigger do
+ include Database::TriggerHelpers
+
+ let(:table_name) { '_test_table' }
+ let(:connection) { ActiveRecord::Base.connection }
+ let(:copy_trigger) { described_class.on_table(table_name) }
+
+ describe '#name' do
+ context 'when a single column name is given' do
+ subject(:trigger_name) { copy_trigger.name('id', 'other_id') }
+
+ it 'returns the trigger name' do
+ expect(trigger_name).to eq('trigger_cfce7a56a9d6')
+ end
+ end
+
+ context 'when multiple column names are given' do
+ subject(:trigger_name) { copy_trigger.name(%w[id fk_id], %w[other_id other_fk_id]) }
+
+ it 'returns the trigger name' do
+ expect(trigger_name).to eq('trigger_166626e51481')
+ end
+ end
+
+ context 'when a different number of new and old column names are given' do
+ it 'raises an error' do
+ expect do
+ copy_trigger.name(%w[id fk_id], %w[other_id])
+ end.to raise_error(ArgumentError, 'number of source and destination columns must match')
+ end
+ end
+ end
+
+ describe '#create' do
+ let(:model) { Class.new(ActiveRecord::Base) }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ other_id integer,
+ fk_id bigint,
+ other_fk_id bigint);
+ SQL
+
+ model.table_name = table_name
+ end
+
+ context 'when a single column name is given' do
+ let(:trigger_name) { 'trigger_cfce7a56a9d6' }
+
+ it 'creates the trigger and function' do
+ expect_function_not_to_exist(trigger_name)
+ expect_trigger_not_to_exist(table_name, trigger_name)
+
+ copy_trigger.create('id', 'other_id')
+
+ expect_function_to_exist(trigger_name)
+ expect_valid_function_trigger(table_name, trigger_name, trigger_name, before: %w[insert update])
+ end
+
+ it 'properly copies the column data using the trigger function' do
+ copy_trigger.create('id', 'other_id')
+
+ record = model.create!(id: 10)
+ expect(record.reload).to have_attributes(other_id: 10)
+
+ record.update!({ id: 20 })
+ expect(record.reload).to have_attributes(other_id: 20)
+ end
+ end
+
+ context 'when multiple column names are given' do
+ let(:trigger_name) { 'trigger_166626e51481' }
+
+ it 'creates the trigger and function to set all the columns' do
+ expect_function_not_to_exist(trigger_name)
+ expect_trigger_not_to_exist(table_name, trigger_name)
+
+ copy_trigger.create(%w[id fk_id], %w[other_id other_fk_id])
+
+ expect_function_to_exist(trigger_name)
+ expect_valid_function_trigger(table_name, trigger_name, trigger_name, before: %w[insert update])
+ end
+
+ it 'properly copies the columns using the trigger function' do
+ copy_trigger.create(%w[id fk_id], %w[other_id other_fk_id])
+
+ record = model.create!(id: 10, fk_id: 20)
+ expect(record.reload).to have_attributes(other_id: 10, other_fk_id: 20)
+
+ record.update!(id: 30, fk_id: 50)
+ expect(record.reload).to have_attributes(other_id: 30, other_fk_id: 50)
+ end
+ end
+
+ context 'when a custom trigger name is given' do
+ let(:trigger_name) { '_test_trigger' }
+
+ it 'creates the trigger and function with the custom name' do
+ expect_function_not_to_exist(trigger_name)
+ expect_trigger_not_to_exist(table_name, trigger_name)
+
+ copy_trigger.create('id', 'other_id', trigger_name: trigger_name)
+
+ expect_function_to_exist(trigger_name)
+ expect_valid_function_trigger(table_name, trigger_name, trigger_name, before: %w[insert update])
+ end
+ end
+
+ context 'when the trigger function already exists' do
+ let(:trigger_name) { 'trigger_cfce7a56a9d6' }
+
+ it 'does not raise an error' do
+ expect_function_not_to_exist(trigger_name)
+ expect_trigger_not_to_exist(table_name, trigger_name)
+
+ copy_trigger.create('id', 'other_id')
+
+ expect_function_to_exist(trigger_name)
+ expect_valid_function_trigger(table_name, trigger_name, trigger_name, before: %w[insert update])
+
+ copy_trigger.create('id', 'other_id')
+
+ expect_function_to_exist(trigger_name)
+ expect_valid_function_trigger(table_name, trigger_name, trigger_name, before: %w[insert update])
+ end
+ end
+
+ context 'when a different number of new and old column names are given' do
+ it 'raises an error' do
+ expect do
+ copy_trigger.create(%w[id fk_id], %w[other_id])
+ end.to raise_error(ArgumentError, 'number of source and destination columns must match')
+ end
+ end
+ end
+
+ describe '#drop' do
+ let(:trigger_name) { '_test_trigger' }
+
+ before do
+ connection.execute(<<~SQL)
+ CREATE TABLE #{table_name} (
+ id serial NOT NULL PRIMARY KEY,
+ other_id integer NOT NULL);
+
+ CREATE FUNCTION #{trigger_name}()
+ RETURNS trigger
+ LANGUAGE plpgsql AS
+ $$
+ BEGIN
+ RAISE NOTICE 'hello';
+ RETURN NEW;
+ END
+ $$;
+
+ CREATE TRIGGER #{trigger_name}
+ BEFORE INSERT OR UPDATE
+ ON #{table_name}
+ FOR EACH ROW
+ EXECUTE FUNCTION #{trigger_name}();
+ SQL
+ end
+
+ it 'drops the trigger and function for the given arguments' do
+ expect_function_to_exist(trigger_name)
+ expect_valid_function_trigger(table_name, trigger_name, trigger_name, before: %w[insert update])
+
+ copy_trigger.drop(trigger_name)
+
+ expect_trigger_not_to_exist(table_name, trigger_name)
+ expect_function_not_to_exist(trigger_name)
+ end
+
+ context 'when the trigger does not exist' do
+ it 'does not raise an error' do
+ copy_trigger.drop(trigger_name)
+
+ expect_trigger_not_to_exist(table_name, trigger_name)
+ expect_function_not_to_exist(trigger_name)
+
+ copy_trigger.drop(trigger_name)
+ end
+ end
+ end
+end
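
The new spec above drives the copy-trigger helper entirely through four calls (`on_table`, `name`, `create`, `drop`). A minimal usage sketch follows; it assumes the GitLab Rails environment, and the class constant plus the table/column names are inferred from the spec rather than taken from the implementation, so treat them as illustrative only.

    # Hypothetical usage of the copy-trigger helper exercised by the spec above.
    # Only methods the spec itself calls are used; names are assumptions.
    copy_trigger = Gitlab::Database::UnidirectionalCopyTrigger.on_table(:_test_table) # inferred constant
    copy_trigger.create(%w[id fk_id], %w[other_id other_fk_id])   # installs a BEFORE INSERT OR UPDATE trigger
    trigger_name = copy_trigger.name(%w[id fk_id], %w[other_id other_fk_id]) # deterministic "trigger_<hash>" name
    copy_trigger.drop(trigger_name)                                # removes trigger and function; safe to repeat
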
diff --git a/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
index 39029322e25..e70b34d6557 100644
--- a/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
+++ b/spec/lib/gitlab/database_importers/instance_administrators/create_group_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Gitlab::DatabaseImporters::InstanceAdministrators::CreateGroup do
end
end
- context 'with application settings and admin users' do
+ context 'with application settings and admin users', :do_not_mock_admin_mode_setting do
let(:group) { result[:group] }
let(:application_setting) { Gitlab::CurrentSettings.current_application_settings }
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 1553a989dba..b735ac7940b 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -407,13 +407,13 @@ RSpec.describe Gitlab::Database do
expect(described_class.db_read_only?).to be_truthy
end
- it 'detects a read write database' do
+ it 'detects a read-write database' do
allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => "f" }])
expect(described_class.db_read_only?).to be_falsey
end
- it 'detects a read write database' do
+ it 'detects a read-write database' do
allow(ActiveRecord::Base.connection).to receive(:execute).with('SELECT pg_is_in_recovery()').and_return([{ "pg_is_in_recovery" => false }])
expect(described_class.db_read_only?).to be_falsey
diff --git a/spec/lib/gitlab/diff/char_diff_spec.rb b/spec/lib/gitlab/diff/char_diff_spec.rb
index e4e2a3ba050..d38008c16f2 100644
--- a/spec/lib/gitlab/diff/char_diff_spec.rb
+++ b/spec/lib/gitlab/diff/char_diff_spec.rb
@@ -49,15 +49,15 @@ RSpec.describe Gitlab::Diff::CharDiff do
old_diffs, new_diffs = subject
expect(old_diffs).to eq([])
- expect(new_diffs).to eq([0..12])
+ expect(new_diffs).to eq([Gitlab::MarkerRange.new(0, 12, mode: :addition)])
end
end
it 'returns ranges of changes' do
old_diffs, new_diffs = subject
- expect(old_diffs).to eq([11..11])
- expect(new_diffs).to eq([3..3])
+ expect(old_diffs).to eq([Gitlab::MarkerRange.new(11, 11, mode: :deletion)])
+ expect(new_diffs).to eq([Gitlab::MarkerRange.new(3, 3, mode: :addition)])
end
end
diff --git a/spec/lib/gitlab/diff/highlight_cache_spec.rb b/spec/lib/gitlab/diff/highlight_cache_spec.rb
index d26bc5fc9a8..4c56911e665 100644
--- a/spec/lib/gitlab/diff/highlight_cache_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_cache_spec.rb
@@ -238,16 +238,36 @@ RSpec.describe Gitlab::Diff::HighlightCache, :clean_gitlab_redis_cache do
subject { cache.key }
it 'returns cache key' do
- is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:true")
+ is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:true:true:true")
end
- context 'when feature flag is disabled' do
+ context 'when the `introduce_marker_ranges` feature flag is disabled' do
before do
stub_feature_flags(introduce_marker_ranges: false)
end
it 'returns the original version of the cache' do
- is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:false")
+ is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:false:true:true")
+ end
+ end
+
+ context 'when the `use_marker_ranges` feature flag is disabled' do
+ before do
+ stub_feature_flags(use_marker_ranges: false)
+ end
+
+ it 'returns the original version of the cache' do
+ is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:true:false:true")
+ end
+ end
+
+ context 'when the `diff_line_syntax_highlighting` feature flag is disabled' do
+ before do
+ stub_feature_flags(diff_line_syntax_highlighting: false)
+ end
+
+ it 'returns the original version of the cache' do
+ is_expected.to eq("highlighted-diff-files:#{cache.diffable.cache_key}:2:#{cache.diff_options}:true:true:false")
end
end
end
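
The expectations above show the highlight cache key growing from one trailing feature-flag boolean to three. A rough sketch of how such a key could be assembled, inferred only from the asserted strings in this spec; the method name and receiver are hypothetical, not GitLab's implementation:

    # Hypothetical key composition matching the asserted format:
    #   highlighted-diff-files:<diffable cache key>:<version>:<diff options>:<flag>:<flag>:<flag>
    def highlight_cache_key(diffable_cache_key, version, diff_options)
      flags = [
        Feature.enabled?(:introduce_marker_ranges),      # 4th segment
        Feature.enabled?(:use_marker_ranges),            # 5th segment
        Feature.enabled?(:diff_line_syntax_highlighting) # 6th segment
      ]

      ['highlighted-diff-files', diffable_cache_key, version, diff_options, *flags].join(':')
    end
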
diff --git a/spec/lib/gitlab/diff/highlight_spec.rb b/spec/lib/gitlab/diff/highlight_spec.rb
index e613674af3a..32ca6e4fde6 100644
--- a/spec/lib/gitlab/diff/highlight_spec.rb
+++ b/spec/lib/gitlab/diff/highlight_spec.rb
@@ -65,6 +65,14 @@ RSpec.describe Gitlab::Diff::Highlight do
expect(subject[5].rich_text).to eq(code)
end
+
+ context 'when use_marker_ranges feature flag is false too' do
+ it 'does not affect the result' do
+ code = %Q{+<span id="LC9" class="line" lang="ruby"> <span class="k">raise</span> <span class="no"><span class="idiff left">RuntimeError</span></span><span class="p"><span class="idiff">,</span></span><span class="idiff right"> </span><span class="s2">"System commands must be given as an array of strings"</span></span>\n}
+
+ expect(subject[5].rich_text).to eq(code)
+ end
+ end
end
context 'when no diff_refs' do
@@ -132,6 +140,18 @@ RSpec.describe Gitlab::Diff::Highlight do
end
end
+ context 'when `use_marker_ranges` feature flag is disabled' do
+ it 'returns the same result' do
+ with_feature_flag = described_class.new(diff_file, repository: project.repository).highlight
+
+ stub_feature_flags(use_marker_ranges: false)
+
+ without_feature_flag = described_class.new(diff_file, repository: project.repository).highlight
+
+ expect(with_feature_flag.map(&:rich_text)).to eq(without_feature_flag.map(&:rich_text))
+ end
+ end
+
context 'when no inline diffs' do
it_behaves_like 'without inline diffs'
end
diff --git a/spec/lib/gitlab/diff/inline_diff_spec.rb b/spec/lib/gitlab/diff/inline_diff_spec.rb
index 714b5d813c4..d7b50eb73ee 100644
--- a/spec/lib/gitlab/diff/inline_diff_spec.rb
+++ b/spec/lib/gitlab/diff/inline_diff_spec.rb
@@ -3,68 +3,30 @@
require 'spec_helper'
RSpec.describe Gitlab::Diff::InlineDiff do
- describe '.for_lines' do
- let(:diff) do
- <<-EOF.strip_heredoc
- class Test
- - def initialize(test = true)
- + def initialize(test = false)
- @test = test
- - if true
- - @foo = "bar"
- + unless false
- + @foo = "baz"
- end
- end
- end
- EOF
- end
-
- let(:subject) { described_class.for_lines(diff.lines) }
+ describe '#inline_diffs' do
+ subject { described_class.new(old_line, new_line, offset: offset).inline_diffs }
- it 'finds all inline diffs' do
- expect(subject[0]).to be_nil
- expect(subject[1]).to eq([25..27])
- expect(subject[2]).to eq([25..28])
- expect(subject[3]).to be_nil
- expect(subject[4]).to eq([5..10])
- expect(subject[5]).to eq([17..17])
- expect(subject[6]).to eq([5..15])
- expect(subject[7]).to eq([17..17])
- expect(subject[8]).to be_nil
- end
+ let(:old_line) { 'XXX def initialize(test = true)' }
+ let(:new_line) { 'YYY def initialize(test = false)' }
+ let(:offset) { 3 }
- it 'can handle unchanged empty lines' do
- expect { described_class.for_lines(['- bar', '+ baz', '']) }.not_to raise_error
+ it 'finds the inline diff', :aggregate_failures do
+ expect(subject[0]).to eq([Gitlab::MarkerRange.new(26, 28, mode: :deletion)])
+ expect(subject[1]).to eq([Gitlab::MarkerRange.new(26, 29, mode: :addition)])
end
context 'when lines have multiple changes' do
- let(:diff) do
- <<~EOF
- - Hello, how are you?
- + Hi, how are you doing?
- EOF
- end
-
- let(:subject) { described_class.for_lines(diff.lines) }
-
- it 'finds all inline diffs' do
- expect(subject[0]).to eq([3..6])
- expect(subject[1]).to eq([3..3, 17..22])
+ let(:old_line) { '- Hello, how are you?' }
+ let(:new_line) { '+ Hi, how are you doing?' }
+ let(:offset) { 1 }
+
+ it 'finds all inline diffs', :aggregate_failures do
+ expect(subject[0]).to eq([Gitlab::MarkerRange.new(3, 6, mode: :deletion)])
+ expect(subject[1]).to eq([
+ Gitlab::MarkerRange.new(3, 3, mode: :addition),
+ Gitlab::MarkerRange.new(17, 22, mode: :addition)
+ ])
end
end
end
-
- describe "#inline_diffs" do
- let(:old_line) { "XXX def initialize(test = true)" }
- let(:new_line) { "YYY def initialize(test = false)" }
- let(:subject) { described_class.new(old_line, new_line, offset: 3).inline_diffs }
-
- it "finds the inline diff" do
- old_diffs, new_diffs = subject
-
- expect(old_diffs).to eq([26..28])
- expect(new_diffs).to eq([26..29])
- end
- end
end
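
The rewritten spec pins the new return shape in place: `#inline_diffs` now yields arrays of `Gitlab::MarkerRange` objects carrying a `mode`, instead of plain Ruby ranges. A minimal call sketch using only the inputs and outputs asserted above:

    old_line = 'XXX def initialize(test = true)'
    new_line = 'YYY def initialize(test = false)'

    old_diffs, new_diffs = Gitlab::Diff::InlineDiff
      .new(old_line, new_line, offset: 3) # offset: 3, as in the spec's setup
      .inline_diffs

    old_diffs # => [Gitlab::MarkerRange.new(26, 28, mode: :deletion)]
    new_diffs # => [Gitlab::MarkerRange.new(26, 29, mode: :addition)]
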
diff --git a/spec/lib/gitlab/diff/line_spec.rb b/spec/lib/gitlab/diff/line_spec.rb
index e10a50afde9..949def599ae 100644
--- a/spec/lib/gitlab/diff/line_spec.rb
+++ b/spec/lib/gitlab/diff/line_spec.rb
@@ -17,6 +17,8 @@ RSpec.describe Gitlab::Diff::Line do
rich_text: rich_text)
end
+ let(:rich_text) { nil }
+
describe '.init_from_hash' do
let(:rich_text) { '&lt;input&gt;' }
@@ -43,6 +45,29 @@ RSpec.describe Gitlab::Diff::Line do
end
end
+ describe '#text' do
+ let(:line) { described_class.new(raw_diff, 'new', 0, 0, 0) }
+ let(:raw_diff) { '+Hello' }
+
+ it 'returns raw diff text' do
+ expect(line.text).to eq('+Hello')
+ end
+
+ context 'when prefix is disabled' do
+ it 'returns raw diff text without prefix' do
+ expect(line.text(prefix: false)).to eq('Hello')
+ end
+
+ context 'when diff is empty' do
+ let(:raw_diff) { '' }
+
+ it 'returns an empty raw diff' do
+ expect(line.text(prefix: false)).to eq('')
+ end
+ end
+ end
+ end
+
context "when setting rich text" do
it 'escapes any HTML special characters in the diff chunk header' do
subject = described_class.new("<input>", "", 0, 0, 0)
@@ -51,4 +76,14 @@ RSpec.describe Gitlab::Diff::Line do
expect(line[:rich_text]).to eq("&lt;input&gt;")
end
end
+
+ describe '#set_marker_ranges' do
+ let(:marker_ranges) { [Gitlab::MarkerRange.new(1, 10, mode: :deletion)] }
+
+ it 'stores MarkerRanges in Diff::Line object' do
+ line.set_marker_ranges(marker_ranges)
+
+ expect(line.marker_ranges).to eq(marker_ranges)
+ end
+ end
end
diff --git a/spec/lib/gitlab/diff/lines_unfolder_spec.rb b/spec/lib/gitlab/diff/lines_unfolder_spec.rb
index 4163c0eced5..8385cba3532 100644
--- a/spec/lib/gitlab/diff/lines_unfolder_spec.rb
+++ b/spec/lib/gitlab/diff/lines_unfolder_spec.rb
@@ -302,7 +302,8 @@ RSpec.describe Gitlab::Diff::LinesUnfolder do
new_diff_lines = subject.unfolded_diff_lines
new_diff_lines.each_with_index do |line, i|
- old_pos, new_pos = expected_diff_lines[i][0], expected_diff_lines[i][1]
+ old_pos = expected_diff_lines[i][0]
+ new_pos = expected_diff_lines[i][1]
unless line.type == 'match'
expect(line.line_code).to eq(Gitlab::Git.diff_line_code(diff_file.file_path, new_pos, old_pos))
@@ -396,7 +397,8 @@ RSpec.describe Gitlab::Diff::LinesUnfolder do
new_diff_lines = subject.unfolded_diff_lines
new_diff_lines.each_with_index do |line, i|
- old_pos, new_pos = expected_diff_lines[i][0], expected_diff_lines[i][1]
+ old_pos = expected_diff_lines[i][0]
+ new_pos = expected_diff_lines[i][1]
unless line.type == 'match'
expect(line.line_code).to eq(Gitlab::Git.diff_line_code(diff_file.file_path, new_pos, old_pos))
@@ -490,7 +492,8 @@ RSpec.describe Gitlab::Diff::LinesUnfolder do
new_diff_lines = subject.unfolded_diff_lines
new_diff_lines.each_with_index do |line, i|
- old_pos, new_pos = expected_diff_lines[i][0], expected_diff_lines[i][1]
+ old_pos = expected_diff_lines[i][0]
+ new_pos = expected_diff_lines[i][1]
unless line.type == 'match'
expect(line.line_code).to eq(Gitlab::Git.diff_line_code(diff_file.file_path, new_pos, old_pos))
@@ -581,7 +584,8 @@ RSpec.describe Gitlab::Diff::LinesUnfolder do
new_diff_lines = subject.unfolded_diff_lines
new_diff_lines.each_with_index do |line, i|
- old_pos, new_pos = expected_diff_lines[i][0], expected_diff_lines[i][1]
+ old_pos = expected_diff_lines[i][0]
+ new_pos = expected_diff_lines[i][1]
unless line.type == 'match'
expect(line.line_code).to eq(Gitlab::Git.diff_line_code(diff_file.file_path, new_pos, old_pos))
@@ -691,7 +695,8 @@ RSpec.describe Gitlab::Diff::LinesUnfolder do
new_diff_lines = subject.unfolded_diff_lines
new_diff_lines.each_with_index do |line, i|
- old_pos, new_pos = expected_diff_lines[i][0], expected_diff_lines[i][1]
+ old_pos = expected_diff_lines[i][0]
+ new_pos = expected_diff_lines[i][1]
unless line.type == 'match'
expect(line.line_code).to eq(Gitlab::Git.diff_line_code(diff_file.file_path, new_pos, old_pos))
@@ -783,7 +788,8 @@ RSpec.describe Gitlab::Diff::LinesUnfolder do
new_diff_lines = subject.unfolded_diff_lines
new_diff_lines.each_with_index do |line, i|
- old_pos, new_pos = expected_diff_lines[i][0], expected_diff_lines[i][1]
+ old_pos = expected_diff_lines[i][0]
+ new_pos = expected_diff_lines[i][1]
unless line.type == 'match'
expect(line.line_code).to eq(Gitlab::Git.diff_line_code(diff_file.file_path, new_pos, old_pos))
diff --git a/spec/lib/gitlab/diff/suggestions_parser_spec.rb b/spec/lib/gitlab/diff/suggestions_parser_spec.rb
index 5efce414dc8..a00c55d4fb2 100644
--- a/spec/lib/gitlab/diff/suggestions_parser_spec.rb
+++ b/spec/lib/gitlab/diff/suggestions_parser_spec.rb
@@ -56,7 +56,8 @@ RSpec.describe Gitlab::Diff::SuggestionsParser do
end
it 'parsed suggestion has correct data' do
- from_line, to_line = position.new_line, position.new_line
+ from_line = position.new_line
+ to_line = position.new_line
expect(subject.first.to_hash).to include(from_content: blob_lines_data(from_line, to_line),
to_content: " foo\n bar\n",
diff --git a/spec/lib/gitlab/downtime_check/message_spec.rb b/spec/lib/gitlab/downtime_check/message_spec.rb
deleted file mode 100644
index 2d82836db33..00000000000
--- a/spec/lib/gitlab/downtime_check/message_spec.rb
+++ /dev/null
@@ -1,41 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::DowntimeCheck::Message do
- describe '#to_s' do
- it 'returns an ANSI formatted String for an offline migration' do
- message = described_class.new('foo.rb', true, 'hello')
-
- expect(message.to_s).to eq("[\e[31moffline\e[0m]: foo.rb:\n\nhello\n\n")
- end
-
- it 'returns an ANSI formatted String for an online migration' do
- message = described_class.new('foo.rb')
-
- expect(message.to_s).to eq("[\e[32monline\e[0m]: foo.rb")
- end
- end
-
- describe '#reason?' do
- it 'returns false when no reason is specified' do
- message = described_class.new('foo.rb')
-
- expect(message.reason?).to eq(false)
- end
-
- it 'returns true when a reason is specified' do
- message = described_class.new('foo.rb', true, 'hello')
-
- expect(message.reason?).to eq(true)
- end
- end
-
- describe '#reason' do
- it 'strips excessive whitespace from the returned String' do
- message = described_class.new('foo.rb', true, " hello\n world\n\n foo")
-
- expect(message.reason).to eq("hello\nworld\n\nfoo")
- end
- end
-end
diff --git a/spec/lib/gitlab/downtime_check_spec.rb b/spec/lib/gitlab/downtime_check_spec.rb
deleted file mode 100644
index 761519425f6..00000000000
--- a/spec/lib/gitlab/downtime_check_spec.rb
+++ /dev/null
@@ -1,116 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::DowntimeCheck do
- subject { described_class.new }
-
- let(:path) { 'foo.rb' }
-
- describe '#check' do
- before do
- expect(subject).to receive(:require).with(path)
- end
-
- context 'when a migration does not specify if downtime is required' do
- it 'raises RuntimeError' do
- expect(subject).to receive(:class_for_migration_file)
- .with(path)
- .and_return(Class.new)
-
- expect { subject.check([path]) }
- .to raise_error(RuntimeError, /it requires downtime/)
- end
- end
-
- context 'when a migration requires downtime' do
- context 'when no reason is specified' do
- it 'raises RuntimeError' do
- stub_const('TestMigration::DOWNTIME', true)
-
- expect(subject).to receive(:class_for_migration_file)
- .with(path)
- .and_return(TestMigration)
-
- expect { subject.check([path]) }
- .to raise_error(RuntimeError, /no reason was given/)
- end
- end
-
- context 'when a reason is specified' do
- it 'returns an Array of messages' do
- stub_const('TestMigration::DOWNTIME', true)
- stub_const('TestMigration::DOWNTIME_REASON', 'foo')
-
- expect(subject).to receive(:class_for_migration_file)
- .with(path)
- .and_return(TestMigration)
-
- messages = subject.check([path])
-
- expect(messages).to be_an_instance_of(Array)
- expect(messages[0]).to be_an_instance_of(Gitlab::DowntimeCheck::Message)
-
- message = messages[0]
-
- expect(message.path).to eq(path)
- expect(message.offline).to eq(true)
- expect(message.reason).to eq('foo')
- end
- end
- end
- end
-
- describe '#check_and_print' do
- it 'checks the migrations and prints the results to STDOUT' do
- stub_const('TestMigration::DOWNTIME', true)
- stub_const('TestMigration::DOWNTIME_REASON', 'foo')
-
- expect(subject).to receive(:require).with(path)
-
- expect(subject).to receive(:class_for_migration_file)
- .with(path)
- .and_return(TestMigration)
-
- expect(subject).to receive(:puts).with(an_instance_of(String))
-
- subject.check_and_print([path])
- end
- end
-
- describe '#class_for_migration_file' do
- it 'returns the class for a migration file path' do
- expect(subject.class_for_migration_file('123_string.rb')).to eq(String)
- end
- end
-
- describe '#online?' do
- it 'returns true when a migration can be performed online' do
- stub_const('TestMigration::DOWNTIME', false)
-
- expect(subject.online?(TestMigration)).to eq(true)
- end
-
- it 'returns false when a migration can not be performed online' do
- stub_const('TestMigration::DOWNTIME', true)
-
- expect(subject.online?(TestMigration)).to eq(false)
- end
- end
-
- describe '#downtime_reason' do
- context 'when a reason is defined' do
- it 'returns the downtime reason' do
- stub_const('TestMigration::DOWNTIME_REASON', 'hello')
-
- expect(subject.downtime_reason(TestMigration)).to eq('hello')
- end
- end
-
- context 'when a reason is not defined' do
- it 'returns nil' do
- expect(subject.downtime_reason(Class.new)).to be_nil
- end
- end
- end
-end
diff --git a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
index 8872800069a..e76a5d3fe32 100644
--- a/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_note_handler_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
end
shared_examples 'a reply to existing comment' do
- it 'creates a comment' do
+ it 'creates a discussion' do
expect { receiver.execute }.to change { noteable.notes.count }.by(1)
new_note = noteable.notes.last
@@ -68,11 +68,7 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteHandler do
expect(new_note.note).to include('I could not disagree more.')
expect(new_note.in_reply_to?(note)).to be_truthy
- if note.part_of_discussion?
- expect(new_note.discussion_id).to eq(note.discussion_id)
- else
- expect(new_note.discussion_id).not_to eq(note.discussion_id)
- end
+ expect(new_note.discussion_id).to eq(note.discussion_id)
end
end
diff --git a/spec/lib/gitlab/email/handler/create_note_on_issuable_handler_spec.rb b/spec/lib/gitlab/email/handler/create_note_on_issuable_handler_spec.rb
index 94f28d3399a..d3535fa9bd3 100644
--- a/spec/lib/gitlab/email/handler/create_note_on_issuable_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/create_note_on_issuable_handler_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::Email::Handler::CreateNoteOnIssuableHandler do
it_behaves_like :note_handler_shared_examples, true do
let_it_be(:recipient) { user }
- let(:update_commands_only) { email_reply_fixture('emails/update_commands_only_reply.eml') }
+ let(:update_commands_only) { email_reply_fixture('emails/update_commands_only.eml') }
let(:no_content) { email_reply_fixture('emails/no_content_reply.eml') }
let(:commands_in_reply) { email_reply_fixture('emails/commands_in_reply.eml') }
let(:with_quick_actions) { email_reply_fixture('emails/valid_reply_with_quick_actions.eml') }
diff --git a/spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb b/spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb
index 13ad9ddd8ef..2c1badbd113 100644
--- a/spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb
+++ b/spec/lib/gitlab/email/handler/unsubscribe_handler_spec.rb
@@ -74,7 +74,7 @@ RSpec.describe Gitlab::Email::Handler::UnsubscribeHandler do
context 'when the noteable could not be found' do
before do
- noteable.destroy
+ noteable.destroy!
end
it 'raises a NoteableNotFoundError' do
diff --git a/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb
index 0db40eca989..24f5299d357 100644
--- a/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/context_payload_processor_spec.rb
@@ -1,45 +1,66 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::ErrorTracking::Processor::ContextPayloadProcessor do
- subject(:processor) { described_class.new }
-
- before do
- allow_next_instance_of(Gitlab::ErrorTracking::ContextPayloadGenerator) do |generator|
- allow(generator).to receive(:generate).and_return(
- user: { username: 'root' },
- tags: { locale: 'en', program: 'test', feature_category: 'feature_a', correlation_id: 'cid' },
- extra: { some_info: 'info' }
- )
+ shared_examples 'processing an exception' do
+ before do
+ allow_next_instance_of(Gitlab::ErrorTracking::ContextPayloadGenerator) do |generator|
+ allow(generator).to receive(:generate).and_return(
+ user: { username: 'root' },
+ tags: { locale: 'en', program: 'test', feature_category: 'feature_a', correlation_id: 'cid' },
+ extra: { some_info: 'info' }
+ )
+ end
end
- end
- it 'merges the context payload into event payload' do
- payload = {
- user: { ip_address: '127.0.0.1' },
- tags: { priority: 'high' },
- extra: { sidekiq: { class: 'SomeWorker', args: ['[FILTERED]', 1, 2] } }
- }
-
- processor.process(payload)
-
- expect(payload).to eql(
- user: {
- ip_address: '127.0.0.1',
- username: 'root'
- },
- tags: {
- priority: 'high',
- locale: 'en',
- program: 'test',
- feature_category: 'feature_a',
- correlation_id: 'cid'
- },
- extra: {
- some_info: 'info',
- sidekiq: { class: 'SomeWorker', args: ['[FILTERED]', 1, 2] }
+ let(:payload) do
+ {
+ user: { ip_address: '127.0.0.1' },
+ tags: { priority: 'high' },
+ extra: { sidekiq: { class: 'SomeWorker', args: ['[FILTERED]', 1, 2] } }
}
- )
+ end
+
+ it 'merges the context payload into event payload', :aggregate_failures do
+ expect(result_hash[:user]).to include(ip_address: '127.0.0.1', username: 'root')
+
+ expect(result_hash[:tags])
+ .to include(priority: 'high',
+ locale: 'en',
+ program: 'test',
+ feature_category: 'feature_a',
+ correlation_id: 'cid')
+
+ expect(result_hash[:extra])
+ .to include(some_info: 'info',
+ sidekiq: { class: 'SomeWorker', args: ['[FILTERED]', 1, 2] })
+ end
+ end
+
+ describe '.call' do
+ let(:event) { Raven::Event.new(payload) }
+ let(:result_hash) { described_class.call(event).to_hash }
+
+ it_behaves_like 'processing an exception'
+
+ context 'when followed by #process' do
+ let(:result_hash) { described_class.new.process(described_class.call(event).to_hash) }
+
+ it_behaves_like 'processing an exception'
+ end
+ end
+
+ describe '#process' do
+ let(:event) { Raven::Event.new(payload) }
+ let(:result_hash) { described_class.new.process(event.to_hash) }
+
+ context 'with sentry_processors_before_send disabled' do
+ before do
+ stub_feature_flags(sentry_processors_before_send: false)
+ end
+
+ it_behaves_like 'processing an exception'
+ end
end
end
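
The reworked processor specs exercise two entry points: a class-level `.call(event)` (the path used when the `sentry_processors_before_send` flag is enabled) and the legacy instance `#process(hash)`. A rough sketch of that dual interface, inferred from the spec usage alone; the class name and method bodies below are placeholders, not GitLab's implementation:

    # Hypothetical processor exposing both entry points the specs drive.
    class ExamplePayloadProcessor
      # New path: takes a Raven::Event, augments it, returns the event so it
      # can be chained as a Sentry before_send hook.
      def self.call(event)
        event.extra = (event.extra || {}).merge(processed_by: name) # Raven::Event#extra assumed writable
        event
      end

      # Legacy path: takes and returns a plain payload hash.
      def process(payload)
        payload[:extra] = (payload[:extra] || {}).to_h.merge(processed_by: self.class.name)
        payload
      end
    end
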
diff --git a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
index 797707114a1..4808fdf2f06 100644
--- a/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/grpc_error_processor_spec.rb
@@ -3,73 +3,83 @@
require 'spec_helper'
RSpec.describe Gitlab::ErrorTracking::Processor::GrpcErrorProcessor do
- describe '#process' do
- subject { described_class.new }
-
+ shared_examples 'processing an exception' do
context 'when there is no GRPC exception' do
+ let(:exception) { RuntimeError.new }
let(:data) { { fingerprint: ['ArgumentError', 'Missing arguments'] } }
it 'leaves data unchanged' do
- expect(subject.process(data)).to eq(data)
+ expect(result_hash).to include(data)
end
end
context 'when there is a GRPC exception with a debug string' do

+ let(:exception) { GRPC::DeadlineExceeded.new('Deadline Exceeded', {}, '{"hello":1}') }
+
let(:data) do
{
- exception: {
- values: [
- {
- type: "GRPC::DeadlineExceeded",
- value: "4:DeadlineExceeded. debug_error_string:{\"hello\":1}"
- }
- ]
- },
extra: {
caller: 'test'
},
fingerprint: [
- "GRPC::DeadlineExceeded",
- "4:Deadline Exceeded. debug_error_string:{\"created\":\"@1598938192.005782000\",\"description\":\"Error received from peer unix:/home/git/gitalypraefect.socket\",\"file\":\"src/core/lib/surface/call.cc\",\"file_line\":1055,\"grpc_message\":\"Deadline Exceeded\",\"grpc_status\":4}"
+ 'GRPC::DeadlineExceeded',
+ '4:Deadline Exceeded. debug_error_string:{"created":"@1598938192.005782000","description":"Error received from peer unix:/home/git/gitalypraefect.socket","file":"src/core/lib/surface/call.cc","file_line":1055,"grpc_message":"Deadline Exceeded","grpc_status":4}'
]
}
end
- let(:expected) do
- {
- fingerprint: [
- "GRPC::DeadlineExceeded",
- "4:Deadline Exceeded."
- ],
- exception: {
- values: [
- {
- type: "GRPC::DeadlineExceeded",
- value: "4:DeadlineExceeded."
- }
- ]
- },
- extra: {
- caller: 'test',
- grpc_debug_error_string: "{\"hello\":1}"
- }
- }
- end
-
it 'removes the debug error string and stores it as an extra field' do
- expect(subject.process(data)).to eq(expected)
+ expect(result_hash[:fingerprint])
+ .to eq(['GRPC::DeadlineExceeded', '4:Deadline Exceeded.'])
+
+ expect(result_hash[:exception][:values].first)
+ .to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+
+ expect(result_hash[:extra])
+ .to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
end
context 'with no custom fingerprint' do
- before do
- data.delete(:fingerprint)
- expected.delete(:fingerprint)
+ let(:data) do
+ { extra: { caller: 'test' } }
end
it 'removes the debug error string and stores it as an extra field' do
- expect(subject.process(data)).to eq(expected)
+ expect(result_hash).not_to include(:fingerprint)
+
+ expect(result_hash[:exception][:values].first)
+ .to include(type: 'GRPC::DeadlineExceeded', value: '4:Deadline Exceeded.')
+
+ expect(result_hash[:extra])
+ .to include(caller: 'test', grpc_debug_error_string: '{"hello":1}')
end
end
end
end
+
+ describe '.call' do
+ let(:event) { Raven::Event.from_exception(exception, data) }
+ let(:result_hash) { described_class.call(event).to_hash }
+
+ it_behaves_like 'processing an exception'
+
+ context 'when followed by #process' do
+ let(:result_hash) { described_class.new.process(described_class.call(event).to_hash) }
+
+ it_behaves_like 'processing an exception'
+ end
+ end
+
+ describe '#process' do
+ let(:event) { Raven::Event.from_exception(exception, data) }
+ let(:result_hash) { described_class.new.process(event.to_hash) }
+
+ context 'with sentry_processors_before_send disabled' do
+ before do
+ stub_feature_flags(sentry_processors_before_send: false)
+ end
+
+ it_behaves_like 'processing an exception'
+ end
+ end
end
diff --git a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
index da7205c7f4f..20fd5d085a9 100644
--- a/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
+++ b/spec/lib/gitlab/error_tracking/processor/sidekiq_processor_spec.rb
@@ -94,28 +94,37 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
end
end
- describe '#process' do
+ shared_examples 'processing an exception' do
context 'when there is Sidekiq data' do
+ let(:wrapped_value) { { extra: { sidekiq: value } } }
+
shared_examples 'Sidekiq arguments' do |args_in_job_hash: true|
let(:path) { [:extra, :sidekiq, args_in_job_hash ? :job : nil, 'args'].compact }
let(:args) { [1, 'string', { a: 1 }, [1, 2]] }
- it 'only allows numeric arguments for an unknown worker' do
- value = { 'args' => args, 'class' => 'UnknownWorker' }
+ context 'for an unknown worker' do
+ let(:value) do
+ hash = { 'args' => args, 'class' => 'UnknownWorker' }
- value = { job: value } if args_in_job_hash
+ args_in_job_hash ? { job: hash } : hash
+ end
- expect(subject.process(extra_sidekiq(value)).dig(*path))
- .to eq([1, described_class::FILTERED_STRING, described_class::FILTERED_STRING, described_class::FILTERED_STRING])
+ it 'only allows numeric arguments for an unknown worker' do
+ expect(result_hash.dig(*path))
+ .to eq([1, described_class::FILTERED_STRING, described_class::FILTERED_STRING, described_class::FILTERED_STRING])
+ end
end
- it 'allows all argument types for a permitted worker' do
- value = { 'args' => args, 'class' => 'PostReceive' }
+ context 'for a permitted worker' do
+ let(:value) do
+ hash = { 'args' => args, 'class' => 'PostReceive' }
- value = { job: value } if args_in_job_hash
+ args_in_job_hash ? { job: hash } : hash
+ end
- expect(subject.process(extra_sidekiq(value)).dig(*path))
- .to eq(args)
+ it 'allows all argument types for a permitted worker' do
+ expect(result_hash.dig(*path)).to eq(args)
+ end
end
end
@@ -127,39 +136,62 @@ RSpec.describe Gitlab::ErrorTracking::Processor::SidekiqProcessor do
include_examples 'Sidekiq arguments', args_in_job_hash: false
end
- it 'removes a jobstr field if present' do
- value = {
- job: { 'args' => [1] },
- jobstr: { 'args' => [1] }.to_json
- }
+ context 'when a jobstr field is present' do
+ let(:value) do
+ {
+ job: { 'args' => [1] },
+ jobstr: { 'args' => [1] }.to_json
+ }
+ end
- expect(subject.process(extra_sidekiq(value)))
- .to eq(extra_sidekiq(value.except(:jobstr)))
+ it 'removes the jobstr' do
+ expect(result_hash.dig(:extra, :sidekiq)).to eq(value.except(:jobstr))
+ end
end
- it 'does nothing with no jobstr' do
- value = { job: { 'args' => [1] } }
+ context 'when no jobstr value is present' do
+ let(:value) { { job: { 'args' => [1] } } }
- expect(subject.process(extra_sidekiq(value)))
- .to eq(extra_sidekiq(value))
+ it 'does nothing' do
+ expect(result_hash.dig(:extra, :sidekiq)).to eq(value)
+ end
end
end
context 'when there is no Sidekiq data' do
- it 'does nothing' do
- value = {
- request: {
- method: 'POST',
- data: { 'key' => 'value' }
- }
- }
+ let(:value) { { tags: { foo: 'bar', baz: 'quux' } } }
+ let(:wrapped_value) { value }
- expect(subject.process(value)).to eq(value)
+ it 'does nothing' do
+ expect(result_hash).to include(value)
+ expect(result_hash.dig(:extra, :sidekiq)).to be_nil
end
end
+ end
+
+ describe '.call' do
+ let(:event) { Raven::Event.new(wrapped_value) }
+ let(:result_hash) { described_class.call(event).to_hash }
+
+ it_behaves_like 'processing an exception'
+
+ context 'when followed by #process' do
+ let(:result_hash) { described_class.new.process(described_class.call(event).to_hash) }
+
+ it_behaves_like 'processing an exception'
+ end
+ end
+
+ describe '#process' do
+ let(:event) { Raven::Event.new(wrapped_value) }
+ let(:result_hash) { described_class.new.process(event.to_hash) }
+
+ context 'with sentry_processors_before_send disabled' do
+ before do
+ stub_feature_flags(sentry_processors_before_send: false)
+ end
- def extra_sidekiq(hash)
- { extra: { sidekiq: hash } }
+ it_behaves_like 'processing an exception'
end
end
end
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index a905b9f8d40..2e67a9f0874 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -7,6 +7,7 @@ require 'raven/transports/dummy'
RSpec.describe Gitlab::ErrorTracking do
let(:exception) { RuntimeError.new('boom') }
let(:issue_url) { 'http://gitlab.com/gitlab-org/gitlab-foss/issues/1' }
+ let(:extra) { { issue_url: issue_url, some_other_info: 'info' } }
let(:user) { create(:user) }
@@ -42,6 +43,8 @@ RSpec.describe Gitlab::ErrorTracking do
}
end
+ let(:sentry_event) { Gitlab::Json.parse(Raven.client.transport.events.last[1]) }
+
before do
stub_sentry_settings
@@ -133,8 +136,6 @@ RSpec.describe Gitlab::ErrorTracking do
end
describe '.track_exception' do
- let(:extra) { { issue_url: issue_url, some_other_info: 'info' } }
-
subject(:track_exception) { described_class.track_exception(exception, extra) }
before do
@@ -195,6 +196,55 @@ RSpec.describe Gitlab::ErrorTracking do
end
end
+ context 'when the error is kind of an `ActiveRecord::StatementInvalid`' do
+ let(:exception) { ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1') }
+
+ it 'injects the normalized sql query into extra' do
+ track_exception
+
+ expect(sentry_event.dig('extra', 'sql')).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
+ end
+ end
+
+ context 'when the `ActiveRecord::StatementInvalid` is wrapped in another exception' do
+ it 'injects the normalized sql query into extra' do
+ allow(exception).to receive(:cause).and_return(ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1'))
+
+ track_exception
+
+ expect(sentry_event.dig('extra', 'sql')).to eq('SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
+ end
+ end
+ end
+
+ shared_examples 'event processors' do
+ subject(:track_exception) { described_class.track_exception(exception, extra) }
+
+ before do
+ allow(Raven).to receive(:capture_exception).and_call_original
+ allow(Gitlab::ErrorTracking::Logger).to receive(:error)
+ end
+
+ context 'custom GitLab context when using Raven.capture_exception directly' do
+ subject(:raven_capture_exception) { Raven.capture_exception(exception) }
+
+ it 'merges a default set of tags into the existing tags' do
+ allow(Raven.context).to receive(:tags).and_return(foo: 'bar')
+
+ raven_capture_exception
+
+ expect(sentry_event['tags']).to include('correlation_id', 'feature_category', 'foo', 'locale', 'program')
+ end
+
+ it 'merges the current user information into the existing user information' do
+ Raven.user_context(id: -1)
+
+ raven_capture_exception
+
+ expect(sentry_event['user']).to eq('id' => -1, 'username' => user.username)
+ end
+ end
+
context 'with sidekiq args' do
context 'when the args does not have anything sensitive' do
let(:extra) { { sidekiq: { 'class' => 'PostReceive', 'args' => [1, { 'id' => 2, 'name' => 'hello' }, 'some-value', 'another-value'] } } }
@@ -211,16 +261,20 @@ RSpec.describe Gitlab::ErrorTracking do
)
)
end
+
+ it 'does not filter parameters when sending to Sentry' do
+ track_exception
+
+ expect(sentry_event.dig('extra', 'sidekiq', 'args')).to eq([1, { 'id' => 2, 'name' => 'hello' }, 'some-value', 'another-value'])
+ end
end
context 'when the args has sensitive information' do
let(:extra) { { sidekiq: { 'class' => 'UnknownWorker', 'args' => ['sensitive string', 1, 2] } } }
- it 'filters sensitive arguments before sending' do
+ it 'filters sensitive arguments before sending and logging' do
track_exception
- sentry_event = Gitlab::Json.parse(Raven.client.transport.events.last[1])
-
expect(sentry_event.dig('extra', 'sidekiq', 'args')).to eq(['[FILTERED]', 1, 2])
expect(Gitlab::ErrorTracking::Logger).to have_received(:error).with(
hash_including(
@@ -234,28 +288,44 @@ RSpec.describe Gitlab::ErrorTracking do
end
end
- context 'when the error is kind of an `ActiveRecord::StatementInvalid`' do
- let(:exception) { ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1') }
+ context 'when the error is a GRPC error' do
+ context 'when the GRPC error contains a debug_error_string value' do
+ let(:exception) { GRPC::DeadlineExceeded.new('unknown cause', {}, '{"hello":1}') }
- it 'injects the normalized sql query into extra' do
- allow(Raven.client.transport).to receive(:send_event) do |event|
- expect(event.extra).to include(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
+ it 'sets the GRPC debug error string in the Sentry event and adds a custom fingerprint' do
+ track_exception
+
+ expect(sentry_event.dig('extra', 'grpc_debug_error_string')).to eq('{"hello":1}')
+ expect(sentry_event['fingerprint']).to eq(['GRPC::DeadlineExceeded', '4:unknown cause.'])
end
+ end
- track_exception
+ context 'when the GRPC error does not contain a debug_error_string value' do
+ let(:exception) { GRPC::DeadlineExceeded.new }
+
+ it 'does not do any processing on the event' do
+ track_exception
+
+ expect(sentry_event['extra']).not_to include('grpc_debug_error_string')
+ expect(sentry_event['fingerprint']).to eq(['GRPC::DeadlineExceeded', '4:unknown cause'])
+ end
end
end
+ end
- context 'when the `ActiveRecord::StatementInvalid` is wrapped in another exception' do
- let(:exception) { RuntimeError.new(cause: ActiveRecord::StatementInvalid.new(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = 1 AND "users"."foo" = $1')) }
+ context 'with sentry_processors_before_send enabled' do
+ before do
+ stub_feature_flags(sentry_processors_before_send: true)
+ end
- it 'injects the normalized sql query into extra' do
- allow(Raven.client.transport).to receive(:send_event) do |event|
- expect(event.extra).to include(sql: 'SELECT "users".* FROM "users" WHERE "users"."id" = $2 AND "users"."foo" = $1')
- end
+ include_examples 'event processors'
+ end
- track_exception
- end
+ context 'with sentry_processors_before_send disabled' do
+ before do
+ stub_feature_flags(sentry_processors_before_send: false)
end
+
+ include_examples 'event processors'
end
end
diff --git a/spec/lib/gitlab/experimentation_spec.rb b/spec/lib/gitlab/experimentation_spec.rb
index 83c6b556fc6..5fef14bd2a0 100644
--- a/spec/lib/gitlab/experimentation_spec.rb
+++ b/spec/lib/gitlab/experimentation_spec.rb
@@ -7,7 +7,6 @@ require 'spec_helper'
RSpec.describe Gitlab::Experimentation::EXPERIMENTS do
it 'temporarily ensures we know what experiments exist for backwards compatibility' do
expected_experiment_keys = [
- :upgrade_link_in_user_menu_a,
:invite_members_version_b,
:invite_members_empty_group_version_a,
:contact_sales_btn_in_app
diff --git a/spec/lib/gitlab/git/diff_collection_spec.rb b/spec/lib/gitlab/git/diff_collection_spec.rb
index 1a3c332a21b..114b3d01952 100644
--- a/spec/lib/gitlab/git/diff_collection_spec.rb
+++ b/spec/lib/gitlab/git/diff_collection_spec.rb
@@ -31,6 +31,19 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
end
end
+ let(:overflow_max_bytes) { false }
+ let(:overflow_max_files) { false }
+ let(:overflow_max_lines) { false }
+
+ shared_examples 'overflow stuff' do
+ it 'returns the expected overflow values' do
+ subject.overflow?
+ expect(subject.overflow_max_bytes?).to eq(overflow_max_bytes)
+ expect(subject.overflow_max_files?).to eq(overflow_max_files)
+ expect(subject.overflow_max_lines?).to eq(overflow_max_lines)
+ end
+ end
+
subject do
Gitlab::Git::DiffCollection.new(
iterator,
@@ -76,12 +89,19 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
end
context 'overflow handling' do
+ subject { super() }
+
+ let(:collapsed_safe_files) { false }
+ let(:collapsed_safe_lines) { false }
+
context 'adding few enough files' do
let(:file_count) { 3 }
context 'and few enough lines' do
let(:line_count) { 10 }
+ it_behaves_like 'overflow stuff'
+
describe '#overflow?' do
subject { super().overflow? }
@@ -117,6 +137,11 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
context 'when limiting is disabled' do
let(:limits) { false }
+ let(:overflow_max_bytes) { false }
+ let(:overflow_max_files) { false }
+ let(:overflow_max_lines) { false }
+
+ it_behaves_like 'overflow stuff'
describe '#overflow?' do
subject { super().overflow? }
@@ -155,6 +180,9 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
context 'and too many lines' do
let(:line_count) { 1000 }
+ let(:overflow_max_lines) { true }
+
+ it_behaves_like 'overflow stuff'
describe '#overflow?' do
subject { super().overflow? }
@@ -184,6 +212,11 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
context 'when limiting is disabled' do
let(:limits) { false }
+ let(:overflow_max_bytes) { false }
+ let(:overflow_max_files) { false }
+ let(:overflow_max_lines) { false }
+
+ it_behaves_like 'overflow stuff'
describe '#overflow?' do
subject { super().overflow? }
@@ -216,10 +249,13 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
context 'adding too many files' do
let(:file_count) { 11 }
+ let(:overflow_max_files) { true }
context 'and few enough lines' do
let(:line_count) { 1 }
+ it_behaves_like 'overflow stuff'
+
describe '#overflow?' do
subject { super().overflow? }
@@ -248,6 +284,11 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
context 'when limiting is disabled' do
let(:limits) { false }
+ let(:overflow_max_bytes) { false }
+ let(:overflow_max_files) { false }
+ let(:overflow_max_lines) { false }
+
+ it_behaves_like 'overflow stuff'
describe '#overflow?' do
subject { super().overflow? }
@@ -279,6 +320,10 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
context 'and too many lines' do
let(:line_count) { 30 }
+ let(:overflow_max_lines) { true }
+ let(:overflow_max_files) { false }
+
+ it_behaves_like 'overflow stuff'
describe '#overflow?' do
subject { super().overflow? }
@@ -308,6 +353,11 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
context 'when limiting is disabled' do
let(:limits) { false }
+ let(:overflow_max_bytes) { false }
+ let(:overflow_max_files) { false }
+ let(:overflow_max_lines) { false }
+
+ it_behaves_like 'overflow stuff'
describe '#overflow?' do
subject { super().overflow? }
@@ -344,6 +394,8 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
context 'and few enough lines' do
let(:line_count) { 1 }
+ it_behaves_like 'overflow stuff'
+
describe '#overflow?' do
subject { super().overflow? }
@@ -375,6 +427,9 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
context 'adding too many bytes' do
let(:file_count) { 10 }
let(:line_length) { 5200 }
+ let(:overflow_max_bytes) { true }
+
+ it_behaves_like 'overflow stuff'
describe '#overflow?' do
subject { super().overflow? }
@@ -404,6 +459,11 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
context 'when limiting is disabled' do
let(:limits) { false }
+ let(:overflow_max_bytes) { false }
+ let(:overflow_max_files) { false }
+ let(:overflow_max_lines) { false }
+
+ it_behaves_like 'overflow stuff'
describe '#overflow?' do
subject { super().overflow? }
@@ -437,6 +497,8 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
describe 'empty collection' do
subject { Gitlab::Git::DiffCollection.new([]) }
+ it_behaves_like 'overflow stuff'
+
describe '#overflow?' do
subject { super().overflow? }
@@ -555,7 +617,7 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
.and_return({ max_files: 2, max_lines: max_lines })
end
- it 'prunes diffs by default even little ones' do
+ it 'prunes diffs by default even little ones and sets collapsed_safe_files true' do
subject.each_with_index do |d, i|
if i < 2
expect(d.diff).not_to eq('')
@@ -563,6 +625,8 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
expect(d.diff).to eq('')
end
end
+
+ expect(subject.collapsed_safe_files?).to eq(true)
end
end
@@ -582,7 +646,7 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
.and_return({ max_files: max_files, max_lines: 80 })
end
- it 'prunes diffs by default even little ones' do
+ it 'prunes diffs by default even little ones and sets collapsed_safe_lines true' do
subject.each_with_index do |d, i|
if i < 2
expect(d.diff).not_to eq('')
@@ -590,26 +654,30 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
expect(d.diff).to eq('')
end
end
+
+ expect(subject.collapsed_safe_lines?).to eq(true)
end
end
context 'when go over safe limits on bytes' do
let(:iterator) do
[
- fake_diff(1, 45),
- fake_diff(1, 45),
- fake_diff(1, 20480),
- fake_diff(1, 1)
+ fake_diff(5, 10),
+ fake_diff(5000, 10),
+ fake_diff(5, 10),
+ fake_diff(5, 10)
]
end
before do
+ allow(Gitlab::CurrentSettings).to receive(:diff_max_patch_bytes).and_return(1.megabyte)
+
allow(Gitlab::Git::DiffCollection)
.to receive(:default_limits)
- .and_return({ max_files: max_files, max_lines: 80 })
+ .and_return({ max_files: 4, max_lines: 3000 })
end
- it 'prunes diffs by default even little ones' do
+ it 'prunes diffs by default even little ones and sets collapsed_safe_bytes true' do
subject.each_with_index do |d, i|
if i < 2
expect(d.diff).not_to eq('')
@@ -617,6 +685,8 @@ RSpec.describe Gitlab::Git::DiffCollection, :seed_helper do
expect(d.diff).to eq('')
end
end
+
+ expect(subject.collapsed_safe_bytes?).to eq(true)
end
end
end
diff --git a/spec/lib/gitlab/git/repository_spec.rb b/spec/lib/gitlab/git/repository_spec.rb
index cc1b1ceadcf..1e259c9c153 100644
--- a/spec/lib/gitlab/git/repository_spec.rb
+++ b/spec/lib/gitlab/git/repository_spec.rb
@@ -564,6 +564,41 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
end
end
+ describe '#search_files_by_regexp' do
+ let(:ref) { 'master' }
+
+ subject(:result) { mutable_repository.search_files_by_regexp(filter, ref) }
+
+ context 'when sending a valid regexp' do
+ let(:filter) { 'files\/.*\/.*\.rb' }
+
+ it 'returns matched files' do
+ expect(result).to contain_exactly('files/links/regex.rb',
+ 'files/ruby/popen.rb',
+ 'files/ruby/regex.rb',
+ 'files/ruby/version_info.rb')
+ end
+ end
+
+ context 'when sending an invalid regexp' do
+ let(:filter) { '*.rb' }
+
+ it 'raises error' do
+ expect { result }.to raise_error(GRPC::InvalidArgument,
+ /missing argument to repetition operator: `*`/)
+ end
+ end
+
+ context "when the ref doesn't exist" do
+ let(:filter) { 'files\/.*\/.*\.rb' }
+ let(:ref) { 'non-existing-branch' }
+
+ it 'returns an empty array' do
+ expect(result).to eq([])
+ end
+ end
+ end
+
describe '#find_remote_root_ref' do
it 'gets the remote root ref from GitalyClient' do
expect_any_instance_of(Gitlab::GitalyClient::RemoteService)
@@ -1711,14 +1746,15 @@ RSpec.describe Gitlab::Git::Repository, :seed_helper do
let(:right_branch) { 'test-master' }
let(:first_parent_ref) { 'refs/heads/test-master' }
let(:target_ref) { 'refs/merge-requests/999/merge' }
- let(:allow_conflicts) { false }
before do
repository.create_branch(right_branch, branch_head) unless repository.ref_exists?(first_parent_ref)
end
def merge_to_ref
- repository.merge_to_ref(user, left_sha, right_branch, target_ref, 'Merge message', first_parent_ref, allow_conflicts)
+ repository.merge_to_ref(user,
+ source_sha: left_sha, branch: right_branch, target_ref: target_ref,
+ message: 'Merge message', first_parent_ref: first_parent_ref)
end
it 'generates a commit in the target_ref' do
diff --git a/spec/lib/gitlab/git/tag_spec.rb b/spec/lib/gitlab/git/tag_spec.rb
index f83ccc6cae0..b6ff76c5e1c 100644
--- a/spec/lib/gitlab/git/tag_spec.rb
+++ b/spec/lib/gitlab/git/tag_spec.rb
@@ -101,4 +101,17 @@ RSpec.describe Gitlab::Git::Tag, :seed_helper do
end
end
end
+
+ describe "#cache_key" do
+ subject { repository.tags.first }
+
+ it "returns a cache key that changes based on changeable values" do
+ expect(subject).to receive(:name).and_return("v1.0.0")
+ expect(subject).to receive(:message).and_return("Initial release")
+
+ digest = Digest::SHA1.hexdigest(["v1.0.0", "Initial release", subject.target, subject.target_commit.sha].join)
+
+ expect(subject.cache_key).to eq("tag:#{digest}")
+ end
+ end
end
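
The new `#cache_key` spec fully determines the digest inputs. A small worked example, runnable on its own; the tag name and message are the spec's stubbed values, while the two SHAs are hypothetical placeholders for the tag's target and dereferenced commit:

    require 'digest'

    name              = 'v1.0.0'          # stubbed tag name from the spec
    message           = 'Initial release' # stubbed annotation message from the spec
    target            = 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' # hypothetical tag target SHA
    target_commit_sha = 'bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb' # hypothetical commit SHA

    digest    = Digest::SHA1.hexdigest([name, message, target, target_commit_sha].join)
    cache_key = "tag:#{digest}" # matches the asserted "tag:<sha1 of joined values>" format
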
diff --git a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
index 037734f1b13..f0ec58f3c2d 100644
--- a/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/blob_service_spec.rb
@@ -14,14 +14,14 @@ RSpec.describe Gitlab::GitalyClient::BlobService do
let(:limit) { 5 }
let(:not_in) { %w[branch-a branch-b] }
let(:expected_params) do
- { revision: revision, limit: limit, not_in_refs: not_in, not_in_all: false }
+ { revisions: ["master", "--not", "branch-a", "branch-b"], limit: limit }
end
subject { client.get_new_lfs_pointers(revision, limit, not_in) }
it 'sends a get_new_lfs_pointers message' do
expect_any_instance_of(Gitaly::BlobService::Stub)
- .to receive(:get_new_lfs_pointers)
+ .to receive(:list_lfs_pointers)
.with(gitaly_request_with_params(expected_params), kind_of(Hash))
.and_return([])
@@ -31,12 +31,39 @@ RSpec.describe Gitlab::GitalyClient::BlobService do
context 'with not_in = :all' do
let(:not_in) { :all }
let(:expected_params) do
- { revision: revision, limit: limit, not_in_refs: [], not_in_all: true }
+ { revisions: ["master", "--not", "--all"], limit: limit }
end
it 'sends the correct message' do
expect_any_instance_of(Gitaly::BlobService::Stub)
- .to receive(:get_new_lfs_pointers)
+ .to receive(:list_lfs_pointers)
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
+ .and_return([])
+
+ subject
+ end
+ end
+
+ context 'with hook environment' do
+ let(:git_env) do
+ {
+ 'GIT_OBJECT_DIRECTORY_RELATIVE' => '.git/objects',
+ 'GIT_ALTERNATE_OBJECT_DIRECTORIES_RELATIVE' => ['/dir/one', '/dir/two']
+ }
+ end
+
+ let(:expected_params) do
+ expected_repository = repository.gitaly_repository
+ expected_repository.git_alternate_object_directories = Google::Protobuf::RepeatedField.new(:string)
+
+ { limit: limit, repository: expected_repository }
+ end
+
+ it 'sends a list_all_lfs_pointers message' do
+ allow(Gitlab::Git::HookEnv).to receive(:all).with(repository.gl_repository).and_return(git_env)
+
+ expect_any_instance_of(Gitaly::BlobService::Stub)
+ .to receive(:list_all_lfs_pointers)
.with(gitaly_request_with_params(expected_params), kind_of(Hash))
.and_return([])
@@ -46,12 +73,16 @@ RSpec.describe Gitlab::GitalyClient::BlobService do
end
describe '#get_all_lfs_pointers' do
+ let(:expected_params) do
+ { revisions: ["--all"], limit: 0 }
+ end
+
subject { client.get_all_lfs_pointers }
it 'sends a get_all_lfs_pointers message' do
expect_any_instance_of(Gitaly::BlobService::Stub)
- .to receive(:get_all_lfs_pointers)
- .with(gitaly_request_with_params({}), kind_of(Hash))
+ .to receive(:list_lfs_pointers)
+ .with(gitaly_request_with_params(expected_params), kind_of(Hash))
.and_return([])
subject
diff --git a/spec/lib/gitlab/gitaly_client/call_spec.rb b/spec/lib/gitlab/gitaly_client/call_spec.rb
index 5c33ac40460..099307fc4e1 100644
--- a/spec/lib/gitlab/gitaly_client/call_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/call_spec.rb
@@ -24,11 +24,14 @@ RSpec.describe Gitlab::GitalyClient::Call do
def expect_call_details_to_match(duration_higher_than: 0)
expect(client.list_call_details.size).to eq(1)
expect(client.list_call_details.first)
- .to match a_hash_including(feature: "#{service}##{rpc}",
- duration: a_value > duration_higher_than,
- request: an_instance_of(Hash),
- rpc: rpc,
- backtrace: an_instance_of(Array))
+ .to match a_hash_including(
+ start: a_value > 0,
+ feature: "#{service}##{rpc}",
+ duration: a_value > duration_higher_than,
+ request: an_instance_of(Hash),
+ rpc: rpc,
+ backtrace: an_instance_of(Array)
+ )
end
context 'when the response is not an enumerator' do
diff --git a/spec/lib/gitlab/gitaly_client/object_pool_service_spec.rb b/spec/lib/gitlab/gitaly_client/object_pool_service_spec.rb
index 15eebf62a39..9c3bc935acc 100644
--- a/spec/lib/gitlab/gitaly_client/object_pool_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/object_pool_service_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Gitlab::GitalyClient::ObjectPoolService do
subject { described_class.new(object_pool) }
before do
- subject.create(raw_repository)
+ subject.create(raw_repository) # rubocop:disable Rails/SaveBang
end
describe '#create' do
@@ -22,7 +22,7 @@ RSpec.describe Gitlab::GitalyClient::ObjectPoolService do
context 'when the pool already exists' do
it 'returns an error' do
expect do
- subject.create(raw_repository)
+ subject.create(raw_repository) # rubocop:disable Rails/SaveBang
end.to raise_error(GRPC::FailedPrecondition)
end
end
diff --git a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
index 22707c9a36b..9a17140a1e0 100644
--- a/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/operation_service_spec.rb
@@ -88,17 +88,29 @@ RSpec.describe Gitlab::GitalyClient::OperationService do
let(:source_sha) { 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660' }
let(:ref) { 'refs/merge-requests/x/merge' }
let(:message) { 'validación' }
- let(:allow_conflicts) { false }
let(:response) { Gitaly::UserMergeToRefResponse.new(commit_id: 'new-commit-id') }
- subject { client.user_merge_to_ref(user, source_sha, nil, ref, message, first_parent_ref, allow_conflicts) }
+ let(:payload) do
+ { source_sha: source_sha, branch: 'branch', target_ref: ref,
+ message: message, first_parent_ref: first_parent_ref, allow_conflicts: true }
+ end
it 'sends a user_merge_to_ref message' do
- expect_any_instance_of(Gitaly::OperationService::Stub)
- .to receive(:user_merge_to_ref).with(kind_of(Gitaly::UserMergeToRefRequest), kind_of(Hash))
- .and_return(response)
-
- subject
+ freeze_time do
+ expect_any_instance_of(Gitaly::OperationService::Stub).to receive(:user_merge_to_ref) do |_, request, options|
+ expect(options).to be_kind_of(Hash)
+ expect(request.to_h).to eq(
+ payload.merge({
+ repository: repository.gitaly_repository.to_h,
+ message: message.dup.force_encoding(Encoding::ASCII_8BIT),
+ user: Gitlab::Git::User.from_gitlab(user).to_gitaly.to_h,
+ timestamp: { nanos: 0, seconds: Time.current.to_i }
+ })
+ )
+ end.and_return(response)
+
+ client.user_merge_to_ref(user, **payload)
+ end
end
end
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 7a382df1248..26ec194a2e7 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -246,6 +246,21 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
end
end
+ describe '#search_files_by_regexp' do
+ subject(:result) { client.search_files_by_regexp('master', '.*') }
+
+ before do
+ expect_any_instance_of(Gitaly::RepositoryService::Stub)
+ .to receive(:search_files_by_name)
+ .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
+ .and_return([double(files: ['file1.txt']), double(files: ['file2.txt'])])
+ end
+
+ it 'sends a search_files_by_name message and returns a flatten array' do
+ expect(result).to contain_exactly('file1.txt', 'file2.txt')
+ end
+ end
+
describe '#disconnect_alternates' do
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
@@ -255,7 +270,7 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
let(:object_pool_service) { Gitlab::GitalyClient::ObjectPoolService.new(object_pool) }
before do
- object_pool_service.create(repository)
+ object_pool_service.create(repository) # rubocop:disable Rails/SaveBang
object_pool_service.link_repository(repository)
end
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb
index e42b6d89c30..01d9edf0ba1 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_merged_by_importer_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::PullRequestMergedByImporter, :clean_gitlab_redis_cache do
let_it_be(:merge_request) { create(:merged_merge_request) }
+
let(:project) { merge_request.project }
let(:merged_at) { Time.new(2017, 1, 1, 12, 00).utc }
let(:client_double) { double(user: double(id: 999, login: 'merger', email: 'merger@email.com')) }
diff --git a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
index 290f3f51202..5002e0384f3 100644
--- a/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/pull_request_review_importer_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::GithubImport::Importer::PullRequestReviewImporter, :clean
using RSpec::Parameterized::TableSyntax
let_it_be(:merge_request) { create(:merge_request) }
+
let(:project) { merge_request.project }
let(:client_double) { double(user: double(id: 999, login: 'author', email: 'author@email.com')) }
let(:submitted_at) { Time.new(2017, 1, 1, 12, 00).utc }
diff --git a/spec/lib/gitlab/github_import/milestone_finder_spec.rb b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
index 5da45b1897f..fe8652eb5a2 100644
--- a/spec/lib/gitlab/github_import/milestone_finder_spec.rb
+++ b/spec/lib/gitlab/github_import/milestone_finder_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::GithubImport::MilestoneFinder, :clean_gitlab_redis_cache do
let_it_be(:project) { create(:project) }
let_it_be(:milestone) { create(:milestone, project: project) }
+
let(:finder) { described_class.new(project) }
describe '#id_for' do
diff --git a/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb b/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb
deleted file mode 100644
index c88506899cd..00000000000
--- a/spec/lib/gitlab/graphql/authorize/authorize_field_service_spec.rb
+++ /dev/null
@@ -1,253 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-# Also see spec/graphql/features/authorization_spec.rb for
-# integration tests of AuthorizeFieldService
-RSpec.describe Gitlab::Graphql::Authorize::AuthorizeFieldService do
- def type(type_authorizations = [])
- Class.new(Types::BaseObject) do
- graphql_name 'TestType'
-
- authorize type_authorizations
- end
- end
-
- def type_with_field(field_type, field_authorizations = [], resolved_value = 'Resolved value', **options)
- Class.new(Types::BaseObject) do
- graphql_name 'TestTypeWithField'
- options.reverse_merge!(null: true)
- field :test_field, field_type,
- authorize: field_authorizations,
- **options
-
- define_method :test_field do
- resolved_value
- end
- end
- end
-
- def resolve
- service.authorized_resolve[type_instance, {}, context]
- end
-
- subject(:service) { described_class.new(field) }
-
- describe '#authorized_resolve' do
- let_it_be(:current_user) { build(:user) }
- let_it_be(:presented_object) { 'presented object' }
- let_it_be(:query_type) { GraphQL::ObjectType.new }
- let_it_be(:schema) { GitlabSchema }
- let_it_be(:query) { GraphQL::Query.new(schema, document: nil, context: {}, variables: {}) }
- let_it_be(:context) { GraphQL::Query::Context.new(query: query, values: { current_user: current_user }, object: nil) }
-
- let(:type_class) { type_with_field(custom_type, :read_field, presented_object) }
- let(:type_instance) { type_class.authorized_new(presented_object, context) }
- let(:field) { type_class.fields['testField'].to_graphql }
-
- subject(:resolved) { ::Gitlab::Graphql::Lazy.force(resolve) }
-
- context 'reading the field of a lazy value' do
- let(:ability) { :read_field }
- let(:presented_object) { lazy_upcase('a') }
- let(:type_class) { type_with_field(GraphQL::STRING_TYPE, ability) }
-
- let(:upcaser) do
- Module.new do
- def self.upcase(strs)
- strs.map(&:upcase)
- end
- end
- end
-
- def lazy_upcase(str)
- ::BatchLoader::GraphQL.for(str).batch do |strs, found|
- strs.zip(upcaser.upcase(strs)).each { |s, us| found[s, us] }
- end
- end
-
- it 'does not run authorizations until we force the resolved value' do
- expect(Ability).not_to receive(:allowed?)
-
- expect(resolve).to respond_to(:force)
- end
-
- it 'runs authorizations when we force the resolved value' do
- spy_ability_check_for(ability, 'A')
-
- expect(resolved).to eq('Resolved value')
- end
-
- it 'redacts values that fail the permissions check' do
- spy_ability_check_for(ability, 'A', passed: false)
-
- expect(resolved).to be_nil
- end
-
- context 'we batch two calls' do
- def resolve(value)
- instance = type_class.authorized_new(lazy_upcase(value), context)
- service.authorized_resolve[instance, {}, context]
- end
-
- it 'batches resolution, but authorizes each object separately' do
- expect(upcaser).to receive(:upcase).once.and_call_original
- spy_ability_check_for(:read_field, 'A', passed: true)
- spy_ability_check_for(:read_field, 'B', passed: false)
- spy_ability_check_for(:read_field, 'C', passed: true)
-
- a = resolve('a')
- b = resolve('b')
- c = resolve('c')
-
- expect(a.force).to be_present
- expect(b.force).to be_nil
- expect(c.force).to be_present
- end
- end
- end
-
- shared_examples 'authorizing fields' do
- context 'scalar types' do
- shared_examples 'checking permissions on the presented object' do
- it 'checks the abilities on the object being presented and returns the value' do
- expected_permissions.each do |permission|
- spy_ability_check_for(permission, presented_object, passed: true)
- end
-
- expect(resolved).to eq('Resolved value')
- end
-
- it 'returns nil if the value was not authorized' do
- allow(Ability).to receive(:allowed?).and_return false
-
- expect(resolved).to be_nil
- end
- end
-
- context 'when the field is a built-in scalar type' do
- let(:type_class) { type_with_field(GraphQL::STRING_TYPE, :read_field) }
- let(:expected_permissions) { [:read_field] }
-
- it_behaves_like 'checking permissions on the presented object'
- end
-
- context 'when the field is a list of scalar types' do
- let(:type_class) { type_with_field([GraphQL::STRING_TYPE], :read_field) }
- let(:expected_permissions) { [:read_field] }
-
- it_behaves_like 'checking permissions on the presented object'
- end
-
- context 'when the field is sub-classed scalar type' do
- let(:type_class) { type_with_field(Types::TimeType, :read_field) }
- let(:expected_permissions) { [:read_field] }
-
- it_behaves_like 'checking permissions on the presented object'
- end
-
- context 'when the field is a list of sub-classed scalar types' do
- let(:type_class) { type_with_field([Types::TimeType], :read_field) }
- let(:expected_permissions) { [:read_field] }
-
- it_behaves_like 'checking permissions on the presented object'
- end
- end
-
- context 'when the field is a connection' do
- context 'when it resolves to nil' do
- let(:type_class) { type_with_field(Types::QueryType.connection_type, :read_field, nil) }
-
- it 'does not fail when authorizing' do
- expect(resolved).to be_nil
- end
- end
-
- context 'when it returns values' do
- let(:objects) { [1, 2, 3] }
- let(:field_type) { type([:read_object]).connection_type }
- let(:type_class) { type_with_field(field_type, [], objects) }
-
- it 'filters out unauthorized values' do
- spy_ability_check_for(:read_object, 1, passed: true)
- spy_ability_check_for(:read_object, 2, passed: false)
- spy_ability_check_for(:read_object, 3, passed: true)
-
- expect(resolved.nodes).to eq [1, 3]
- end
- end
- end
-
- context 'when the field is a specific type' do
- let(:custom_type) { type(:read_type) }
- let(:object_in_field) { double('presented in field') }
-
- let(:type_class) { type_with_field(custom_type, :read_field, object_in_field) }
- let(:type_instance) { type_class.authorized_new(object_in_field, context) }
-
- it 'checks both field & type permissions' do
- spy_ability_check_for(:read_field, object_in_field, passed: true)
- spy_ability_check_for(:read_type, object_in_field, passed: true)
-
- expect(resolved).to eq(object_in_field)
- end
-
- it 'returns nil if viewing was not allowed' do
- spy_ability_check_for(:read_field, object_in_field, passed: false)
- spy_ability_check_for(:read_type, object_in_field, passed: true)
-
- expect(resolved).to be_nil
- end
-
- context 'when the field is not nullable' do
- let(:type_class) { type_with_field(custom_type, :read_field, object_in_field, null: false) }
-
- it 'returns nil when viewing is not allowed' do
- spy_ability_check_for(:read_type, object_in_field, passed: false)
-
- expect(resolved).to be_nil
- end
- end
-
- context 'when the field is a list' do
- let(:object_1) { double('presented in field 1') }
- let(:object_2) { double('presented in field 2') }
- let(:presented_types) { [double(object: object_1), double(object: object_2)] }
-
- let(:type_class) { type_with_field([custom_type], :read_field, presented_types) }
- let(:type_instance) { type_class.authorized_new(presented_types, context) }
-
- it 'checks all permissions' do
- allow(Ability).to receive(:allowed?) { true }
-
- spy_ability_check_for(:read_field, object_1, passed: true)
- spy_ability_check_for(:read_type, object_1, passed: true)
- spy_ability_check_for(:read_field, object_2, passed: true)
- spy_ability_check_for(:read_type, object_2, passed: true)
-
- expect(resolved).to eq(presented_types)
- end
-
- it 'filters out objects that the user cannot see' do
- allow(Ability).to receive(:allowed?) { true }
-
- spy_ability_check_for(:read_type, object_1, passed: false)
-
- expect(resolved).to contain_exactly(have_attributes(object: object_2))
- end
- end
- end
- end
-
- it_behaves_like 'authorizing fields'
- end
-
- private
-
- def spy_ability_check_for(ability, object, passed: true)
- expect(Ability)
- .to receive(:allowed?)
- .with(current_user, ability, object)
- .and_return(passed)
- end
-end
diff --git a/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb b/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
index c5d7665c3b2..0c548e1ce32 100644
--- a/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
+++ b/spec/lib/gitlab/graphql/authorize/authorize_resource_spec.rb
@@ -12,7 +12,8 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeResource do
authorize :read_the_thing
def initialize(user, found_object)
- @user, @found_object = user, found_object
+ @user = user
+ @found_object = found_object
end
def find_object
@@ -22,6 +23,14 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeResource do
def current_user
user
end
+
+ def context
+ { current_user: user }
+ end
+
+ def self.authorization
+ @authorization ||= ::Gitlab::Graphql::Authorize::ObjectAuthorization.new(required_permissions)
+ end
end
end
@@ -30,11 +39,14 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeResource do
subject(:loading_resource) { fake_class.new(user, project) }
+ before do
+ # don't allow anything by default
+ allow(Ability).to receive(:allowed?).and_return(false)
+ end
+
context 'when the user is allowed to perform the action' do
before do
- allow(Ability).to receive(:allowed?).with(user, :read_the_thing, project, scope: :user) do
- true
- end
+ allow(Ability).to receive(:allowed?).with(user, :read_the_thing, project).and_return(true)
end
describe '#authorized_find!' do
@@ -48,24 +60,12 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeResource do
expect { loading_resource.authorize!(project) }.not_to raise_error
end
end
-
- describe '#authorized_resource?' do
- it 'is true' do
- expect(loading_resource.authorized_resource?(project)).to be(true)
- end
- end
end
context 'when the user is not allowed to perform the action' do
- before do
- allow(Ability).to receive(:allowed?).with(user, :read_the_thing, project, scope: :user) do
- false
- end
- end
-
describe '#authorized_find!' do
it 'raises an error' do
- expect { loading_resource.authorize!(project) }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ expect { loading_resource.authorized_find! }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
@@ -74,12 +74,6 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeResource do
expect { loading_resource.authorize!(project) }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
end
-
- describe '#authorized_resource?' do
- it 'is false' do
- expect(loading_resource.authorized_resource?(project)).to be(false)
- end
- end
end
context 'when the class does not define #find_object' do
@@ -92,46 +86,6 @@ RSpec.describe Gitlab::Graphql::Authorize::AuthorizeResource do
end
end
- context 'when the class does not define authorize' do
- let(:fake_class) do
- Class.new do
- include Gitlab::Graphql::Authorize::AuthorizeResource
-
- attr_reader :user, :found_object
-
- def initialize(user, found_object)
- @user, @found_object = user, found_object
- end
-
- def find_object(*_args)
- found_object
- end
-
- def current_user
- user
- end
-
- def self.name
- 'TestClass'
- end
- end
- end
-
- let(:error) { /#{fake_class.name} has no authorizations/ }
-
- describe '#authorized_find!' do
- it 'raises a comprehensive error message' do
- expect { loading_resource.authorized_find! }.to raise_error(error)
- end
- end
-
- describe '#authorized_resource?' do
- it 'raises a comprehensive error message' do
- expect { loading_resource.authorized_resource?(project) }.to raise_error(error)
- end
- end
- end
-
describe '#authorize' do
it 'adds permissions from subclasses to those of superclasses when used on classes' do
base_class = Class.new do
diff --git a/spec/lib/gitlab/graphql/authorize/object_authorization_spec.rb b/spec/lib/gitlab/graphql/authorize/object_authorization_spec.rb
new file mode 100644
index 00000000000..73e25f23848
--- /dev/null
+++ b/spec/lib/gitlab/graphql/authorize/object_authorization_spec.rb
@@ -0,0 +1,64 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+RSpec.describe ::Gitlab::Graphql::Authorize::ObjectAuthorization do
+ describe '#ok?' do
+ subject { described_class.new(%i[go_fast go_slow]) }
+
+ let(:user) { double(:User, id: 10001) }
+
+ let(:policy) do
+ Class.new(::DeclarativePolicy::Base) do
+ condition(:fast, scope: :subject) { @subject.x >= 10 }
+ condition(:slow, scope: :subject) { @subject.y >= 10 }
+
+ rule { fast }.policy do
+ enable :go_fast
+ end
+
+ rule { slow }.policy do
+ enable :go_slow
+ end
+ end
+ end
+
+ before do
+ stub_const('Foo', Struct.new(:x, :y))
+ stub_const('FooPolicy', policy)
+ end
+
+ context 'when there are no abilities' do
+ subject { described_class.new([]) }
+
+ it { is_expected.to be_ok(double, double) }
+ end
+
+ context 'when no ability should be allowed' do
+ let(:object) { Foo.new(0, 0) }
+
+ it { is_expected.not_to be_ok(object, user) }
+ end
+
+ context 'when go_fast should be allowed' do
+ let(:object) { Foo.new(100, 0) }
+
+ it { is_expected.not_to be_ok(object, user) }
+ end
+
+ context 'when go_fast and go_slow should be allowed' do
+ let(:object) { Foo.new(100, 100) }
+
+ it { is_expected.to be_ok(object, user) }
+ end
+
+ context 'when the object delegates to another subject' do
+ def proxy(foo)
+ double(:Proxy, declarative_policy_subject: foo)
+ end
+
+ it { is_expected.to be_ok(proxy(Foo.new(100, 100)), user) }
+ it { is_expected.not_to be_ok(proxy(Foo.new(0, 100)), user) }
+ end
+ end
+end
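
Editor's note: the new ObjectAuthorization spec above fixes the contract of #ok? — every required ability must be allowed for the policy subject. A minimal usage sketch using only the names the spec itself defines (Foo, FooPolicy, user):

    authorization = Gitlab::Graphql::Authorize::ObjectAuthorization.new(%i[go_fast go_slow])

    # FooPolicy enables :go_fast when x >= 10 and :go_slow when y >= 10,
    # so #ok? passes only when both required abilities are granted.
    authorization.ok?(Foo.new(100, 100), user) # => true
    authorization.ok?(Foo.new(100, 0), user)   # => false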
diff --git a/spec/lib/gitlab/graphql/batch_key_spec.rb b/spec/lib/gitlab/graphql/batch_key_spec.rb
index 881fba5c1be..7b73b27f24b 100644
--- a/spec/lib/gitlab/graphql/batch_key_spec.rb
+++ b/spec/lib/gitlab/graphql/batch_key_spec.rb
@@ -6,6 +6,7 @@ require 'test_prof/recipes/rspec/let_it_be'
RSpec.describe ::Gitlab::Graphql::BatchKey do
let_it_be(:rect) { Struct.new(:len, :width) }
let_it_be(:circle) { Struct.new(:radius) }
+
let(:lookahead) { nil }
let(:object) { rect.new(2, 3) }
diff --git a/spec/lib/gitlab/graphql/deprecation_spec.rb b/spec/lib/gitlab/graphql/deprecation_spec.rb
new file mode 100644
index 00000000000..8b41145b855
--- /dev/null
+++ b/spec/lib/gitlab/graphql/deprecation_spec.rb
@@ -0,0 +1,213 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'active_model'
+
+RSpec.describe ::Gitlab::Graphql::Deprecation do
+ let(:options) { {} }
+
+ subject(:deprecation) { described_class.parse(options) }
+
+ describe '.parse' do
+ context 'with nil' do
+ let(:options) { nil }
+
+ it 'parses to nil' do
+ expect(deprecation).to be_nil
+ end
+ end
+
+ context 'with empty options' do
+ let(:options) { {} }
+
+ it 'parses to an empty deprecation' do
+ expect(deprecation).to eq(described_class.new)
+ end
+ end
+
+ context 'with defined options' do
+ let(:options) { { reason: :renamed, milestone: '10.10' } }
+
+ it 'assigns the properties' do
+ expect(deprecation).to eq(described_class.new(reason: 'This was renamed', milestone: '10.10'))
+ end
+ end
+ end
+
+ describe 'validations' do
+ let(:options) { { reason: :renamed, milestone: '10.10' } }
+
+ it { is_expected.to be_valid }
+
+ context 'when the milestone is absent' do
+ before do
+ options.delete(:milestone)
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when the milestone is not milestone-ish' do
+ before do
+ options[:milestone] = 'next year'
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when the milestone is not a string' do
+ before do
+ options[:milestone] = 10.01
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when the reason is absent' do
+ before do
+ options.delete(:reason)
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when the reason is not a known reason' do
+ before do
+ options[:reason] = :not_stylish_enough
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+
+ context 'when the reason is a string' do
+ before do
+ options[:reason] = 'not stylish enough'
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when the reason is a string ending with a period' do
+ before do
+ options[:reason] = 'not stylish enough.'
+ end
+
+ it { is_expected.not_to be_valid }
+ end
+ end
+
+ describe '#deprecation_reason' do
+ context 'when there is a replacement' do
+ let(:options) { { reason: :renamed, milestone: '10.10', replacement: 'X.y' } }
+
+ it 'renders as reason-replacement-milestone' do
+ expect(deprecation.deprecation_reason).to eq('This was renamed. Please use `X.y`. Deprecated in 10.10.')
+ end
+ end
+
+ context 'when there is no replacement' do
+ let(:options) { { reason: :renamed, milestone: '10.10' } }
+
+ it 'renders as reason-milestone' do
+ expect(deprecation.deprecation_reason).to eq('This was renamed. Deprecated in 10.10.')
+ end
+ end
+
+ describe 'processing of reason' do
+ described_class::REASONS.each_key do |known_reason|
+ context "when the reason is a known reason such as #{known_reason.inspect}" do
+ let(:options) { { reason: known_reason } }
+
+ it 'renders the reason_text correctly' do
+ expect(deprecation.deprecation_reason).to start_with(described_class::REASONS[known_reason])
+ end
+ end
+ end
+
+ context 'when the reason is any other string' do
+ let(:options) { { reason: 'unhelpful' } }
+
+ it 'appends a period' do
+ expect(deprecation.deprecation_reason).to start_with('unhelpful.')
+ end
+ end
+ end
+ end
+
+ describe '#edit_description' do
+ let(:options) { { reason: :renamed, milestone: '10.10' } }
+
+ it 'appends milestone:reason with a leading space if there is a description' do
+ desc = deprecation.edit_description('Some description.')
+
+ expect(desc).to eq('Some description. Deprecated in 10.10: This was renamed.')
+ end
+
+ it 'returns nil if there is no description' do
+ desc = deprecation.edit_description(nil)
+
+ expect(desc).to be_nil
+ end
+ end
+
+ describe '#original_description' do
+ it 'records the description passed to it' do
+ deprecation.edit_description('Some description.')
+
+ expect(deprecation.original_description).to eq('Some description.')
+ end
+ end
+
+ describe '#markdown' do
+ context 'when there is a replacement' do
+ let(:options) { { reason: :renamed, milestone: '10.10', replacement: 'X.y' } }
+
+ context 'when the context is :inline' do
+ it 'renders on one line' do
+ expectation = '**Deprecated** in 10.10. This was renamed. Use: `X.y`.'
+
+ expect(deprecation.markdown).to eq(expectation)
+ expect(deprecation.markdown(context: :inline)).to eq(expectation)
+ end
+ end
+
+ context 'when the context is :block' do
+ it 'renders a warning note' do
+ expectation = <<~MD.chomp
+ WARNING:
+ **Deprecated** in 10.10.
+ This was renamed.
+ Use: `X.y`.
+ MD
+
+ expect(deprecation.markdown(context: :block)).to eq(expectation)
+ end
+ end
+ end
+
+ context 'when there is no replacement' do
+ let(:options) { { reason: 'Removed', milestone: '10.10' } }
+
+ context 'when the context is :inline' do
+ it 'renders on one line' do
+ expectation = '**Deprecated** in 10.10. Removed.'
+
+ expect(deprecation.markdown).to eq(expectation)
+ expect(deprecation.markdown(context: :inline)).to eq(expectation)
+ end
+ end
+
+ context 'when the context is :block' do
+ it 'renders a warning note' do
+ expectation = <<~MD.chomp
+ WARNING:
+ **Deprecated** in 10.10.
+ Removed.
+ MD
+
+ expect(deprecation.markdown(context: :block)).to eq(expectation)
+ end
+ end
+ end
+ end
+end
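
Editor's note: the deprecation spec added above pins down the rendering rules. A short sketch of the calls the expectations describe (assumed usage, values taken directly from the examples):

    deprecation = Gitlab::Graphql::Deprecation.parse(
      reason: :renamed, milestone: '10.10', replacement: 'X.y'
    )

    deprecation.deprecation_reason
    # => "This was renamed. Please use `X.y`. Deprecated in 10.10."
    deprecation.markdown(context: :inline)
    # => "**Deprecated** in 10.10. This was renamed. Use: `X.y`."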
diff --git a/spec/lib/gitlab/graphql/docs/renderer_spec.rb b/spec/lib/gitlab/graphql/docs/renderer_spec.rb
index 5afed8c3390..8c0f7aac081 100644
--- a/spec/lib/gitlab/graphql/docs/renderer_spec.rb
+++ b/spec/lib/gitlab/graphql/docs/renderer_spec.rb
@@ -1,32 +1,35 @@
# frozen_string_literal: true
-require 'spec_helper'
+require 'fast_spec_helper'
RSpec.describe Gitlab::Graphql::Docs::Renderer do
describe '#contents' do
- # Returns a Schema that uses the given `type`
- def mock_schema(type, field_description)
- query_type = Class.new(Types::BaseObject) do
- graphql_name 'Query'
+ let(:template) { Rails.root.join('lib/gitlab/graphql/docs/templates/default.md.haml') }
- field :foo, type, null: true do
- description field_description
+ let(:query_type) do
+ Class.new(Types::BaseObject) { graphql_name 'Query' }.tap do |t|
+ # this keeps type and field_description in scope.
+ t.field :foo, type, null: true, description: field_description do
argument :id, GraphQL::ID_TYPE, required: false, description: 'ID of the object.'
end
end
+ end
- GraphQL::Schema.define(
- query: query_type,
- resolve_type: ->(obj, ctx) { raise 'Not a real schema' }
- )
+ let(:mock_schema) do
+ Class.new(GraphQL::Schema) do
+ def resolve_type(obj, ctx)
+ raise 'Not a real schema'
+ end
+ end
end
- let_it_be(:template) { Rails.root.join('lib/gitlab/graphql/docs/templates/default.md.haml') }
let(:field_description) { 'List of objects.' }
subject(:contents) do
+ mock_schema.query(query_type)
+
described_class.new(
- mock_schema(type, field_description).graphql_definition,
+ mock_schema,
output_dir: nil,
template: template
).contents
@@ -136,6 +139,22 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
null: false,
deprecated: { reason: 'This is deprecated', milestone: '1.10' },
description: 'A description.'
+ field :foo_with_args,
+ type: GraphQL::STRING_TYPE,
+ null: false,
+ deprecated: { reason: 'Do not use', milestone: '1.10' },
+ description: 'A description.' do
+ argument :fooity, ::GraphQL::INT_TYPE, required: false, description: 'X'
+ end
+ field :bar,
+ type: GraphQL::STRING_TYPE,
+ null: false,
+ description: 'A description.',
+ deprecated: {
+ reason: :renamed,
+ milestone: '1.10',
+ replacement: 'Query.boom'
+ }
end
end
@@ -145,7 +164,40 @@ RSpec.describe Gitlab::Graphql::Docs::Renderer do
| Field | Type | Description |
| ----- | ---- | ----------- |
- | `foo` **{warning-solid}** | [`String!`](#string) | **Deprecated:** This is deprecated. Deprecated in 1.10. |
+ | `bar` **{warning-solid}** | [`String!`](#string) | **Deprecated** in 1.10. This was renamed. Use: `Query.boom`. |
+ | `foo` **{warning-solid}** | [`String!`](#string) | **Deprecated** in 1.10. This is deprecated. |
+ | `fooWithArgs` **{warning-solid}** | [`String!`](#string) | **Deprecated** in 1.10. Do not use. |
+ DOC
+
+ is_expected.to include(expectation)
+ end
+ end
+
+ context 'when a Query.field is deprecated' do
+ let(:type) { ::GraphQL::INT_TYPE }
+
+ before do
+ query_type.field(
+ name: :bar,
+ type: type,
+ null: true,
+ description: 'A bar',
+ deprecated: { reason: :renamed, milestone: '10.11', replacement: 'Query.foo' }
+ )
+ end
+
+ it 'includes the deprecation' do
+ expectation = <<~DOC
+ ### `bar`
+
+ A bar.
+
+ WARNING:
+ **Deprecated** in 10.11.
+ This was renamed.
+ Use: `Query.foo`.
+
+ Returns [`Int`](#int).
DOC
is_expected.to include(expectation)
diff --git a/spec/lib/gitlab/graphql/loaders/batch_lfs_oid_loader_spec.rb b/spec/lib/gitlab/graphql/loaders/batch_lfs_oid_loader_spec.rb
index ae5d9686c54..35750a87fb5 100644
--- a/spec/lib/gitlab/graphql/loaders/batch_lfs_oid_loader_spec.rb
+++ b/spec/lib/gitlab/graphql/loaders/batch_lfs_oid_loader_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::Graphql::Loaders::BatchLfsOidLoader do
include GraphqlHelpers
let_it_be(:project) { create(:project, :repository) }
+
let(:repository) { project.repository }
let(:blob) { Gitlab::Graphql::Representation::TreeEntry.new(repository.blob_at('master', 'files/lfs/lfs_object.iso'), repository) }
let(:otherblob) { Gitlab::Graphql::Representation::TreeEntry.new(repository.blob_at('master', 'README'), repository) }
diff --git a/spec/lib/gitlab/graphql/markdown_field_spec.rb b/spec/lib/gitlab/graphql/markdown_field_spec.rb
index 0e36ea14ac3..44ca23f547c 100644
--- a/spec/lib/gitlab/graphql/markdown_field_spec.rb
+++ b/spec/lib/gitlab/graphql/markdown_field_spec.rb
@@ -57,6 +57,7 @@ RSpec.describe Gitlab::Graphql::MarkdownField do
describe 'basic verification that references work' do
let_it_be(:project) { create(:project, :public) }
+
let(:issue) { create(:issue, project: project) }
let(:note) { build(:note, note: "Referencing #{issue.to_reference(full: true)}") }
diff --git a/spec/lib/gitlab/graphql/negatable_arguments_spec.rb b/spec/lib/gitlab/graphql/negatable_arguments_spec.rb
new file mode 100644
index 00000000000..bc6e25eb018
--- /dev/null
+++ b/spec/lib/gitlab/graphql/negatable_arguments_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Graphql::NegatableArguments do
+ let(:test_resolver) do
+ Class.new(Resolvers::BaseResolver).tap do |klass|
+ klass.extend described_class
+ allow(klass).to receive(:name).and_return('Resolvers::TestResolver')
+ end
+ end
+
+ describe '#negated' do
+ it 'defines :not argument' do
+ test_resolver.negated {}
+
+ expect(test_resolver.arguments['not'].type.name).to eq "Types::TestResolverNegatedParamsType"
+ end
+
+ it 'defines any arguments passed as block' do
+ test_resolver.negated do
+ argument :foo, GraphQL::STRING_TYPE, required: false
+ end
+
+ expect(test_resolver.arguments['not'].type.arguments.keys).to match_array(['foo'])
+ end
+
+ it 'defines all arguments passed as block even if called multiple times' do
+ test_resolver.negated do
+ argument :foo, GraphQL::STRING_TYPE, required: false
+ end
+ test_resolver.negated do
+ argument :bar, GraphQL::STRING_TYPE, required: false
+ end
+
+ expect(test_resolver.arguments['not'].type.arguments.keys).to match_array(%w[foo bar])
+ end
+
+ it 'allows specifying a custom argument name' do
+ test_resolver.negated(param_key: :negative) {}
+
+ expect(test_resolver.arguments).to include('negative')
+ end
+ end
+end
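
Editor's note: the NegatableArguments spec above shows how a resolver opts into a nested `not` argument. A sketch mirroring the test resolver (hypothetical resolver class; argument name taken from the spec):

    class TestResolver < Resolvers::BaseResolver
      extend ::Gitlab::Graphql::NegatableArguments

      negated do
        argument :foo, GraphQL::STRING_TYPE, required: false
      end
    end

    TestResolver.arguments['not'] # nested input type exposing the `foo` argument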
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
index 02e67488d3f..839ad9110cc 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb
@@ -337,6 +337,7 @@ RSpec.describe Gitlab::Graphql::Pagination::Keyset::Connection do
describe '#nodes' do
let_it_be(:all_nodes) { create_list(:project, 5) }
+
let(:paged_nodes) { subject.nodes }
it_behaves_like 'connection with paged nodes' do
diff --git a/spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb b/spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb
index ec2ec4bf50d..792cb03e8c7 100644
--- a/spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb
+++ b/spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Graphql::Pagination::Keyset::LastItems do
let_it_be(:merge_request) { create(:merge_request) }
+
let(:scope) { MergeRequest.order_merged_at_asc }
subject { described_class.take_items(*args) }
diff --git a/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb b/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
index 8450396284a..fc723138d88 100644
--- a/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
+++ b/spec/lib/gitlab/graphql/query_analyzers/logger_analyzer_spec.rb
@@ -3,43 +3,46 @@
require 'spec_helper'
RSpec.describe Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer do
- subject { described_class.new }
-
- describe '#initial_value' do
- it 'filters out sensitive variables' do
- doc = GraphQL.parse <<-GRAPHQL
- mutation createNote($body: String!) {
- createNote(input: {noteableId: "1", body: $body}) {
- note {
- id
- }
+ let(:initial_value) { analyzer.initial_value(query) }
+ let(:analyzer) { described_class.new }
+ let(:query) { GraphQL::Query.new(GitlabSchema, document: document, context: {}, variables: { body: "some note" }) }
+ let(:document) do
+ GraphQL.parse <<-GRAPHQL
+ mutation createNote($body: String!) {
+ createNote(input: {noteableId: "1", body: $body}) {
+ note {
+ id
}
}
- GRAPHQL
+ }
+ GRAPHQL
+ end
- query = GraphQL::Query.new(GitlabSchema, document: doc, context: {}, variables: { body: "some note" })
+ describe 'variables' do
+ subject { initial_value.fetch(:variables) }
- expect(subject.initial_value(query)[:variables]).to eq('{:body=>"[FILTERED]"}')
- end
+ it { is_expected.to eq('{:body=>"[FILTERED]"}') }
end
describe '#final_value' do
let(:monotonic_time_before) { 42 }
let(:monotonic_time_after) { 500 }
let(:monotonic_time_duration) { monotonic_time_after - monotonic_time_before }
+ let(:memo) { initial_value }
+
+ subject(:final_value) { analyzer.final_value(memo) }
+
+ before do
+ RequestStore.store[:graphql_logs] = nil
- it 'returns a duration in seconds' do
allow(GraphQL::Analysis).to receive(:analyze_query).and_return([4, 2, [[], []]])
allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
allow(Gitlab::GraphqlLogger).to receive(:info)
+ end
- expected_duration = monotonic_time_duration
- memo = subject.initial_value(spy('query'))
-
- subject.final_value(memo)
-
- expect(memo).to have_key(:duration_s)
- expect(memo[:duration_s]).to eq(expected_duration)
+ it 'inserts duration in seconds to memo and sets request store' do
+ expect { final_value }.to change { memo[:duration_s] }.to(monotonic_time_duration)
+ .and change { RequestStore.store[:graphql_logs] }.to([memo])
end
end
end
diff --git a/spec/lib/gitlab/highlight_spec.rb b/spec/lib/gitlab/highlight_spec.rb
index 9271b868e36..1a929373716 100644
--- a/spec/lib/gitlab/highlight_spec.rb
+++ b/spec/lib/gitlab/highlight_spec.rb
@@ -79,6 +79,21 @@ RSpec.describe Gitlab::Highlight do
expect(result).to eq(expected)
end
+
+ context 'when start line number is set' do
+ let(:expected) do
+ %q(<span id="LC10" class="line" lang="diff"><span class="gi">+aaa</span></span>
+<span id="LC11" class="line" lang="diff"><span class="gi">+bbb</span></span>
+<span id="LC12" class="line" lang="diff"><span class="gd">- ccc</span></span>
+<span id="LC13" class="line" lang="diff"> ddd</span>)
+ end
+
+ it 'highlights each line properly' do
+ result = described_class.new(file_name, content).highlight(content, context: { line_number: 10 })
+
+ expect(result).to eq(expected)
+ end
+ end
end
describe 'with CRLF' do
diff --git a/spec/lib/gitlab/hook_data/issue_builder_spec.rb b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
index 8a2395d70b2..8f898d898de 100644
--- a/spec/lib/gitlab/hook_data/issue_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/issue_builder_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::HookData::IssueBuilder do
let_it_be(:label) { create(:label) }
let_it_be(:issue) { create(:labeled_issue, labels: [label], project: label.project) }
+
let(:builder) { described_class.new(issue) }
describe '#build' do
diff --git a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
index fede7f273f1..0339faa9fcf 100644
--- a/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/merge_request_builder_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::HookData::MergeRequestBuilder do
let_it_be(:merge_request) { create(:merge_request) }
+
let(:builder) { described_class.new(merge_request) }
describe '#build' do
diff --git a/spec/lib/gitlab/hook_data/release_builder_spec.rb b/spec/lib/gitlab/hook_data/release_builder_spec.rb
index b630780b162..449965f5df1 100644
--- a/spec/lib/gitlab/hook_data/release_builder_spec.rb
+++ b/spec/lib/gitlab/hook_data/release_builder_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::HookData::ReleaseBuilder do
let_it_be(:project) { create(:project, :public, :repository) }
+
let(:release) { create(:release, project: project) }
let(:builder) { described_class.new(release) }
diff --git a/spec/lib/gitlab/hook_data/user_builder_spec.rb b/spec/lib/gitlab/hook_data/user_builder_spec.rb
new file mode 100644
index 00000000000..f971089850b
--- /dev/null
+++ b/spec/lib/gitlab/hook_data/user_builder_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::HookData::UserBuilder do
+ let_it_be(:user) { create(:user, name: 'John Doe', username: 'johndoe', email: 'john@example.com') }
+
+ describe '#build' do
+ let(:data) { described_class.new(user).build(event) }
+ let(:event_name) { data[:event_name] }
+ let(:attributes) do
+ [
+ :event_name, :created_at, :updated_at, :name, :email, :user_id, :username
+ ]
+ end
+
+ context 'data' do
+ shared_examples_for 'includes the required attributes' do
+ it 'includes the required attributes' do
+ expect(data).to include(*attributes)
+
+ expect(data[:name]).to eq('John Doe')
+ expect(data[:email]).to eq('john@example.com')
+ expect(data[:user_id]).to eq(user.id)
+ expect(data[:username]).to eq('johndoe')
+ expect(data[:created_at]).to eq(user.created_at.xmlschema)
+ expect(data[:updated_at]).to eq(user.updated_at.xmlschema)
+ end
+ end
+
+ shared_examples_for 'does not include old username attributes' do
+ it 'does not include old username attributes' do
+ expect(data).not_to include(:old_username)
+ end
+ end
+
+ shared_examples_for 'does not include state attributes' do
+ it 'does not include state attributes' do
+ expect(data).not_to include(:state)
+ end
+ end
+
+ context 'on create' do
+ let(:event) { :create }
+
+ it { expect(event_name).to eq('user_create') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include old username attributes'
+ it_behaves_like 'does not include state attributes'
+ end
+
+ context 'on destroy' do
+ let(:event) { :destroy }
+
+ it { expect(event_name).to eq('user_destroy') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include old username attributes'
+ it_behaves_like 'does not include state attributes'
+ end
+
+ context 'on rename' do
+ let(:event) { :rename }
+
+ it { expect(event_name).to eq('user_rename') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include state attributes'
+
+ it 'includes old username details' do
+ allow(user).to receive(:username_before_last_save).and_return('old-username')
+
+ expect(data[:old_username]).to eq(user.username_before_last_save)
+ end
+ end
+
+ context 'on failed_login' do
+ let(:event) { :failed_login }
+
+ it { expect(event_name).to eq('user_failed_login') }
+ it_behaves_like 'includes the required attributes'
+ it_behaves_like 'does not include old username attributes'
+
+ it 'includes state details' do
+ user.ldap_block!
+
+ expect(data[:state]).to eq('ldap_blocked')
+ end
+ end
+ end
+ end
+end
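
Editor's note: the new UserBuilder spec above documents the user webhook payload. A condensed view of what the assertions imply (attribute names copied from the spec; event-specific keys noted in comments):

    data = Gitlab::HookData::UserBuilder.new(user).build(:create)

    data[:event_name] # => "user_create"
    data[:username]   # => "johndoe"
    data[:created_at] # => user.created_at.xmlschema
    # :old_username is only added for :rename events, :state only for :failed_login.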
diff --git a/spec/lib/gitlab/http_connection_adapter_spec.rb b/spec/lib/gitlab/http_connection_adapter_spec.rb
index 96e6e485841..7c57d162e9b 100644
--- a/spec/lib/gitlab/http_connection_adapter_spec.rb
+++ b/spec/lib/gitlab/http_connection_adapter_spec.rb
@@ -124,130 +124,5 @@ RSpec.describe Gitlab::HTTPConnectionAdapter do
expect(connection.port).to eq(443)
end
end
-
- context 'when proxy settings are configured' do
- let(:options) do
- {
- http_proxyaddr: 'https://proxy.org',
- http_proxyport: 1557,
- http_proxyuser: 'user',
- http_proxypass: 'pass'
- }
- end
-
- before do
- stub_all_dns('https://proxy.org', ip_address: '166.84.12.54')
- end
-
- it 'sets up the proxy settings' do
- expect(connection.proxy_address).to eq('https://166.84.12.54')
- expect(connection.proxy_port).to eq(1557)
- expect(connection.proxy_user).to eq('user')
- expect(connection.proxy_pass).to eq('pass')
- end
-
- context 'when the address has path' do
- before do
- options[:http_proxyaddr] = 'https://proxy.org/path'
- end
-
- it 'sets up the proxy settings' do
- expect(connection.proxy_address).to eq('https://166.84.12.54/path')
- expect(connection.proxy_port).to eq(1557)
- end
- end
-
- context 'when the port is in the address and port' do
- before do
- options[:http_proxyaddr] = 'https://proxy.org:1422'
- end
-
- it 'sets up the proxy settings' do
- expect(connection.proxy_address).to eq('https://166.84.12.54')
- expect(connection.proxy_port).to eq(1557)
- end
-
- context 'when the port is only in the address' do
- before do
- options[:http_proxyport] = nil
- end
-
- it 'sets up the proxy settings' do
- expect(connection.proxy_address).to eq('https://166.84.12.54')
- expect(connection.proxy_port).to eq(1422)
- end
- end
- end
-
- context 'when it is a request to local network' do
- before do
- options[:http_proxyaddr] = 'http://172.16.0.0/12'
- end
-
- it 'raises error' do
- expect { subject }.to raise_error(
- Gitlab::HTTP::BlockedUrlError,
- "URL 'http://172.16.0.0:1557/12' is blocked: Requests to the local network are not allowed"
- )
- end
-
- context 'when local request allowed' do
- before do
- options[:allow_local_requests] = true
- end
-
- it 'sets up the connection' do
- expect(connection.proxy_address).to eq('http://172.16.0.0/12')
- expect(connection.proxy_port).to eq(1557)
- end
- end
- end
-
- context 'when it is a request to local address' do
- before do
- options[:http_proxyaddr] = 'http://127.0.0.1'
- end
-
- it 'raises error' do
- expect { subject }.to raise_error(
- Gitlab::HTTP::BlockedUrlError,
- "URL 'http://127.0.0.1:1557' is blocked: Requests to localhost are not allowed"
- )
- end
-
- context 'when local request allowed' do
- before do
- options[:allow_local_requests] = true
- end
-
- it 'sets up the connection' do
- expect(connection.proxy_address).to eq('http://127.0.0.1')
- expect(connection.proxy_port).to eq(1557)
- end
- end
- end
-
- context 'when http(s) environment variable is set' do
- before do
- stub_env('https_proxy' => 'https://my.proxy')
- end
-
- it 'sets up the connection' do
- expect(connection.proxy_address).to eq('https://proxy.org')
- expect(connection.proxy_port).to eq(1557)
- end
- end
-
- context 'when DNS rebinding protection is disabled' do
- before do
- stub_application_setting(dns_rebinding_protection_enabled: false)
- end
-
- it 'sets up the connection' do
- expect(connection.proxy_address).to eq('https://proxy.org')
- expect(connection.proxy_port).to eq(1557)
- end
- end
- end
end
end
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index 37b43066a62..5d1e3c79474 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -256,6 +256,8 @@ ci_pipelines:
- messages
- pipeline_artifacts
- latest_statuses
+- dast_profile
+- dast_profiles_pipeline
ci_refs:
- project
- ci_pipelines
@@ -269,6 +271,7 @@ stages:
- builds
- bridges
- latest_statuses
+- retried_statuses
statuses:
- project
- pipeline
@@ -740,3 +743,5 @@ status_page_published_incident:
- issue
issuable_sla:
- issue
+push_rule:
+ - group
diff --git a/spec/lib/gitlab/import_export/design_repo_saver_spec.rb b/spec/lib/gitlab/import_export/design_repo_saver_spec.rb
index 5501e3dee5a..fd3539ab99c 100644
--- a/spec/lib/gitlab/import_export/design_repo_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/design_repo_saver_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::ImportExport::DesignRepoSaver do
describe 'bundle a design Git repo' do
let_it_be(:user) { create(:user) }
let_it_be(:design) { create(:design, :with_file, versions_count: 1) }
+
let!(:project) { create(:project, :design_repo) }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared }
diff --git a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
index d084b9d7f7e..29b192de809 100644
--- a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
@@ -12,6 +12,7 @@ RSpec.describe Gitlab::ImportExport::FastHashSerializer do
let_it_be(:user) { create(:user) }
let_it_be(:project) { setup_project }
+
let(:shared) { project.import_export_shared }
let(:reader) { Gitlab::ImportExport::Reader.new(shared: shared) }
let(:tree) { reader.project_tree }
diff --git a/spec/lib/gitlab/import_export/project/export_task_spec.rb b/spec/lib/gitlab/import_export/project/export_task_spec.rb
index 1048379a5d6..7fcd2187a90 100644
--- a/spec/lib/gitlab/import_export/project/export_task_spec.rb
+++ b/spec/lib/gitlab/import_export/project/export_task_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::ImportExport::Project::ExportTask do
let_it_be(:username) { 'root' }
let(:namespace_path) { username }
let_it_be(:user) { create(:user, username: username) }
+
let(:measurement_enabled) { false }
let(:file_path) { 'spec/fixtures/gitlab/import_export/test_project_export.tar.gz' }
let(:project) { create(:project, creator: user, namespace: user.namespace) }
diff --git a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
index e2bf87bf29f..bc5e6ea7bb3 100644
--- a/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_restorer_spec.rb
@@ -684,7 +684,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeRestorer do
it 'overrides project feature access levels' do
access_level_keys = ProjectFeature.available_features.map { |feature| ProjectFeature.access_level_attribute(feature) }
- disabled_access_levels = Hash[access_level_keys.collect { |item| [item, 'disabled'] }]
+ disabled_access_levels = access_level_keys.to_h { |item| [item, 'disabled'] }
project.create_import_data(data: { override_params: disabled_access_levels })
diff --git a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
index 50494433c5d..fd6c66a10a7 100644
--- a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
@@ -267,6 +267,7 @@ RSpec.describe Gitlab::ImportExport::Project::TreeSaver do
describe '#saves project tree' do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
+
let(:project) { setup_project }
let(:full_path) do
if ndjson_enabled
diff --git a/spec/lib/gitlab/import_export/repo_saver_spec.rb b/spec/lib/gitlab/import_export/repo_saver_spec.rb
index 52001e778d6..73e0e0a08b9 100644
--- a/spec/lib/gitlab/import_export/repo_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/repo_saver_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::ImportExport::RepoSaver do
describe 'bundle a project Git repo' do
let_it_be(:user) { create(:user) }
+
let!(:project) { create(:project, :repository) }
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared }
diff --git a/spec/lib/gitlab/import_export/snippet_repo_saver_spec.rb b/spec/lib/gitlab/import_export/snippet_repo_saver_spec.rb
index 323ed9a746e..9f3e8d2fa86 100644
--- a/spec/lib/gitlab/import_export/snippet_repo_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/snippet_repo_saver_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::ImportExport::SnippetRepoSaver do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, namespace: user.namespace) }
let_it_be(:snippet) { create(:project_snippet, :repository, project: project, author: user) }
+
let(:shared) { project.import_export_shared }
let(:bundler) { described_class.new(project: project, shared: shared, repository: snippet.repository) }
let(:bundle_path) { ::Gitlab::ImportExport.snippets_repo_bundle_path(shared.export_path) }
diff --git a/spec/lib/gitlab/import_export/snippets_repo_saver_spec.rb b/spec/lib/gitlab/import_export/snippets_repo_saver_spec.rb
index 8507c46ec83..aa284c60e73 100644
--- a/spec/lib/gitlab/import_export/snippets_repo_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/snippets_repo_saver_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Gitlab::ImportExport::SnippetsRepoSaver do
describe 'bundle a project Git repo' do
let_it_be(:user) { create(:user) }
+
let!(:project) { create(:project) }
let(:shared) { project.import_export_shared }
let(:bundler) { described_class.new(current_user: user, project: project, shared: shared) }
diff --git a/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb b/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb
index 540f90e7804..c936d2bc27d 100644
--- a/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/wiki_repo_saver_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::ImportExport::WikiRepoSaver do
describe 'bundle a wiki Git repo' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :wiki_repo) }
+
let(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let(:shared) { project.import_export_shared }
let(:wiki_bundler) { described_class.new(exportable: project, shared: shared) }
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index a5c9cde4c37..488324ccddc 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -6,53 +6,6 @@ require 'rspec-parameterized'
RSpec.describe Gitlab::InstrumentationHelper do
using RSpec::Parameterized::TableSyntax
- describe '.keys' do
- it 'returns all available payload keys' do
- expected_keys = [
- :cpu_s,
- :gitaly_calls,
- :gitaly_duration_s,
- :rugged_calls,
- :rugged_duration_s,
- :elasticsearch_calls,
- :elasticsearch_duration_s,
- :elasticsearch_timed_out_count,
- :mem_objects,
- :mem_bytes,
- :mem_mallocs,
- :redis_calls,
- :redis_duration_s,
- :redis_read_bytes,
- :redis_write_bytes,
- :redis_action_cable_calls,
- :redis_action_cable_duration_s,
- :redis_action_cable_read_bytes,
- :redis_action_cable_write_bytes,
- :redis_cache_calls,
- :redis_cache_duration_s,
- :redis_cache_read_bytes,
- :redis_cache_write_bytes,
- :redis_queues_calls,
- :redis_queues_duration_s,
- :redis_queues_read_bytes,
- :redis_queues_write_bytes,
- :redis_shared_state_calls,
- :redis_shared_state_duration_s,
- :redis_shared_state_read_bytes,
- :redis_shared_state_write_bytes,
- :db_count,
- :db_write_count,
- :db_cached_count,
- :external_http_count,
- :external_http_duration_s,
- :rack_attack_redis_count,
- :rack_attack_redis_duration_s
- ]
-
- expect(described_class.keys).to eq(expected_keys)
- end
- end
-
describe '.add_instrumentation_data', :request_store do
let(:payload) { {} }
diff --git a/spec/lib/gitlab/jira_import/base_importer_spec.rb b/spec/lib/gitlab/jira_import/base_importer_spec.rb
index 1470bad2c4c..9d8143775f9 100644
--- a/spec/lib/gitlab/jira_import/base_importer_spec.rb
+++ b/spec/lib/gitlab/jira_import/base_importer_spec.rb
@@ -27,6 +27,7 @@ RSpec.describe Gitlab::JiraImport::BaseImporter do
context 'when import data exists' do
let_it_be(:project) { create(:project) }
let_it_be(:jira_import) { create(:jira_import_state, project: project) }
+
let(:subject) { described_class.new(project) }
context 'when #imported_items_cache_key is not implemented' do
diff --git a/spec/lib/gitlab/jira_import/handle_labels_service_spec.rb b/spec/lib/gitlab/jira_import/handle_labels_service_spec.rb
index 4e2c5afb077..b8c0dc64581 100644
--- a/spec/lib/gitlab/jira_import/handle_labels_service_spec.rb
+++ b/spec/lib/gitlab/jira_import/handle_labels_service_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Gitlab::JiraImport::HandleLabelsService do
let_it_be(:project_label) { create(:label, project: project, title: 'bug') }
let_it_be(:other_project_label) { create(:label, title: 'feature') }
let_it_be(:group_label) { create(:group_label, group: group, title: 'dev') }
+
let(:jira_labels) { %w(bug feature dev group::new) }
subject { described_class.new(project, jira_labels).execute }
diff --git a/spec/lib/gitlab/jira_import_spec.rb b/spec/lib/gitlab/jira_import_spec.rb
index 2b602c80640..94fdff984d5 100644
--- a/spec/lib/gitlab/jira_import_spec.rb
+++ b/spec/lib/gitlab/jira_import_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe Gitlab::JiraImport do
include JiraServiceHelper
let_it_be(:project, reload: true) { create(:project) }
+
let(:additional_params) { {} }
subject { described_class.validate_project_settings!(project, **additional_params) }
diff --git a/spec/lib/gitlab/json_spec.rb b/spec/lib/gitlab/json_spec.rb
index 59ec94f2855..42c4b315edf 100644
--- a/spec/lib/gitlab/json_spec.rb
+++ b/spec/lib/gitlab/json_spec.rb
@@ -348,6 +348,66 @@ RSpec.describe Gitlab::Json do
subject
end
end
+
+ context "precompiled JSON" do
+ let(:obj) { Gitlab::Json::PrecompiledJson.new(result) }
+
+ it "renders the string directly" do
+ expect(subject).to eq(result)
+ end
+
+ it "calls #to_s on the object" do
+ expect(obj).to receive(:to_s).once
+
+ subject
+ end
+
+ it "doesn't run the JSON formatter" do
+ expect(Gitlab::Json).not_to receive(:dump)
+
+ subject
+ end
+ end
+ end
+
+ describe Gitlab::Json::PrecompiledJson do
+ subject(:precompiled) { described_class.new(obj) }
+
+ describe "#to_s" do
+ subject { precompiled.to_s }
+
+ context "obj is a string" do
+ let(:obj) { "{}" }
+
+ it "returns a string" do
+ expect(subject).to eq("{}")
+ end
+ end
+
+ context "obj is an array" do
+ let(:obj) { ["{\"foo\": \"bar\"}", "{}"] }
+
+ it "returns a string" do
+ expect(subject).to eq("[{\"foo\": \"bar\"},{}]")
+ end
+ end
+
+ context "obj is an array of un-stringables" do
+ let(:obj) { [BasicObject.new] }
+
+ it "raises an error" do
+ expect { subject }.to raise_error(NoMethodError)
+ end
+ end
+
+ context "obj is something else" do
+ let(:obj) { {} }
+
+ it "raises an error" do
+ expect { subject }.to raise_error(described_class::UnsupportedFormatError)
+ end
+ end
+ end
end
describe Gitlab::Json::LimitedEncoder do
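
Editor's note: the PrecompiledJson examples above show pre-rendered JSON strings bypassing the encoder. A small sketch of the behaviour they assert (usage only, based on the spec):

    Gitlab::Json::PrecompiledJson.new('{"foo": "bar"}').to_s  # => '{"foo": "bar"}'
    Gitlab::Json::PrecompiledJson.new(['{"a":1}', '{}']).to_s # => '[{"a":1},{}]'
    Gitlab::Json::PrecompiledJson.new({}).to_s                # raises UnsupportedFormatError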
diff --git a/spec/lib/gitlab/legacy_github_import/importer_spec.rb b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
index 56074147854..9a4d7bd996e 100644
--- a/spec/lib/gitlab/legacy_github_import/importer_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/importer_spec.rb
@@ -290,7 +290,7 @@ RSpec.describe Gitlab::LegacyGithubImport::Importer do
subject { described_class.new(project) }
before do
- project.update(import_type: 'gitea', import_url: "#{repo_root}/foo/group/project.git")
+ project.update!(import_type: 'gitea', import_url: "#{repo_root}/foo/group/project.git")
end
it_behaves_like 'Gitlab::LegacyGithubImport::Importer#execute' do
diff --git a/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
index 4b1e0d2c144..454bab8846c 100644
--- a/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/issue_formatter_spec.rb
@@ -152,7 +152,7 @@ RSpec.describe Gitlab::LegacyGithubImport::IssueFormatter do
context 'when importing a Gitea project' do
before do
- project.update(import_type: 'gitea')
+ project.update!(import_type: 'gitea')
end
it_behaves_like 'Gitlab::LegacyGithubImport::IssueFormatter#attributes'
diff --git a/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb
index 148b59dedab..64fcc46d304 100644
--- a/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/milestone_formatter_spec.rb
@@ -92,7 +92,7 @@ RSpec.describe Gitlab::LegacyGithubImport::MilestoneFormatter do
let(:iid_attr) { :id }
before do
- project.update(import_type: 'gitea')
+ project.update!(import_type: 'gitea')
end
it_behaves_like 'Gitlab::LegacyGithubImport::MilestoneFormatter#attributes'
diff --git a/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb b/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
index 3e6b9340d0b..7d8875e36c3 100644
--- a/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
+++ b/spec/lib/gitlab/legacy_github_import/pull_request_formatter_spec.rb
@@ -260,7 +260,7 @@ RSpec.describe Gitlab::LegacyGithubImport::PullRequestFormatter do
context 'when importing a Gitea project' do
before do
- project.update(import_type: 'gitea')
+ project.update!(import_type: 'gitea')
end
it_behaves_like 'Gitlab::LegacyGithubImport::PullRequestFormatter#attributes'
diff --git a/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb b/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
index be562d916d3..23dbd4a5bb3 100644
--- a/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
+++ b/spec/lib/gitlab/markdown_cache/active_record/extension_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Gitlab::MarkdownCache::ActiveRecord::Extension do
end
let(:cache_version) { Gitlab::MarkdownCache::CACHE_COMMONMARK_VERSION << 16 }
- let(:thing) { klass.create(title: markdown, title_html: html, cached_markdown_version: cache_version) }
+ let(:thing) { klass.create!(title: markdown, title_html: html, cached_markdown_version: cache_version) }
let(:markdown) { '`Foo`' }
let(:html) { '<p data-sourcepos="1:1-1:5" dir="auto"><code>Foo</code></p>' }
@@ -28,7 +28,7 @@ RSpec.describe Gitlab::MarkdownCache::ActiveRecord::Extension do
before do
thing.title = thing.title
- thing.save
+ thing.save!
end
it { expect(thing.title).to eq(markdown) }
@@ -38,11 +38,11 @@ RSpec.describe Gitlab::MarkdownCache::ActiveRecord::Extension do
end
context 'a changed markdown field' do
- let(:thing) { klass.create(title: markdown, title_html: html, cached_markdown_version: cache_version) }
+ let(:thing) { klass.create!(title: markdown, title_html: html, cached_markdown_version: cache_version) }
before do
thing.title = updated_markdown
- thing.save
+ thing.save!
end
it { expect(thing.title_html).to eq(updated_html) }
@@ -53,9 +53,9 @@ RSpec.describe Gitlab::MarkdownCache::ActiveRecord::Extension do
it do
expect(thing).to receive(:refresh_markdown_cache).once
thing.title = ''
- thing.save
+ thing.save!
thing.title = ''
- thing.save
+ thing.save!
end
end
@@ -63,9 +63,9 @@ RSpec.describe Gitlab::MarkdownCache::ActiveRecord::Extension do
it do
expect(thing).to receive(:refresh_markdown_cache).once
thing.title = '[//]: # (This is also a comment.)'
- thing.save
+ thing.save!
thing.title = '[//]: # (This is also a comment.)'
- thing.save
+ thing.save!
end
end
@@ -74,7 +74,7 @@ RSpec.describe Gitlab::MarkdownCache::ActiveRecord::Extension do
before do
thing.state_id = 2
- thing.save
+ thing.save!
end
it { expect(thing.state_id).to eq(2) }
@@ -87,7 +87,7 @@ RSpec.describe Gitlab::MarkdownCache::ActiveRecord::Extension do
let(:thing) { klass.new(title: updated_markdown, title_html: html, cached_markdown_version: nil) }
before do
- thing.save
+ thing.save!
end
it { expect(thing.title_html).to eq(updated_html) }
@@ -99,7 +99,7 @@ RSpec.describe Gitlab::MarkdownCache::ActiveRecord::Extension do
thing.project = :new_project
allow(Banzai::Renderer).to receive(:cacheless_render_field).and_return(updated_html)
- thing.save
+ thing.save!
expect(thing.title_html).to eq(updated_html)
expect(thing.description_html).to eq(updated_html)
@@ -110,7 +110,7 @@ RSpec.describe Gitlab::MarkdownCache::ActiveRecord::Extension do
thing.author = :new_author
allow(Banzai::Renderer).to receive(:cacheless_render_field).and_return(updated_html)
- thing.save
+ thing.save!
expect(thing.title_html).to eq(updated_html)
expect(thing.description_html).to eq(updated_html)
@@ -125,7 +125,7 @@ RSpec.describe Gitlab::MarkdownCache::ActiveRecord::Extension do
end
describe '#cached_html_up_to_date?' do
- let(:thing) { klass.create(title: updated_markdown, title_html: html, cached_markdown_version: nil) }
+ let(:thing) { klass.create!(title: updated_markdown, title_html: html, cached_markdown_version: nil) }
subject { thing.cached_html_up_to_date?(:title) }
diff --git a/spec/lib/gitlab/markdown_cache/redis/extension_spec.rb b/spec/lib/gitlab/markdown_cache/redis/extension_spec.rb
index 3dcb9f160ba..b5d458f15fc 100644
--- a/spec/lib/gitlab/markdown_cache/redis/extension_spec.rb
+++ b/spec/lib/gitlab/markdown_cache/redis/extension_spec.rb
@@ -7,7 +7,8 @@ RSpec.describe Gitlab::MarkdownCache::Redis::Extension, :clean_gitlab_redis_cach
include CacheMarkdownField
def initialize(title: nil, description: nil)
- @title, @description = title, description
+ @title = title
+ @description = description
end
attr_reader :title, :description
diff --git a/spec/lib/gitlab/markdown_cache/redis/store_spec.rb b/spec/lib/gitlab/markdown_cache/redis/store_spec.rb
index bf40af8e62e..07a87b245c2 100644
--- a/spec/lib/gitlab/markdown_cache/redis/store_spec.rb
+++ b/spec/lib/gitlab/markdown_cache/redis/store_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe Gitlab::MarkdownCache::Redis::Store, :clean_gitlab_redis_cache do
describe '.bulk_read' do
before do
- store.save(field_1_html: "hello", field_2_html: "world", cached_markdown_version: 1)
+ store.save(field_1_html: "hello", field_2_html: "world", cached_markdown_version: 1) # rubocop:disable Rails/SaveBang
end
it 'returns a hash of values from store' do
@@ -59,7 +59,7 @@ RSpec.describe Gitlab::MarkdownCache::Redis::Store, :clean_gitlab_redis_cache do
it 'stores updates to html fields and version' do
values_to_store = { field_1_html: "hello", field_2_html: "world", cached_markdown_version: 1 }
- store.save(values_to_store)
+ store.save(values_to_store) # rubocop:disable Rails/SaveBang
expect(read_values)
.to eq(field_1_html: "hello", field_2_html: "world", cached_markdown_version: "1")
diff --git a/spec/lib/gitlab/marker_range_spec.rb b/spec/lib/gitlab/marker_range_spec.rb
index 5f73d2a5048..c4670ec58a8 100644
--- a/spec/lib/gitlab/marker_range_spec.rb
+++ b/spec/lib/gitlab/marker_range_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Gitlab::MarkerRange do
let(:last) { 10 }
let(:mode) { nil }
- it { is_expected.to eq(first..last) }
+ it { expect(marker_range.to_range).to eq(first..last) }
it 'behaves like a Range' do
is_expected.to be_kind_of(Range)
@@ -51,14 +51,14 @@ RSpec.describe Gitlab::MarkerRange do
end
it 'keeps correct range' do
- is_expected.to eq(range)
+ is_expected.to eq(described_class.new(1, 3))
end
context 'when range excludes end' do
let(:range) { 1...3 }
it 'keeps correct range' do
- is_expected.to eq(range)
+ is_expected.to eq(described_class.new(1, 3, exclude_end: true))
end
end
@@ -68,4 +68,31 @@ RSpec.describe Gitlab::MarkerRange do
it { is_expected.to be(marker_range) }
end
end
+
+ describe '#==' do
+ subject { default_marker_range == another_marker_range }
+
+ let(:default_marker_range) { described_class.new(0, 1, mode: :addition) }
+ let(:another_marker_range) { default_marker_range }
+
+ it { is_expected.to be_truthy }
+
+ context 'when marker ranges have different modes' do
+ let(:another_marker_range) { described_class.new(0, 1, mode: :deletion) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when marker ranges have different ranges' do
+ let(:another_marker_range) { described_class.new(0, 2, mode: :addition) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when another marker range is a simple range' do
+ let(:another_marker_range) { (0..1) }
+
+ it { is_expected.to be_falsey }
+ end
+ end
end
diff --git a/spec/lib/gitlab/metrics/background_transaction_spec.rb b/spec/lib/gitlab/metrics/background_transaction_spec.rb
index b31a2f7549a..d36ee24fc50 100644
--- a/spec/lib/gitlab/metrics/background_transaction_spec.rb
+++ b/spec/lib/gitlab/metrics/background_transaction_spec.rb
@@ -29,19 +29,62 @@ RSpec.describe Gitlab::Metrics::BackgroundTransaction do
end
describe '#labels' do
- it 'provides labels with endpoint_id and feature_category' do
- Labkit::Context.with_context(feature_category: 'projects', caller_id: 'TestWorker') do
- expect(transaction.labels).to eq({ endpoint_id: 'TestWorker', feature_category: 'projects' })
+ context 'when the worker queue is accessible' do
+ before do
+ test_worker_class = Class.new do
+ def self.queue
+ 'test_worker'
+ end
+ end
+ stub_const('TestWorker', test_worker_class)
+ end
+
+ it 'provides labels with endpoint_id, feature_category and queue' do
+ Gitlab::ApplicationContext.with_raw_context(feature_category: 'projects', caller_id: 'TestWorker') do
+ expect(transaction.labels).to eq({ endpoint_id: 'TestWorker', feature_category: 'projects', queue: 'test_worker' })
+ end
+ end
+ end
+
+ context 'when the worker name does not exist' do
+ it 'provides labels with endpoint_id and feature_category' do
+ # 123TestWorker is an invalid constant
+ Gitlab::ApplicationContext.with_raw_context(feature_category: 'projects', caller_id: '123TestWorker') do
+ expect(transaction.labels).to eq({ endpoint_id: '123TestWorker', feature_category: 'projects', queue: nil })
+ end
+ end
+ end
+
+ context 'when the worker queue is not accessible' do
+ before do
+ stub_const('TestWorker', Class.new)
+ end
+
+ it 'provides labels with endpoint_id and feature_category' do
+ Gitlab::ApplicationContext.with_raw_context(feature_category: 'projects', caller_id: 'TestWorker') do
+ expect(transaction.labels).to eq({ endpoint_id: 'TestWorker', feature_category: 'projects', queue: nil })
+ end
end
end
end
RSpec.shared_examples 'metric with labels' do |metric_method|
+ before do
+ test_worker_class = Class.new do
+ def self.queue
+ 'test_worker'
+ end
+ end
+ stub_const('TestWorker', test_worker_class)
+ end
+
it 'measures with correct labels and value' do
value = 1
- expect(prometheus_metric).to receive(metric_method).with({ endpoint_id: 'TestWorker', feature_category: 'projects' }, value)
+ expect(prometheus_metric).to receive(metric_method).with({
+ endpoint_id: 'TestWorker', feature_category: 'projects', queue: 'test_worker'
+ }, value)
- Labkit::Context.with_context(feature_category: 'projects', caller_id: 'TestWorker') do
+ Gitlab::ApplicationContext.with_raw_context(feature_category: 'projects', caller_id: 'TestWorker') do
transaction.send(metric_method, :test_metric, value)
end
end
diff --git a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
index dffd37eeb9d..6bfcfa21289 100644
--- a/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/active_record_spec.rb
@@ -8,65 +8,146 @@ RSpec.describe Gitlab::Metrics::Subscribers::ActiveRecord do
let(:env) { {} }
let(:subscriber) { described_class.new }
let(:connection) { double(:connection) }
- let(:payload) { { sql: 'SELECT * FROM users WHERE id = 10', connection: connection } }
-
- let(:event) do
- double(
- :event,
- name: 'sql.active_record',
- duration: 2,
- payload: payload
- )
- end
- # Emulate Marginalia pre-pending comments
- def sql(query, comments: true)
- if comments && !%w[BEGIN COMMIT].include?(query)
- "/*application:web,controller:badges,action:pipeline,correlation_id:01EYN39K9VMJC56Z7808N7RSRH*/ #{query}"
- else
- query
+ describe '#transaction' do
+ let(:web_transaction) { double('Gitlab::Metrics::WebTransaction') }
+ let(:background_transaction) { double('Gitlab::Metrics::BackgroundTransaction') }
+
+ let(:event) do
+ double(
+ :event,
+ name: 'transaction.active_record',
+ duration: 230,
+ payload: { connection: connection }
+ )
end
- end
- shared_examples 'track generic sql events' do
- where(:name, :sql_query, :record_query, :record_write_query, :record_cached_query) do
- 'SQL' | 'SELECT * FROM users WHERE id = 10' | true | false | false
- 'SQL' | 'WITH active_milestones AS (SELECT COUNT(*), state FROM milestones GROUP BY state) SELECT * FROM active_milestones' | true | false | false
- 'SQL' | 'SELECT * FROM users WHERE id = 10 FOR UPDATE' | true | true | false
- 'SQL' | 'WITH archived_rows AS (SELECT * FROM users WHERE archived = true) INSERT INTO products_log SELECT * FROM archived_rows' | true | true | false
- 'SQL' | 'DELETE FROM users where id = 10' | true | true | false
- 'SQL' | 'INSERT INTO project_ci_cd_settings (project_id) SELECT id FROM projects' | true | true | false
- 'SQL' | 'UPDATE users SET admin = true WHERE id = 10' | true | true | false
- 'CACHE' | 'SELECT * FROM users WHERE id = 10' | true | false | true
- 'SCHEMA' | "SELECT attr.attname FROM pg_attribute attr INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey) WHERE cons.contype = 'p' AND cons.conrelid = '\"projects\"'::regclass" | false | false | false
- nil | 'BEGIN' | false | false | false
- nil | 'COMMIT' | false | false | false
+ before do
+ allow(background_transaction).to receive(:observe)
+ allow(web_transaction).to receive(:observe)
end
- with_them do
- let(:payload) { { name: name, sql: sql(sql_query, comments: comments), connection: connection } }
+ context 'when both web and background transaction are available' do
+ before do
+ allow(::Gitlab::Metrics::WebTransaction).to receive(:current)
+ .and_return(web_transaction)
+ allow(::Gitlab::Metrics::BackgroundTransaction).to receive(:current)
+ .and_return(background_transaction)
+ end
+
+ it 'captures the metrics for web only' do
+ expect(web_transaction).to receive(:observe).with(:gitlab_database_transaction_seconds, 0.23)
- it 'marks the current thread as using the database' do
- # since it would already have been toggled by other specs
- Thread.current[:uses_db_connection] = nil
+ expect(background_transaction).not_to receive(:observe)
+ expect(background_transaction).not_to receive(:increment)
- expect { subscriber.sql(event) }.to change { Thread.current[:uses_db_connection] }.from(nil).to(true)
+ subscriber.transaction(event)
end
+ end
+
+ context 'when web transaction is available' do
+ let(:web_transaction) { double('Gitlab::Metrics::WebTransaction') }
+
+ before do
+ allow(::Gitlab::Metrics::WebTransaction).to receive(:current)
+ .and_return(web_transaction)
+ allow(::Gitlab::Metrics::BackgroundTransaction).to receive(:current)
+ .and_return(nil)
+ end
+
+ it 'captures the metrics for web only' do
+ expect(web_transaction).to receive(:observe).with(:gitlab_database_transaction_seconds, 0.23)
- it_behaves_like 'record ActiveRecord metrics'
- it_behaves_like 'store ActiveRecord info in RequestStore'
+ expect(background_transaction).not_to receive(:observe)
+ expect(background_transaction).not_to receive(:increment)
+
+ subscriber.transaction(event)
+ end
end
- end
- context 'without Marginalia comments' do
- let(:comments) { false }
+ context 'when background transaction is available' do
+ let(:background_transaction) { double('Gitlab::Metrics::BackgroundTransaction') }
+
+ before do
+ allow(::Gitlab::Metrics::WebTransaction).to receive(:current)
+ .and_return(nil)
+ allow(::Gitlab::Metrics::BackgroundTransaction).to receive(:current)
+ .and_return(background_transaction)
+ end
- it_behaves_like 'track generic sql events'
+ it 'captures the metrics for background only' do
+ expect(background_transaction).to receive(:observe).with(:gitlab_database_transaction_seconds, 0.23)
+
+ expect(web_transaction).not_to receive(:observe)
+ expect(web_transaction).not_to receive(:increment)
+
+ subscriber.transaction(event)
+ end
+ end
end
- context 'with Marginalia comments' do
- let(:comments) { true }
+ describe '#sql' do
+ let(:payload) { { sql: 'SELECT * FROM users WHERE id = 10', connection: connection } }
- it_behaves_like 'track generic sql events'
+ let(:event) do
+ double(
+ :event,
+ name: 'sql.active_record',
+ duration: 2,
+ payload: payload
+ )
+ end
+
+ # Emulate Marginalia pre-pending comments
+ def sql(query, comments: true)
+ if comments && !%w[BEGIN COMMIT].include?(query)
+ "/*application:web,controller:badges,action:pipeline,correlation_id:01EYN39K9VMJC56Z7808N7RSRH*/ #{query}"
+ else
+ query
+ end
+ end
+
+ shared_examples 'track generic sql events' do
+ where(:name, :sql_query, :record_query, :record_write_query, :record_cached_query) do
+ 'SQL' | 'SELECT * FROM users WHERE id = 10' | true | false | false
+ 'SQL' | 'WITH active_milestones AS (SELECT COUNT(*), state FROM milestones GROUP BY state) SELECT * FROM active_milestones' | true | false | false
+ 'SQL' | 'SELECT * FROM users WHERE id = 10 FOR UPDATE' | true | true | false
+ 'SQL' | 'WITH archived_rows AS (SELECT * FROM users WHERE archived = true) INSERT INTO products_log SELECT * FROM archived_rows' | true | true | false
+ 'SQL' | 'DELETE FROM users where id = 10' | true | true | false
+ 'SQL' | 'INSERT INTO project_ci_cd_settings (project_id) SELECT id FROM projects' | true | true | false
+ 'SQL' | 'UPDATE users SET admin = true WHERE id = 10' | true | true | false
+ 'CACHE' | 'SELECT * FROM users WHERE id = 10' | true | false | true
+ 'SCHEMA' | "SELECT attr.attname FROM pg_attribute attr INNER JOIN pg_constraint cons ON attr.attrelid = cons.conrelid AND attr.attnum = any(cons.conkey) WHERE cons.contype = 'p' AND cons.conrelid = '\"projects\"'::regclass" | false | false | false
+ nil | 'BEGIN' | false | false | false
+ nil | 'COMMIT' | false | false | false
+ end
+
+ with_them do
+ let(:payload) { { name: name, sql: sql(sql_query, comments: comments), connection: connection } }
+ let(:record_wal_query) { false }
+
+ it 'marks the current thread as using the database' do
+ # since it would already have been toggled by other specs
+ Thread.current[:uses_db_connection] = nil
+
+ expect { subscriber.sql(event) }.to change { Thread.current[:uses_db_connection] }.from(nil).to(true)
+ end
+
+ it_behaves_like 'record ActiveRecord metrics'
+ it_behaves_like 'store ActiveRecord info in RequestStore'
+ end
+ end
+
+ context 'without Marginalia comments' do
+ let(:comments) { false }
+
+ it_behaves_like 'track generic sql events'
+ end
+
+ context 'with Marginalia comments' do
+ let(:comments) { true }
+
+ it_behaves_like 'track generic sql events'
+ end
end
end
diff --git a/spec/lib/gitlab/metrics/subscribers/external_http_spec.rb b/spec/lib/gitlab/metrics/subscribers/external_http_spec.rb
index 5bcaf8fbc47..adbc05cb711 100644
--- a/spec/lib/gitlab/metrics/subscribers/external_http_spec.rb
+++ b/spec/lib/gitlab/metrics/subscribers/external_http_spec.rb
@@ -6,29 +6,45 @@ RSpec.describe Gitlab::Metrics::Subscribers::ExternalHttp, :request_store do
let(:transaction) { Gitlab::Metrics::Transaction.new }
let(:subscriber) { described_class.new }
+ around do |example|
+ freeze_time { example.run }
+ end
+
let(:event_1) do
- double(:event, payload: {
- method: 'POST', code: "200", duration: 0.321,
- scheme: 'https', host: 'gitlab.com', port: 80, path: '/api/v4/projects',
- query: 'current=true'
- })
+ double(
+ :event,
+ payload: {
+ method: 'POST', code: "200", duration: 0.321,
+ scheme: 'https', host: 'gitlab.com', port: 80, path: '/api/v4/projects',
+ query: 'current=true'
+ },
+ time: Time.current
+ )
end
let(:event_2) do
- double(:event, payload: {
- method: 'GET', code: "301", duration: 0.12,
- scheme: 'http', host: 'gitlab.com', port: 80, path: '/api/v4/projects/2',
- query: 'current=true'
- })
+ double(
+ :event,
+ payload: {
+ method: 'GET', code: "301", duration: 0.12,
+ scheme: 'http', host: 'gitlab.com', port: 80, path: '/api/v4/projects/2',
+ query: 'current=true'
+ },
+ time: Time.current
+ )
end
let(:event_3) do
- double(:event, payload: {
- method: 'POST', duration: 5.3,
- scheme: 'http', host: 'gitlab.com', port: 80, path: '/api/v4/projects/2/issues',
- query: 'current=true',
- exception_object: Net::ReadTimeout.new
- })
+ double(
+ :event,
+ payload: {
+ method: 'POST', duration: 5.3,
+ scheme: 'http', host: 'gitlab.com', port: 80, path: '/api/v4/projects/2/issues',
+ query: 'current=true',
+ exception_object: Net::ReadTimeout.new
+ },
+ time: Time.current
+ )
end
describe '.detail_store' do
@@ -134,19 +150,22 @@ RSpec.describe Gitlab::Metrics::Subscribers::ExternalHttp, :request_store do
subscriber.request(event_3)
expect(Gitlab::SafeRequestStore[:external_http_detail_store].length).to eq(3)
- expect(Gitlab::SafeRequestStore[:external_http_detail_store][0]).to include(
+ expect(Gitlab::SafeRequestStore[:external_http_detail_store][0]).to match a_hash_including(
+ start: be_like_time(Time.current),
method: 'POST', code: "200", duration: 0.321,
scheme: 'https', host: 'gitlab.com', port: 80, path: '/api/v4/projects',
query: 'current=true', exception_object: nil,
backtrace: be_a(Array)
)
- expect(Gitlab::SafeRequestStore[:external_http_detail_store][1]).to include(
+ expect(Gitlab::SafeRequestStore[:external_http_detail_store][1]).to match a_hash_including(
+ start: be_like_time(Time.current),
method: 'GET', code: "301", duration: 0.12,
scheme: 'http', host: 'gitlab.com', port: 80, path: '/api/v4/projects/2',
query: 'current=true', exception_object: nil,
backtrace: be_a(Array)
)
- expect(Gitlab::SafeRequestStore[:external_http_detail_store][2]).to include(
+ expect(Gitlab::SafeRequestStore[:external_http_detail_store][2]).to match a_hash_including(
+ start: be_like_time(Time.current),
method: 'POST', duration: 5.3,
scheme: 'http', host: 'gitlab.com', port: 80, path: '/api/v4/projects/2/issues',
query: 'current=true',
diff --git a/spec/lib/gitlab/middleware/rack_multipart_tempfile_factory_spec.rb b/spec/lib/gitlab/middleware/rack_multipart_tempfile_factory_spec.rb
new file mode 100644
index 00000000000..b9d00b556c5
--- /dev/null
+++ b/spec/lib/gitlab/middleware/rack_multipart_tempfile_factory_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rack'
+
+RSpec.describe Gitlab::Middleware::RackMultipartTempfileFactory do
+ let(:app) do
+ lambda do |env|
+ params = Rack::Request.new(env).params
+
+ if params['file']
+ [200, { 'Content-Type' => params['file'][:type] }, [params['file'][:tempfile].read]]
+ else
+ [204, {}, []]
+ end
+ end
+ end
+
+ let(:file_contents) { '/9j/4AAQSkZJRgABAQAAAQABAAD//gA+Q1JFQVRPUjogZ2QtanBlZyB2MS4wICh1c2luZyBJSkcg' }
+
+ let(:multipart_fixture) do
+ boundary = 'AaB03x'
+ data = <<~DATA
+ --#{boundary}\r
+ Content-Disposition: form-data; name="file"; filename="dj.jpg"\r
+ Content-Type: image/jpeg\r
+ Content-Transfer-Encoding: base64\r
+ \r
+ #{file_contents}\r
+ --#{boundary}--\r
+ DATA
+
+ {
+ 'CONTENT_TYPE' => "multipart/form-data; boundary=#{boundary}",
+ 'CONTENT_LENGTH' => data.bytesize.to_s,
+ input: StringIO.new(data)
+ }
+ end
+
+ subject { described_class.new(app) }
+
+ context 'for a multipart request' do
+ let(:env) { Rack::MockRequest.env_for('/', multipart_fixture) }
+
+ context 'when the environment variable is enabled' do
+ before do
+ stub_env('GITLAB_TEMPFILE_IMMEDIATE_UNLINK', '1')
+ end
+
+ it 'immediately unlinks the temporary file' do
+ tempfile = Tempfile.new('foo')
+
+ expect(tempfile.path).not_to be(nil)
+ expect(Rack::Multipart::Parser::TEMPFILE_FACTORY).to receive(:call).and_return(tempfile)
+ expect(tempfile).to receive(:unlink).and_call_original
+
+ subject.call(env)
+
+ expect(tempfile.path).to be(nil)
+ end
+
+ it 'processes the request as normal' do
+ expect(subject.call(env)).to eq([200, { 'Content-Type' => 'image/jpeg' }, [file_contents]])
+ end
+ end
+
+ context 'when the environment variable is disabled' do
+ it 'does not immediately unlink the temporary file' do
+ tempfile = Tempfile.new('foo')
+
+ expect(tempfile.path).not_to be(nil)
+ expect(Rack::Multipart::Parser::TEMPFILE_FACTORY).to receive(:call).and_return(tempfile)
+ expect(tempfile).not_to receive(:unlink).and_call_original
+
+ subject.call(env)
+
+ expect(tempfile.path).not_to be(nil)
+ end
+
+ it 'processes the request as normal' do
+ expect(subject.call(env)).to eq([200, { 'Content-Type' => 'image/jpeg' }, [file_contents]])
+ end
+ end
+ end
+
+ context 'for a regular request' do
+ let(:env) { Rack::MockRequest.env_for('/', params: { 'foo' => 'bar' }) }
+
+ it 'does nothing' do
+ expect(Rack::Multipart::Parser::TEMPFILE_FACTORY).not_to receive(:call)
+ expect(subject.call(env)).to eq([204, {}, []])
+ end
+ end
+end
diff --git a/spec/lib/gitlab/object_hierarchy_spec.rb b/spec/lib/gitlab/object_hierarchy_spec.rb
index 08e1a5ee0a3..eebd67695e0 100644
--- a/spec/lib/gitlab/object_hierarchy_spec.rb
+++ b/spec/lib/gitlab/object_hierarchy_spec.rb
@@ -3,14 +3,16 @@
require 'spec_helper'
RSpec.describe Gitlab::ObjectHierarchy do
- let!(:parent) { create(:group) }
- let!(:child1) { create(:group, parent: parent) }
- let!(:child2) { create(:group, parent: child1) }
+ let_it_be(:parent) { create(:group) }
+ let_it_be(:child1) { create(:group, parent: parent) }
+ let_it_be(:child2) { create(:group, parent: child1) }
+
+ let(:options) { {} }
shared_context 'Gitlab::ObjectHierarchy test cases' do
describe '#base_and_ancestors' do
let(:relation) do
- described_class.new(Group.where(id: child2.id)).base_and_ancestors
+ described_class.new(Group.where(id: child2.id), options: options).base_and_ancestors
end
it 'includes the base rows' do
@@ -22,13 +24,13 @@ RSpec.describe Gitlab::ObjectHierarchy do
end
it 'can find ancestors up to a certain level' do
- relation = described_class.new(Group.where(id: child2)).base_and_ancestors(upto: child1)
+ relation = described_class.new(Group.where(id: child2), options: options).base_and_ancestors(upto: child1)
expect(relation).to contain_exactly(child2)
end
it 'uses ancestors_base #initialize argument' do
- relation = described_class.new(Group.where(id: child2.id), Group.none).base_and_ancestors
+ relation = described_class.new(Group.where(id: child2.id), Group.none, options: options).base_and_ancestors
expect(relation).to include(parent, child1, child2)
end
@@ -40,7 +42,7 @@ RSpec.describe Gitlab::ObjectHierarchy do
describe 'hierarchy_order option' do
let(:relation) do
- described_class.new(Group.where(id: child2.id)).base_and_ancestors(hierarchy_order: hierarchy_order)
+ described_class.new(Group.where(id: child2.id), options: options).base_and_ancestors(hierarchy_order: hierarchy_order)
end
context ':asc' do
@@ -63,7 +65,7 @@ RSpec.describe Gitlab::ObjectHierarchy do
describe '#base_and_descendants' do
let(:relation) do
- described_class.new(Group.where(id: parent.id)).base_and_descendants
+ described_class.new(Group.where(id: parent.id), options: options).base_and_descendants
end
it 'includes the base rows' do
@@ -75,7 +77,7 @@ RSpec.describe Gitlab::ObjectHierarchy do
end
it 'uses descendants_base #initialize argument' do
- relation = described_class.new(Group.none, Group.where(id: parent.id)).base_and_descendants
+ relation = described_class.new(Group.none, Group.where(id: parent.id), options: options).base_and_descendants
expect(relation).to include(parent, child1, child2)
end
@@ -87,7 +89,7 @@ RSpec.describe Gitlab::ObjectHierarchy do
context 'when with_depth is true' do
let(:relation) do
- described_class.new(Group.where(id: parent.id)).base_and_descendants(with_depth: true)
+ described_class.new(Group.where(id: parent.id), options: options).base_and_descendants(with_depth: true)
end
it 'includes depth in the results' do
@@ -106,14 +108,14 @@ RSpec.describe Gitlab::ObjectHierarchy do
describe '#descendants' do
it 'includes only the descendants' do
- relation = described_class.new(Group.where(id: parent)).descendants
+ relation = described_class.new(Group.where(id: parent), options: options).descendants
expect(relation).to contain_exactly(child1, child2)
end
end
describe '#max_descendants_depth' do
- subject { described_class.new(base_relation).max_descendants_depth }
+ subject { described_class.new(base_relation, options: options).max_descendants_depth }
context 'when base relation is empty' do
let(:base_relation) { Group.where(id: nil) }
@@ -136,13 +138,13 @@ RSpec.describe Gitlab::ObjectHierarchy do
describe '#ancestors' do
it 'includes only the ancestors' do
- relation = described_class.new(Group.where(id: child2)).ancestors
+ relation = described_class.new(Group.where(id: child2), options: options).ancestors
expect(relation).to contain_exactly(child1, parent)
end
it 'can find ancestors up to a certain level' do
- relation = described_class.new(Group.where(id: child2)).ancestors(upto: child1)
+ relation = described_class.new(Group.where(id: child2), options: options).ancestors(upto: child1)
expect(relation).to be_empty
end
@@ -150,7 +152,7 @@ RSpec.describe Gitlab::ObjectHierarchy do
describe '#all_objects' do
let(:relation) do
- described_class.new(Group.where(id: child1.id)).all_objects
+ described_class.new(Group.where(id: child1.id), options: options).all_objects
end
it 'includes the base rows' do
@@ -166,13 +168,13 @@ RSpec.describe Gitlab::ObjectHierarchy do
end
it 'uses ancestors_base #initialize argument for ancestors' do
- relation = described_class.new(Group.where(id: child1.id), Group.where(id: non_existing_record_id)).all_objects
+ relation = described_class.new(Group.where(id: child1.id), Group.where(id: non_existing_record_id), options: options).all_objects
expect(relation).to include(parent)
end
it 'uses descendants_base #initialize argument for descendants' do
- relation = described_class.new(Group.where(id: non_existing_record_id), Group.where(id: child1.id)).all_objects
+ relation = described_class.new(Group.where(id: non_existing_record_id), Group.where(id: child1.id), options: options).all_objects
expect(relation).to include(child2)
end
@@ -187,19 +189,78 @@ RSpec.describe Gitlab::ObjectHierarchy do
context 'when the use_distinct_in_object_hierarchy feature flag is enabled' do
before do
stub_feature_flags(use_distinct_in_object_hierarchy: true)
+ stub_feature_flags(use_distinct_for_all_object_hierarchy: false)
+ end
+
+ it_behaves_like 'Gitlab::ObjectHierarchy test cases'
+
+ it 'calls DISTINCT' do
+ expect(child2.self_and_ancestors.to_sql).to include("DISTINCT")
+ end
+
+ context 'when use_traversal_ids feature flag is enabled' do
+ it 'does not call DISTINCT' do
+ expect(parent.self_and_descendants.to_sql).not_to include("DISTINCT")
+ end
+ end
+
+ context 'when use_traversal_ids feature flag is disabled' do
+ before do
+ stub_feature_flags(use_traversal_ids: false)
+ end
+
+ it 'calls DISTINCT' do
+ expect(parent.self_and_descendants.to_sql).to include("DISTINCT")
+ end
+ end
+ end
+
+ context 'when the use_distinct_for_all_object_hierarchy feature flag is enabled' do
+ before do
+ stub_feature_flags(use_distinct_in_object_hierarchy: false)
+ stub_feature_flags(use_distinct_for_all_object_hierarchy: true)
end
it_behaves_like 'Gitlab::ObjectHierarchy test cases'
it 'calls DISTINCT' do
- expect(parent.self_and_descendants.to_sql).to include("DISTINCT")
expect(child2.self_and_ancestors.to_sql).to include("DISTINCT")
end
+
+ context 'when use_traversal_ids feature flag is enabled' do
+ it 'does not call DISTINCT' do
+ expect(parent.self_and_descendants.to_sql).not_to include("DISTINCT")
+ end
+ end
+
+ context 'when use_traversal_ids feature flag is disabled' do
+ before do
+ stub_feature_flags(use_traversal_ids: false)
+ end
+
+ it 'calls DISTINCT' do
+ expect(parent.self_and_descendants.to_sql).to include("DISTINCT")
+ end
+
+ context 'when the skip_ordering option is set' do
+ let(:options) { { skip_ordering: true } }
+
+ it_behaves_like 'Gitlab::ObjectHierarchy test cases'
+
+ it 'does not include ROW_NUMBER()' do
+ query = described_class.new(Group.where(id: parent.id), options: options).base_and_descendants.to_sql
+
+ expect(query).to include("DISTINCT")
+ expect(query).not_to include("ROW_NUMBER()")
+ end
+ end
+ end
end
context 'when the use_distinct_in_object_hierarchy feature flag is disabled' do
before do
stub_feature_flags(use_distinct_in_object_hierarchy: false)
+ stub_feature_flags(use_distinct_for_all_object_hierarchy: false)
end
it_behaves_like 'Gitlab::ObjectHierarchy test cases'
diff --git a/spec/lib/gitlab/pages/settings_spec.rb b/spec/lib/gitlab/pages/settings_spec.rb
index f5424a98153..c89bf9ff206 100644
--- a/spec/lib/gitlab/pages/settings_spec.rb
+++ b/spec/lib/gitlab/pages/settings_spec.rb
@@ -3,11 +3,11 @@
require 'spec_helper'
RSpec.describe Gitlab::Pages::Settings do
+ let(:settings) { double(path: 'the path', local_store: 'local store') }
+
describe '#path' do
subject { described_class.new(settings).path }
- let(:settings) { double(path: 'the path') }
-
it { is_expected.to eq('the path') }
context 'when running under a web server outside of test mode' do
@@ -16,9 +16,43 @@ RSpec.describe Gitlab::Pages::Settings do
allow(::Gitlab::Runtime).to receive(:web_server?).and_return(true)
end
- it 'raises a DiskAccessDenied exception' do
- expect { subject }.to raise_error(described_class::DiskAccessDenied)
+ it 'logs a DiskAccessDenied error' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ instance_of(described_class::DiskAccessDenied)
+ )
+
+ subject
+ end
+ end
+
+ context 'when the local_store setting does not exist yet' do
+ before do
+ allow(Settings.pages).to receive(:local_store).and_return(nil)
end
+
+ it { is_expected.to eq('the path') }
+ end
+
+ context 'when local store exists but legacy storage is disabled' do
+ before do
+ allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
+ end
+
+ it 'logs a DiskAccessDenied error' do
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(
+ instance_of(described_class::DiskAccessDenied)
+ )
+
+ subject
+ end
+ end
+ end
+
+ describe '#local_store' do
+ subject(:local_store) { described_class.new(settings).local_store }
+
+ it 'is an instance of Gitlab::Pages::Stores::LocalStore' do
+ expect(local_store).to be_a(Gitlab::Pages::Stores::LocalStore)
end
end
end
diff --git a/spec/lib/gitlab/pages/stores/local_store_spec.rb b/spec/lib/gitlab/pages/stores/local_store_spec.rb
new file mode 100644
index 00000000000..adab81b2589
--- /dev/null
+++ b/spec/lib/gitlab/pages/stores/local_store_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pages::Stores::LocalStore do
+ describe '#enabled' do
+ let(:local_store) { double(enabled: true) }
+
+ subject(:local_store_enabled) { described_class.new(local_store).enabled }
+
+ context 'when the pages_update_legacy_storage FF is disabled' do
+ before do
+ stub_feature_flags(pages_update_legacy_storage: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when the pages_update_legacy_storage FF is enabled' do
+ it 'is equal to the original value' do
+ expect(local_store_enabled).to eq(local_store.enabled)
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/pages_transfer_spec.rb b/spec/lib/gitlab/pages_transfer_spec.rb
index 552a2e0701c..021d9cb7318 100644
--- a/spec/lib/gitlab/pages_transfer_spec.rb
+++ b/spec/lib/gitlab/pages_transfer_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::PagesTransfer do
end
it 'does nothing if legacy storage is disabled' do
- stub_feature_flags(pages_update_legacy_storage: false)
+ allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
described_class::METHODS.each do |meth|
expect(PagesTransferWorker)
@@ -72,7 +72,7 @@ RSpec.describe Gitlab::PagesTransfer do
end
it 'does nothing if legacy storage is disabled' do
- stub_feature_flags(pages_update_legacy_storage: false)
+ allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
subject.public_send(meth, *args)
diff --git a/spec/lib/gitlab/pagination/keyset/order_spec.rb b/spec/lib/gitlab/pagination/keyset/order_spec.rb
index 665f790ee47..06a8aee1048 100644
--- a/spec/lib/gitlab/pagination/keyset/order_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/order_spec.rb
@@ -417,4 +417,59 @@ RSpec.describe Gitlab::Pagination::Keyset::Order do
end
end
end
+
+ context 'extract and apply cursor attributes' do
+ let(:model) { Project.new(id: 100) }
+ let(:scope) { Project.all }
+
+ shared_examples 'cursor attribute examples' do
+ describe '#cursor_attributes_for_node' do
+ it { expect(order.cursor_attributes_for_node(model)).to eq({ id: '100' }.with_indifferent_access) }
+ end
+
+ describe '#apply_cursor_conditions' do
+ context 'when params with string keys are passed' do
+ subject(:sql) { order.apply_cursor_conditions(scope, { 'id' => '100' }).to_sql }
+
+ it { is_expected.to include('"projects"."id" < 100)') }
+ end
+
+ context 'when params with symbol keys are passed' do
+ subject(:sql) { order.apply_cursor_conditions(scope, { id: '100' }).to_sql }
+
+ it { is_expected.to include('"projects"."id" < 100)') }
+ end
+ end
+ end
+
+ context 'when string attribute name is given' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: 'id',
+ order_expression: Project.arel_table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ it_behaves_like 'cursor attribute examples'
+ end
+
+ context 'when symbol attribute name is given' do
+ let(:order) do
+ Gitlab::Pagination::Keyset::Order.build([
+ Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
+ attribute_name: :id,
+ order_expression: Project.arel_table['id'].desc,
+ nullable: :not_nullable,
+ distinct: true
+ )
+ ])
+ end
+
+ it_behaves_like 'cursor attribute examples'
+ end
+ end
end
diff --git a/spec/lib/gitlab/pagination/offset_header_builder_with_controller_spec.rb b/spec/lib/gitlab/pagination/offset_header_builder_with_controller_spec.rb
new file mode 100644
index 00000000000..85e4b621e83
--- /dev/null
+++ b/spec/lib/gitlab/pagination/offset_header_builder_with_controller_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Pagination::OffsetHeaderBuilder, type: :controller do
+ controller(ActionController::Base) do
+ def index
+ relation = Project.where(archived: params[:archived]).page(params[:page]).order(:id).per(1)
+
+ params_for_pagination = { archived: params[:archived], page: params[:page] }
+
+ Gitlab::Pagination::OffsetHeaderBuilder.new(
+ request_context: self,
+ per_page: relation.limit_value,
+ page: relation.current_page,
+ next_page: relation.next_page,
+ prev_page: relation.prev_page,
+ params: params_for_pagination
+ ).execute(exclude_total_headers: true, data_without_counts: true)
+
+ render json: relation.map(&:id)
+ end
+ end
+
+ let_it_be(:projects) { create_list(:project, 2, archived: true).sort_by(&:id) }
+
+ describe 'pagination' do
+ it 'returns correct result for the first page' do
+ get :index, params: { page: 1, archived: true }
+
+ expect(json_response).to eq([projects.first.id])
+ end
+
+ it 'returns correct result for the second page' do
+ get :index, params: { page: 2, archived: true }
+
+ expect(json_response).to eq([projects.last.id])
+ end
+ end
+
+ describe 'pagination headers' do
+ it 'adds next page header' do
+ get :index, params: { page: 1, archived: true }
+
+ expect(response.headers['X-Next-Page']).to eq('2')
+ end
+
+ it 'adds only the specified params to the link' do
+ get :index, params: { page: 1, archived: true, some_param: '1' }
+
+ expect(response.headers['Link']).not_to include('some_param')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/phabricator_import/cache/map_spec.rb b/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
index 08ac85c2625..157b3ca56c9 100644
--- a/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
+++ b/spec/lib/gitlab/phabricator_import/cache/map_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::PhabricatorImport::Cache::Map, :clean_gitlab_redis_cache do
let_it_be(:project) { create(:project) }
+
let(:redis) { Gitlab::Redis::Cache }
subject(:map) { described_class.new(project) }
diff --git a/spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb b/spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb
index 3cb15f08627..0539bacba44 100644
--- a/spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb
+++ b/spec/lib/gitlab/phabricator_import/issues/task_importer_spec.rb
@@ -3,6 +3,7 @@ require 'spec_helper'
RSpec.describe Gitlab::PhabricatorImport::Issues::TaskImporter do
let_it_be(:project) { create(:project) }
+
let(:task) do
Gitlab::PhabricatorImport::Representation::Task.new(
{
diff --git a/spec/lib/gitlab/profiler_spec.rb b/spec/lib/gitlab/profiler_spec.rb
index 89917e515d0..48e2a2e9794 100644
--- a/spec/lib/gitlab/profiler_spec.rb
+++ b/spec/lib/gitlab/profiler_spec.rb
@@ -78,13 +78,8 @@ RSpec.describe Gitlab::Profiler do
end
it 'strips out the private token' do
- expect(custom_logger).to receive(:add) do |severity, _progname, message|
- next if message.include?('spec/')
-
- expect(severity).to eq(Logger::DEBUG)
- expect(message).to include('public').and include(described_class::FILTERED_STRING)
- expect(message).not_to include(private_token)
- end.at_least(1) # This spec could be wrapped in more blocks in the future
+ allow(custom_logger).to receive(:add).and_call_original
+ expect(custom_logger).to receive(:add).with(Logger::DEBUG, anything, 'public [FILTERED]').at_least(1)
custom_logger.debug("public #{private_token}")
end
diff --git a/spec/lib/gitlab/prometheus/adapter_spec.rb b/spec/lib/gitlab/prometheus/adapter_spec.rb
index 4762e4ad108..9d4806ea73b 100644
--- a/spec/lib/gitlab/prometheus/adapter_spec.rb
+++ b/spec/lib/gitlab/prometheus/adapter_spec.rb
@@ -32,6 +32,14 @@ RSpec.describe Gitlab::Prometheus::Adapter do
context "prometheus service can't execute queries" do
let(:prometheus_service) { double(:prometheus_service, can_query?: false) }
+ context 'with cluster with prometheus integration' do
+ let!(:prometheus_integration) { create(:clusters_integrations_prometheus, cluster: cluster) }
+
+ it 'returns the integration' do
+ expect(subject.prometheus_adapter).to eq(prometheus_integration)
+ end
+ end
+
context 'with cluster with prometheus not available' do
let!(:prometheus) { create(:clusters_applications_prometheus, :installable, cluster: cluster) }
@@ -46,6 +54,14 @@ RSpec.describe Gitlab::Prometheus::Adapter do
it 'returns application handling all environments' do
expect(subject.prometheus_adapter).to eq(prometheus)
end
+
+ context 'with cluster with prometheus integration' do
+ let!(:prometheus_integration) { create(:clusters_integrations_prometheus, cluster: cluster) }
+
+ it 'returns the integration instead' do
+ expect(subject.prometheus_adapter).to eq(prometheus_integration)
+ end
+ end
end
context 'with cluster without prometheus installed' do
diff --git a/spec/lib/gitlab/query_limiting/transaction_spec.rb b/spec/lib/gitlab/query_limiting/transaction_spec.rb
index 40804736b86..76bb2b4c4cc 100644
--- a/spec/lib/gitlab/query_limiting/transaction_spec.rb
+++ b/spec/lib/gitlab/query_limiting/transaction_spec.rb
@@ -68,11 +68,15 @@ RSpec.describe Gitlab::QueryLimiting::Transaction do
it 'increments the number of executed queries' do
transaction = described_class.new
- expect(transaction.count).to be_zero
+ expect { transaction.increment }.to change { transaction.count }.by(1)
+ end
+
+ it 'does not increment the number of executed queries when query limiting is disabled' do
+ transaction = described_class.new
- transaction.increment
+ allow(transaction).to receive(:enabled?).and_return(false)
- expect(transaction.count).to eq(1)
+ expect { transaction.increment }.not_to change { transaction.count }
end
end
diff --git a/spec/lib/gitlab/query_limiting_spec.rb b/spec/lib/gitlab/query_limiting_spec.rb
index 4f70c65adca..fbb12629056 100644
--- a/spec/lib/gitlab/query_limiting_spec.rb
+++ b/spec/lib/gitlab/query_limiting_spec.rb
@@ -2,81 +2,85 @@
require 'spec_helper'
-RSpec.describe Gitlab::QueryLimiting do
- describe '.enable?' do
+RSpec.describe Gitlab::QueryLimiting, :request_store do
+ describe '.enabled_for_env?' do
it 'returns true in a test environment' do
- expect(described_class.enable?).to eq(true)
+ expect(described_class.enabled_for_env?).to eq(true)
end
it 'returns true in a development environment' do
stub_rails_env('development')
- expect(described_class.enable?).to eq(true)
+ expect(described_class.enabled_for_env?).to eq(true)
end
it 'returns false on GitLab.com' do
stub_rails_env('production')
allow(Gitlab).to receive(:com?).and_return(true)
- expect(described_class.enable?).to eq(false)
+ expect(described_class.enabled_for_env?).to eq(false)
end
it 'returns false in a non-GitLab.com environment' do
allow(Gitlab).to receive(:com?).and_return(false)
stub_rails_env('production')
- expect(described_class.enable?).to eq(false)
+ expect(described_class.enabled_for_env?).to eq(false)
end
end
- describe '.whitelist' do
- it 'raises ArgumentError when an invalid issue URL is given' do
- expect { described_class.whitelist('foo') }
- .to raise_error(ArgumentError)
+ shared_context 'disable and enable' do |result|
+ let(:transaction) { Gitlab::QueryLimiting::Transaction.new }
+ let(:code) do
+ proc do
+ 2.times { User.count }
+ end
end
- context 'without a transaction' do
- it 'does nothing' do
- expect { described_class.whitelist('https://example.com') }
- .not_to raise_error
- end
+ before do
+ allow(Gitlab::QueryLimiting::Transaction)
+ .to receive(:current)
+ .and_return(transaction)
end
+ end
- context 'with a transaction' do
- let(:transaction) { Gitlab::QueryLimiting::Transaction.new }
+ describe '.disable!' do
+ include_context 'disable and enable'
- before do
- allow(Gitlab::QueryLimiting::Transaction)
- .to receive(:current)
- .and_return(transaction)
- end
+ it 'raises an ArgumentError when an invalid issue URL is given' do
+ expect { described_class.disable!('foo') }
+ .to raise_error(ArgumentError)
+ end
- it 'does not increment the number of SQL queries executed in the block' do
- before = transaction.count
+ it 'stops the number of SQL queries from being incremented' do
+ described_class.disable!('https://example.com')
- described_class.whitelist('https://example.com')
+ expect { code.call }.not_to change { transaction.count }
+ end
+ end
- 2.times do
- User.count
- end
+ describe '.enable!' do
+ include_context 'disable and enable'
- expect(transaction.count).to eq(before)
- end
+ it 'allows the number of SQL queries to be incremented' do
+ described_class.enable!
- it 'whitelists when enabled' do
- described_class.whitelist('https://example.com')
+ expect { code.call }.to change { transaction.count }.by(2)
+ end
+ end
- expect(transaction.whitelisted).to eq(true)
- end
+ describe '#enabled?' do
+ it 'returns true when enabled' do
+ Gitlab::SafeRequestStore[:query_limiting_disabled] = nil
- it 'does not whitelist when disabled' do
- allow(described_class).to receive(:enable?).and_return(false)
+ expect(described_class).to be_enabled
+ end
- described_class.whitelist('https://example.com')
+ it 'returns false when disabled' do
+ Gitlab::SafeRequestStore[:query_limiting_disabled] = true
- expect(transaction.whitelisted).to eq(false)
- end
+ expect(described_class).not_to be_enabled
end
end
end
diff --git a/spec/lib/gitlab/quick_actions/command_definition_spec.rb b/spec/lib/gitlab/quick_actions/command_definition_spec.rb
index d63c21954f2..73629ce3da2 100644
--- a/spec/lib/gitlab/quick_actions/command_definition_spec.rb
+++ b/spec/lib/gitlab/quick_actions/command_definition_spec.rb
@@ -127,10 +127,10 @@ RSpec.describe Gitlab::QuickActions::CommandDefinition do
subject.condition_block = proc { false }
end
- it "doesn't execute the command" do
+ it "counts the command as executed" do
subject.execute(context, nil)
- expect(context.commands_executed_count).to be_nil
+ expect(context.commands_executed_count).to eq(1)
expect(context.run).to be false
end
end
@@ -238,8 +238,8 @@ RSpec.describe Gitlab::QuickActions::CommandDefinition do
subject.condition_block = proc { false }
end
- it 'returns nil' do
- expect(subject.execute_message({}, nil)).to be_nil
+ it 'returns an error message' do
+ expect(subject.execute_message({}, nil)).to eq('Could not apply command command.')
end
end
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index 1aca3dae41b..f62a3c74005 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -667,7 +667,14 @@ RSpec.describe Gitlab::Regex do
it { is_expected.to match('1.2.3') }
it { is_expected.to match('1.3.350') }
- it { is_expected.not_to match('1.3.350-20201230123456') }
+ it { is_expected.to match('1.3.350-20201230123456') }
+ it { is_expected.to match('1.2.3-rc1') }
+ it { is_expected.to match('1.2.3g') }
+ it { is_expected.to match('1.2') }
+ it { is_expected.to match('1.2.bananas') }
+ it { is_expected.to match('v1.2.4-build') }
+ it { is_expected.to match('d50d836eb3de6177ce6c7a5482f27f9c2c84b672') }
+ it { is_expected.to match('this_is_a_string_only') }
it { is_expected.not_to match('..1.2.3') }
it { is_expected.not_to match(' 1.2.3') }
it { is_expected.not_to match("1.2.3 \r\t") }
diff --git a/spec/lib/gitlab/repository_cache_adapter_spec.rb b/spec/lib/gitlab/repository_cache_adapter_spec.rb
index 625dcf11546..d14c3f44c6f 100644
--- a/spec/lib/gitlab/repository_cache_adapter_spec.rb
+++ b/spec/lib/gitlab/repository_cache_adapter_spec.rb
@@ -29,10 +29,19 @@ RSpec.describe Gitlab::RepositoryCacheAdapter do
def project
end
+
+ def cached_methods
+ [:letters]
+ end
+
+ def exists?
+ true
+ end
end
end
let(:fake_repository) { klass.new }
+ let(:redis_set_cache) { fake_repository.redis_set_cache }
context 'with an existing repository' do
it 'caches the output, sorting the results' do
@@ -42,47 +51,43 @@ RSpec.describe Gitlab::RepositoryCacheAdapter do
expect(fake_repository.letters).to eq(%w(a b c))
end
- expect(fake_repository.redis_set_cache.exist?(:letters)).to eq(true)
+ expect(redis_set_cache.exist?(:letters)).to eq(true)
expect(fake_repository.instance_variable_get(:@letters)).to eq(%w(a b c))
end
context 'membership checks' do
context 'when the cache key does not exist' do
it 'calls the original method and populates the cache' do
- expect(fake_repository.redis_set_cache.exist?(:letters)).to eq(false)
+ expect(redis_set_cache.exist?(:letters)).to eq(false)
expect(fake_repository).to receive(:_uncached_letters).once.and_call_original
# This populates the cache and memoizes the full result
expect(fake_repository.letters_include?('a')).to eq(true)
expect(fake_repository.letters_include?('d')).to eq(false)
- expect(fake_repository.redis_set_cache.exist?(:letters)).to eq(true)
+ expect(redis_set_cache.exist?(:letters)).to eq(true)
end
end
context 'when the cache key exists' do
before do
- fake_repository.redis_set_cache.write(:letters, %w(b a c))
+ redis_set_cache.write(:letters, %w(b a c))
end
- it 'calls #include? on the set cache' do
- expect(fake_repository.redis_set_cache)
- .to receive(:include?).with(:letters, 'a').and_call_original
- expect(fake_repository.redis_set_cache)
- .to receive(:include?).with(:letters, 'd').and_call_original
+ it 'calls #try_include? on the set cache' do
+ expect(redis_set_cache).to receive(:try_include?).with(:letters, 'a').and_call_original
+ expect(redis_set_cache).to receive(:try_include?).with(:letters, 'd').and_call_original
expect(fake_repository.letters_include?('a')).to eq(true)
expect(fake_repository.letters_include?('d')).to eq(false)
end
it 'memoizes the result' do
- expect(fake_repository.redis_set_cache)
- .to receive(:include?).once.and_call_original
+ expect(redis_set_cache).to receive(:try_include?).once.and_call_original
expect(fake_repository.letters_include?('a')).to eq(true)
expect(fake_repository.letters_include?('a')).to eq(true)
- expect(fake_repository.redis_set_cache)
- .to receive(:include?).once.and_call_original
+ expect(redis_set_cache).to receive(:try_include?).once.and_call_original
expect(fake_repository.letters_include?('d')).to eq(false)
expect(fake_repository.letters_include?('d')).to eq(false)
diff --git a/spec/lib/gitlab/repository_set_cache_spec.rb b/spec/lib/gitlab/repository_set_cache_spec.rb
index 07f4d7c462d..eaecbb0233d 100644
--- a/spec/lib/gitlab/repository_set_cache_spec.rb
+++ b/spec/lib/gitlab/repository_set_cache_spec.rb
@@ -124,6 +124,18 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
end
end
+ describe '#search' do
+ subject do
+ cache.search(:foo, 'val*') do
+ %w[value helloworld notvalmatch]
+ end
+ end
+
+ it 'returns search pattern matches from the key' do
+ is_expected.to contain_exactly('value')
+ end
+ end
+
describe '#include?' do
it 'checks inclusion in the Redis set' do
cache.write(:foo, ['value'])
@@ -132,4 +144,15 @@ RSpec.describe Gitlab::RepositorySetCache, :clean_gitlab_redis_cache do
expect(cache.include?(:foo, 'bar')).to be(false)
end
end
+
+ describe '#try_include?' do
+ it 'checks existence of the redis set and inclusion' do
+ expect(cache.try_include?(:foo, 'value')).to eq([false, false])
+
+ cache.write(:foo, ['value'])
+
+ expect(cache.try_include?(:foo, 'value')).to eq([true, true])
+ expect(cache.try_include?(:foo, 'bar')).to eq([false, true])
+ end
+ end
end
diff --git a/spec/lib/gitlab/sanitizers/exif_spec.rb b/spec/lib/gitlab/sanitizers/exif_spec.rb
index 63b2f3fc693..fbda9e6d0be 100644
--- a/spec/lib/gitlab/sanitizers/exif_spec.rb
+++ b/spec/lib/gitlab/sanitizers/exif_spec.rb
@@ -113,7 +113,7 @@ RSpec.describe Gitlab::Sanitizers::Exif do
it 'cleans only jpg/tiff images with the correct mime types' do
expect(sanitizer).not_to receive(:extra_tags)
- expect { subject }.to raise_error(RuntimeError, /File type text\/plain not supported/)
+ expect { subject }.to raise_error(RuntimeError, %r{File type text/plain not supported})
end
end
end
diff --git a/spec/lib/gitlab/search_context/builder_spec.rb b/spec/lib/gitlab/search_context/builder_spec.rb
index 5b4190fc67e..079477115bb 100644
--- a/spec/lib/gitlab/search_context/builder_spec.rb
+++ b/spec/lib/gitlab/search_context/builder_spec.rb
@@ -127,6 +127,35 @@ RSpec.describe Gitlab::SearchContext::Builder, type: :controller do
it { is_expected.to be_for_group }
it { is_expected.to be_search_context(group: group) }
+
+ context 'with group scope' do
+ let(:action_name) { '' }
+
+ before do
+ allow(controller).to receive(:controller_name).and_return('groups')
+ allow(controller).to receive(:action_name).and_return(action_name)
+ end
+
+ it 'returns nil without groups controller action' do
+ expect(subject.scope).to be(nil)
+ end
+
+ context 'when on issues scope' do
+ let(:action_name) { 'issues' }
+
+ it 'search context returns issues scope' do
+ expect(subject.scope).to be('issues')
+ end
+ end
+
+ context 'when on merge requests scope' do
+ let(:action_name) { 'merge_requests' }
+
+ it 'search context returns merge requests scope' do
+ expect(subject.scope).to be('merge_requests')
+ end
+ end
+ end
end
end
diff --git a/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb b/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb
index 74834fb9014..43cbe71dd6b 100644
--- a/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb
+++ b/spec/lib/gitlab/sidekiq_cluster/cli_spec.rb
@@ -214,7 +214,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI do
expect(Gitlab::SidekiqCluster).not_to receive(:start)
expect { cli.run(%W(#{flag} unknown_field=chatops)) }
- .to raise_error(Gitlab::SidekiqConfig::CliMethods::QueryError)
+ .to raise_error(Gitlab::SidekiqConfig::WorkerMatcher::QueryError)
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb b/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb
index 01e7c06249a..bc63289a344 100644
--- a/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb
+++ b/spec/lib/gitlab/sidekiq_config/cli_methods_spec.rb
@@ -1,7 +1,6 @@
# frozen_string_literal: true
require 'fast_spec_helper'
-require 'rspec-parameterized'
RSpec.describe Gitlab::SidekiqConfig::CliMethods do
let(:dummy_root) { '/tmp/' }
@@ -122,10 +121,8 @@ RSpec.describe Gitlab::SidekiqConfig::CliMethods do
end
end
- describe '.query_workers' do
- using RSpec::Parameterized::TableSyntax
-
- let(:queues) do
+ describe '.query_queues' do
+ let(:worker_metadatas) do
[
{
name: 'a',
@@ -162,79 +159,16 @@ RSpec.describe Gitlab::SidekiqConfig::CliMethods do
]
end
- context 'with valid input' do
- where(:query, :selected_queues) do
- # feature_category
- 'feature_category=category_a' | %w(a a:2)
- 'feature_category=category_a,category_c' | %w(a a:2 c)
- 'feature_category=category_a|feature_category=category_c' | %w(a a:2 c)
- 'feature_category!=category_a' | %w(b c)
-
- # has_external_dependencies
- 'has_external_dependencies=true' | %w(b)
- 'has_external_dependencies=false' | %w(a a:2 c)
- 'has_external_dependencies=true,false' | %w(a a:2 b c)
- 'has_external_dependencies=true|has_external_dependencies=false' | %w(a a:2 b c)
- 'has_external_dependencies!=true' | %w(a a:2 c)
-
- # urgency
- 'urgency=high' | %w(a:2 b)
- 'urgency=low' | %w(a)
- 'urgency=high,low,throttled' | %w(a a:2 b c)
- 'urgency=low|urgency=throttled' | %w(a c)
- 'urgency!=high' | %w(a c)
-
- # name
- 'name=a' | %w(a)
- 'name=a,b' | %w(a b)
- 'name=a,a:2|name=b' | %w(a a:2 b)
- 'name!=a,a:2' | %w(b c)
-
- # resource_boundary
- 'resource_boundary=memory' | %w(b c)
- 'resource_boundary=memory,cpu' | %w(a b c)
- 'resource_boundary=memory|resource_boundary=cpu' | %w(a b c)
- 'resource_boundary!=memory,cpu' | %w(a:2)
-
- # tags
- 'tags=no_disk_io' | %w(a b)
- 'tags=no_disk_io,git_access' | %w(a a:2 b)
- 'tags=no_disk_io|tags=git_access' | %w(a a:2 b)
- 'tags=no_disk_io&tags=git_access' | %w(a)
- 'tags!=no_disk_io' | %w(a:2 c)
- 'tags!=no_disk_io,git_access' | %w(c)
- 'tags=unknown_tag' | []
- 'tags!=no_disk_io' | %w(a:2 c)
- 'tags!=no_disk_io,git_access' | %w(c)
- 'tags!=unknown_tag' | %w(a a:2 b c)
-
- # combinations
- 'feature_category=category_a&urgency=high' | %w(a:2)
- 'feature_category=category_a&urgency=high|feature_category=category_c' | %w(a:2 c)
- end
+ let(:worker_matcher) { double(:WorkerMatcher) }
+ let(:query) { 'feature_category=category_a,category_c' }
- with_them do
- it do
- expect(described_class.query_workers(query, queues))
- .to match_array(selected_queues)
- end
- end
+ before do
+ allow(::Gitlab::SidekiqConfig::WorkerMatcher).to receive(:new).with(query).and_return(worker_matcher)
+ allow(worker_matcher).to receive(:match?).and_return(true, true, false, true)
end
- context 'with invalid input' do
- where(:query, :error) do
- 'feature_category="category_a"' | described_class::InvalidTerm
- 'feature_category=' | described_class::InvalidTerm
- 'feature_category~category_a' | described_class::InvalidTerm
- 'worker_name=a' | described_class::UnknownPredicate
- end
-
- with_them do
- it do
- expect { described_class.query_workers(query, queues) }
- .to raise_error(error)
- end
- end
+ it 'returns the queue names of matched workers' do
+ expect(described_class.query_queues(query, worker_metadatas)).to match(%w(a a:2 c))
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb b/spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb
new file mode 100644
index 00000000000..75e9c8c100b
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_config/worker_matcher_spec.rb
@@ -0,0 +1,129 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rspec-parameterized'
+
+RSpec.describe Gitlab::SidekiqConfig::WorkerMatcher do
+ describe '#match?' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:worker_metadatas) do
+ [
+ {
+ name: 'a',
+ feature_category: :category_a,
+ has_external_dependencies: false,
+ urgency: :low,
+ resource_boundary: :cpu,
+ tags: [:no_disk_io, :git_access]
+ },
+ {
+ name: 'a:2',
+ feature_category: :category_a,
+ has_external_dependencies: false,
+ urgency: :high,
+ resource_boundary: :none,
+ tags: [:git_access]
+ },
+ {
+ name: 'b',
+ feature_category: :category_b,
+ has_external_dependencies: true,
+ urgency: :high,
+ resource_boundary: :memory,
+ tags: [:no_disk_io]
+ },
+ {
+ name: 'c',
+ feature_category: :category_c,
+ has_external_dependencies: false,
+ urgency: :throttled,
+ resource_boundary: :memory,
+ tags: []
+ }
+ ]
+ end
+
+ context 'with valid input' do
+ where(:query, :expected_metadatas) do
+ # feature_category
+ 'feature_category=category_a' | %w(a a:2)
+ 'feature_category=category_a,category_c' | %w(a a:2 c)
+ 'feature_category=category_a|feature_category=category_c' | %w(a a:2 c)
+ 'feature_category!=category_a' | %w(b c)
+
+ # has_external_dependencies
+ 'has_external_dependencies=true' | %w(b)
+ 'has_external_dependencies=false' | %w(a a:2 c)
+ 'has_external_dependencies=true,false' | %w(a a:2 b c)
+ 'has_external_dependencies=true|has_external_dependencies=false' | %w(a a:2 b c)
+ 'has_external_dependencies!=true' | %w(a a:2 c)
+
+ # urgency
+ 'urgency=high' | %w(a:2 b)
+ 'urgency=low' | %w(a)
+ 'urgency=high,low,throttled' | %w(a a:2 b c)
+ 'urgency=low|urgency=throttled' | %w(a c)
+ 'urgency!=high' | %w(a c)
+
+ # name
+ 'name=a' | %w(a)
+ 'name=a,b' | %w(a b)
+ 'name=a,a:2|name=b' | %w(a a:2 b)
+ 'name!=a,a:2' | %w(b c)
+
+ # resource_boundary
+ 'resource_boundary=memory' | %w(b c)
+ 'resource_boundary=memory,cpu' | %w(a b c)
+ 'resource_boundary=memory|resource_boundary=cpu' | %w(a b c)
+ 'resource_boundary!=memory,cpu' | %w(a:2)
+
+ # tags
+ 'tags=no_disk_io' | %w(a b)
+ 'tags=no_disk_io,git_access' | %w(a a:2 b)
+ 'tags=no_disk_io|tags=git_access' | %w(a a:2 b)
+ 'tags=no_disk_io&tags=git_access' | %w(a)
+ 'tags!=no_disk_io' | %w(a:2 c)
+ 'tags!=no_disk_io,git_access' | %w(c)
+ 'tags=unknown_tag' | []
+ 'tags!=no_disk_io' | %w(a:2 c)
+ 'tags!=no_disk_io,git_access' | %w(c)
+ 'tags!=unknown_tag' | %w(a a:2 b c)
+
+ # combinations
+ 'feature_category=category_a&urgency=high' | %w(a:2)
+ 'feature_category=category_a&urgency=high|feature_category=category_c' | %w(a:2 c)
+
+ # Match all
+ '*' | %w(a a:2 b c)
+ end
+
+ with_them do
+ it do
+ matched_metadatas = worker_metadatas.select do |metadata|
+ described_class.new(query).match?(metadata)
+ end
+ expect(matched_metadatas.map { |m| m[:name] }).to match_array(expected_metadatas)
+ end
+ end
+ end
+
+ context 'with invalid input' do
+ where(:query, :error) do
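+ # Quoted or empty values and unsupported operators raise InvalidTerm; an unknown predicate name raises UnknownPredicate.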
+ 'feature_category="category_a"' | described_class::InvalidTerm
+ 'feature_category=' | described_class::InvalidTerm
+ 'feature_category~category_a' | described_class::InvalidTerm
+ 'worker_name=a' | described_class::UnknownPredicate
+ end
+
+ with_them do
+ it do
+ worker_metadatas.each do |metadata|
+ expect { described_class.new(query).match?(metadata) }
+ .to raise_error(error)
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
index 3e8e117ec71..537844df72f 100644
--- a/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
+++ b/spec/lib/gitlab/sidekiq_logging/structured_logger_spec.rb
@@ -10,80 +10,7 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
describe '#call', :request_store do
- let(:timestamp) { Time.iso8601('2018-01-01T12:00:00.000Z') }
- let(:created_at) { timestamp - 1.second }
- let(:scheduling_latency_s) { 1.0 }
-
- let(:job) do
- {
- "class" => "TestWorker",
- "args" => [1234, 'hello', { 'key' => 'value' }],
- "retry" => false,
- "queue" => "cronjob:test_queue",
- "queue_namespace" => "cronjob",
- "jid" => "da883554ee4fe414012f5f42",
- "created_at" => created_at.to_f,
- "enqueued_at" => created_at.to_f,
- "correlation_id" => 'cid',
- "error_message" => "wrong number of arguments (2 for 3)",
- "error_class" => "ArgumentError",
- "error_backtrace" => []
- }
- end
-
- let(:logger) { double }
- let(:clock_realtime_start) { 0.222222299 }
- let(:clock_realtime_end) { 1.333333799 }
- let(:clock_thread_cputime_start) { 0.222222299 }
- let(:clock_thread_cputime_end) { 1.333333799 }
- let(:start_payload) do
- job.except('error_backtrace', 'error_class', 'error_message').merge(
- 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: start',
- 'job_status' => 'start',
- 'pid' => Process.pid,
- 'created_at' => created_at.to_f,
- 'enqueued_at' => created_at.to_f,
- 'scheduling_latency_s' => scheduling_latency_s,
- 'job_size_bytes' => be > 0
- )
- end
-
- let(:end_payload) do
- start_payload.merge(
- 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: done: 0.0 sec',
- 'job_status' => 'done',
- 'duration_s' => 0.0,
- 'completed_at' => timestamp.to_f,
- 'cpu_s' => 1.111112,
- 'db_duration_s' => 0.0,
- 'db_cached_count' => 0,
- 'db_count' => 0,
- 'db_write_count' => 0
- )
- end
-
- let(:exception_payload) do
- end_payload.merge(
- 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: fail: 0.0 sec',
- 'job_status' => 'fail',
- 'error_class' => 'ArgumentError',
- 'error_message' => 'Something went wrong',
- 'error_backtrace' => be_a(Array).and(be_present)
- )
- end
-
- before do
- allow(Sidekiq).to receive(:logger).and_return(logger)
-
- allow(subject).to receive(:current_time).and_return(timestamp.to_f)
-
- allow(Process).to receive(:clock_gettime).with(Process::CLOCK_REALTIME, :float_second)
- .and_return(clock_realtime_start, clock_realtime_end)
- allow(Process).to receive(:clock_gettime).with(Process::CLOCK_THREAD_CPUTIME_ID, :float_second)
- .and_return(clock_thread_cputime_start, clock_thread_cputime_end)
- end
-
- subject { described_class.new }
+ include_context 'structured_logger'
context 'with SIDEKIQ_LOG_ARGUMENTS enabled' do
before do
@@ -283,14 +210,19 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end_payload.merge(timing_data.stringify_keys)
end
- it 'logs with Gitaly and Rugged timing data' do
+ before do
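+ # Wrap add_instrumentation_data so the Gitaly and Rugged timing data ends up in the payload the logger receives.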
+ allow(::Gitlab::InstrumentationHelper).to receive(:add_instrumentation_data).and_wrap_original do |method, values|
+ method.call(values)
+ values.merge!(timing_data)
+ end
+ end
+
+ it 'logs with Gitaly and Rugged timing data', :aggregate_failures do
Timecop.freeze(timestamp) do
expect(logger).to receive(:info).with(start_payload).ordered
expect(logger).to receive(:info).with(expected_end_payload).ordered
- call_subject(job, 'test_queue') do
- job.merge!(timing_data)
- end
+ call_subject(job, 'test_queue') { }
end
end
end
@@ -361,15 +293,6 @@ RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
end
end
end
-
- def call_subject(job, queue)
- # This structured logger strongly depends on execution of `InstrumentationLogger`
- subject.call(job, queue) do
- ::Gitlab::SidekiqMiddleware::InstrumentationLogger.new.call('worker', job, queue) do
- yield
- end
- end
- end
end
describe '#add_time_keys!' do
diff --git a/spec/lib/gitlab/sidekiq_middleware/admin_mode/client_spec.rb b/spec/lib/gitlab/sidekiq_middleware/admin_mode/client_spec.rb
index 3ba08455d01..9d5d5f28eab 100644
--- a/spec/lib/gitlab/sidekiq_middleware/admin_mode/client_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/admin_mode/client_spec.rb
@@ -74,9 +74,9 @@ RSpec.describe Gitlab::SidekiqMiddleware::AdminMode::Client, :request_store do
end
end
- context 'admin mode feature disabled' do
+ context 'admin mode setting disabled' do
before do
- stub_feature_flags(user_mode_in_session: false)
+ stub_application_setting(admin_mode: false)
end
it 'yields block' do
diff --git a/spec/lib/gitlab/sidekiq_middleware/admin_mode/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/admin_mode/server_spec.rb
index e8322b11875..3ab1a9cd2f4 100644
--- a/spec/lib/gitlab/sidekiq_middleware/admin_mode/server_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/admin_mode/server_spec.rb
@@ -52,9 +52,9 @@ RSpec.describe Gitlab::SidekiqMiddleware::AdminMode::Server, :request_store do
end
end
- context 'admin mode feature disabled' do
+ context 'admin mode setting disabled' do
before do
- stub_feature_flags(user_mode_in_session: false)
+ stub_application_setting(admin_mode: false)
end
it 'yields block' do
diff --git a/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
index e2b36125b4e..82ca84f0697 100644
--- a/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/client_metrics_spec.rb
@@ -3,156 +3,33 @@
require 'spec_helper'
RSpec.describe Gitlab::SidekiqMiddleware::ClientMetrics do
- context "with worker attribution" do
- subject { described_class.new }
+ shared_examples "a metrics middleware" do
+ context "with mocked prometheus" do
+ let(:enqueued_jobs_metric) { double('enqueued jobs metric', increment: true) }
- let(:queue) { :test }
- let(:worker_class) { worker.class }
- let(:job) { {} }
- let(:default_labels) do
- { queue: queue.to_s,
- worker: worker_class.to_s,
- boundary: "",
- external_dependencies: "no",
- feature_category: "",
- urgency: "low" }
- end
-
- shared_examples "a metrics client middleware" do
- context "with mocked prometheus" do
- let(:enqueued_jobs_metric) { double('enqueued jobs metric', increment: true) }
-
- before do
- allow(Gitlab::Metrics).to receive(:counter).with(described_class::ENQUEUED, anything).and_return(enqueued_jobs_metric)
- end
-
- describe '#call' do
- it 'yields block' do
- expect { |b| subject.call(worker_class, job, :test, double, &b) }.to yield_control.once
- end
-
- it 'increments enqueued jobs metric with correct labels when worker is a string of the class' do
- expect(enqueued_jobs_metric).to receive(:increment).with(labels, 1)
-
- subject.call(worker_class.to_s, job, :test, double) { nil }
- end
-
- it 'increments enqueued jobs metric with correct labels' do
- expect(enqueued_jobs_metric).to receive(:increment).with(labels, 1)
-
- subject.call(worker_class, job, :test, double) { nil }
- end
- end
- end
- end
-
- context "when workers are not attributed" do
before do
- stub_const('TestNonAttributedWorker', Class.new)
- TestNonAttributedWorker.class_eval do
- include Sidekiq::Worker
- end
- end
-
- it_behaves_like "a metrics client middleware" do
- let(:worker) { TestNonAttributedWorker.new }
- let(:labels) { default_labels.merge(urgency: "") }
- end
- end
-
- context "when a worker is wrapped into ActiveJob" do
- before do
- stub_const('TestWrappedWorker', Class.new)
- TestWrappedWorker.class_eval do
- include Sidekiq::Worker
- end
- end
-
- it_behaves_like "a metrics client middleware" do
- let(:job) do
- {
- "class" => ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper,
- "wrapped" => TestWrappedWorker
- }
- end
-
- let(:worker) { TestWrappedWorker.new }
- let(:labels) { default_labels.merge(urgency: "") }
- end
- end
-
- context "when workers are attributed" do
- def create_attributed_worker_class(urgency, external_dependencies, resource_boundary, category)
- klass = Class.new do
- include Sidekiq::Worker
- include WorkerAttributes
-
- urgency urgency if urgency
- worker_has_external_dependencies! if external_dependencies
- worker_resource_boundary resource_boundary unless resource_boundary == :unknown
- feature_category category unless category.nil?
- end
- stub_const("TestAttributedWorker", klass)
- end
-
- let(:urgency) { nil }
- let(:external_dependencies) { false }
- let(:resource_boundary) { :unknown }
- let(:feature_category) { nil }
- let(:worker_class) { create_attributed_worker_class(urgency, external_dependencies, resource_boundary, feature_category) }
- let(:worker) { worker_class.new }
-
- context "high urgency" do
- it_behaves_like "a metrics client middleware" do
- let(:urgency) { :high }
- let(:labels) { default_labels.merge(urgency: "high") }
- end
+ allow(Gitlab::Metrics).to receive(:counter).with(described_class::ENQUEUED, anything).and_return(enqueued_jobs_metric)
end
- context "no urgency" do
- it_behaves_like "a metrics client middleware" do
- let(:urgency) { :throttled }
- let(:labels) { default_labels.merge(urgency: "throttled") }
+ describe '#call' do
+ it 'yields block' do
+ expect { |b| subject.call(worker_class, job, :test, double, &b) }.to yield_control.once
end
- end
- context "external dependencies" do
- it_behaves_like "a metrics client middleware" do
- let(:external_dependencies) { true }
- let(:labels) { default_labels.merge(external_dependencies: "yes") }
- end
- end
+ it 'increments enqueued jobs metric with correct labels when worker is a string of the class' do
+ expect(enqueued_jobs_metric).to receive(:increment).with(labels, 1)
- context "cpu boundary" do
- it_behaves_like "a metrics client middleware" do
- let(:resource_boundary) { :cpu }
- let(:labels) { default_labels.merge(boundary: "cpu") }
+ subject.call(worker_class.to_s, job, :test, double) { nil }
end
- end
- context "memory boundary" do
- it_behaves_like "a metrics client middleware" do
- let(:resource_boundary) { :memory }
- let(:labels) { default_labels.merge(boundary: "memory") }
- end
- end
+ it 'increments enqueued jobs metric with correct labels' do
+ expect(enqueued_jobs_metric).to receive(:increment).with(labels, 1)
- context "feature category" do
- it_behaves_like "a metrics client middleware" do
- let(:feature_category) { :authentication }
- let(:labels) { default_labels.merge(feature_category: "authentication") }
- end
- end
-
- context "combined" do
- it_behaves_like "a metrics client middleware" do
- let(:urgency) { :high }
- let(:external_dependencies) { true }
- let(:resource_boundary) { :cpu }
- let(:feature_category) { :authentication }
- let(:labels) { default_labels.merge(urgency: "high", external_dependencies: "yes", boundary: "cpu", feature_category: "authentication") }
+ subject.call(worker_class, job, :test, double) { nil }
end
end
end
end
+
+ it_behaves_like 'metrics middleware with worker attribution'
end
diff --git a/spec/lib/gitlab/sidekiq_middleware/instrumentation_logger_spec.rb b/spec/lib/gitlab/sidekiq_middleware/instrumentation_logger_spec.rb
new file mode 100644
index 00000000000..eb9ba50cdcd
--- /dev/null
+++ b/spec/lib/gitlab/sidekiq_middleware/instrumentation_logger_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::SidekiqMiddleware::InstrumentationLogger do
+ let(:job) { { 'jid' => 123 } }
+ let(:queue) { 'test_queue' }
+ let(:worker) do
+ Class.new do
+ def self.name
+ 'TestDWorker'
+ end
+
+ include ApplicationWorker
+
+ def perform(*args)
+ end
+ end
+ end
+
+ subject { described_class.new }
+
+ before do
+ stub_const('TestWorker', worker)
+ end
+
+ describe '.keys' do
+ it 'returns all available payload keys' do
+ expected_keys = [
+ :cpu_s,
+ :gitaly_calls,
+ :gitaly_duration_s,
+ :rugged_calls,
+ :rugged_duration_s,
+ :elasticsearch_calls,
+ :elasticsearch_duration_s,
+ :elasticsearch_timed_out_count,
+ :mem_objects,
+ :mem_bytes,
+ :mem_mallocs,
+ :redis_calls,
+ :redis_duration_s,
+ :redis_read_bytes,
+ :redis_write_bytes,
+ :redis_action_cable_calls,
+ :redis_action_cable_duration_s,
+ :redis_action_cable_read_bytes,
+ :redis_action_cable_write_bytes,
+ :redis_cache_calls,
+ :redis_cache_duration_s,
+ :redis_cache_read_bytes,
+ :redis_cache_write_bytes,
+ :redis_queues_calls,
+ :redis_queues_duration_s,
+ :redis_queues_read_bytes,
+ :redis_queues_write_bytes,
+ :redis_shared_state_calls,
+ :redis_shared_state_duration_s,
+ :redis_shared_state_read_bytes,
+ :redis_shared_state_write_bytes,
+ :db_count,
+ :db_write_count,
+ :db_cached_count,
+ :external_http_count,
+ :external_http_duration_s,
+ :rack_attack_redis_count,
+ :rack_attack_redis_duration_s
+ ]
+
+ expect(described_class.keys).to include(*expected_keys)
+ end
+ end
+
+ describe '#call', :request_store do
+ let(:instrumentation_values) do
+ {
+ cpu_s: 10,
+ unknown_attribute: 123,
+ db_count: 0,
+ db_cached_count: 0,
+ db_write_count: 0,
+ gitaly_calls: 0,
+ redis_calls: 0
+ }
+ end
+
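+ # add_instrumentation_data is stubbed to merge these values into the job; keys outside described_class.keys (such as :unknown_attribute) should not be logged.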
+ before do
+ allow(::Gitlab::InstrumentationHelper).to receive(:add_instrumentation_data) do |values|
+ values.merge!(instrumentation_values)
+ end
+ end
+
+ it 'merges correct instrumentation data in the job' do
+ expect { |b| subject.call(worker, job, queue, &b) }.to yield_control
+
+ expected_values = instrumentation_values.except(:unknown_attribute)
+
+ expect(job[:instrumentation]).to eq(expected_values)
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
index 71f4f2a3b64..95be76ce351 100644
--- a/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb
@@ -4,296 +4,108 @@ require 'spec_helper'
# rubocop: disable RSpec/MultipleMemoizedHelpers
RSpec.describe Gitlab::SidekiqMiddleware::ServerMetrics do
- context "with worker attribution" do
- subject { described_class.new }
+ shared_examples "a metrics middleware" do
+ context "with mocked prometheus" do
+ include_context 'server metrics with mocked prometheus'
- let(:queue) { :test }
- let(:worker_class) { worker.class }
- let(:job) { {} }
- let(:job_status) { :done }
- let(:labels_with_job_status) { labels.merge(job_status: job_status.to_s) }
- let(:default_labels) do
- { queue: queue.to_s,
- worker: worker_class.to_s,
- boundary: "",
- external_dependencies: "no",
- feature_category: "",
- urgency: "low" }
- end
-
- shared_examples "a metrics middleware" do
- context "with mocked prometheus" do
- let(:concurrency_metric) { double('concurrency metric') }
-
- let(:queue_duration_seconds) { double('queue duration seconds metric') }
- let(:completion_seconds_metric) { double('completion seconds metric') }
- let(:user_execution_seconds_metric) { double('user execution seconds metric') }
- let(:db_seconds_metric) { double('db seconds metric') }
- let(:gitaly_seconds_metric) { double('gitaly seconds metric') }
- let(:failed_total_metric) { double('failed total metric') }
- let(:retried_total_metric) { double('retried total metric') }
- let(:redis_requests_total) { double('redis calls total metric') }
- let(:running_jobs_metric) { double('running jobs metric') }
- let(:redis_seconds_metric) { double('redis seconds metric') }
- let(:elasticsearch_seconds_metric) { double('elasticsearch seconds metric') }
- let(:elasticsearch_requests_total) { double('elasticsearch calls total metric') }
+ describe '#initialize' do
+ it 'sets concurrency metrics' do
+ expect(concurrency_metric).to receive(:set).with({}, Sidekiq.options[:concurrency].to_i)
- before do
- allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_queue_duration_seconds, anything, anything, anything).and_return(queue_duration_seconds)
- allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_completion_seconds, anything, anything, anything).and_return(completion_seconds_metric)
- allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_cpu_seconds, anything, anything, anything).and_return(user_execution_seconds_metric)
- allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_db_seconds, anything, anything, anything).and_return(db_seconds_metric)
- allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_gitaly_seconds, anything, anything, anything).and_return(gitaly_seconds_metric)
- allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_redis_requests_duration_seconds, anything, anything, anything).and_return(redis_seconds_metric)
- allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_elasticsearch_requests_duration_seconds, anything, anything, anything).and_return(elasticsearch_seconds_metric)
- allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_failed_total, anything).and_return(failed_total_metric)
- allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_retried_total, anything).and_return(retried_total_metric)
- allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_redis_requests_total, anything).and_return(redis_requests_total)
- allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_elasticsearch_requests_total, anything).and_return(elasticsearch_requests_total)
- allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_running_jobs, anything, {}, :all).and_return(running_jobs_metric)
- allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_concurrency, anything, {}, :all).and_return(concurrency_metric)
-
- allow(concurrency_metric).to receive(:set)
+ subject
end
+ end
- describe '#initialize' do
- it 'sets concurrency metrics' do
- expect(concurrency_metric).to receive(:set).with({}, Sidekiq.options[:concurrency].to_i)
+ describe '#call' do
+ include_context 'server metrics call'
- subject
- end
+ it 'yields block' do
+ expect { |b| subject.call(worker, job, :test, &b) }.to yield_control.once
end
- describe '#call' do
- let(:thread_cputime_before) { 1 }
- let(:thread_cputime_after) { 2 }
- let(:thread_cputime_duration) { thread_cputime_after - thread_cputime_before }
-
- let(:monotonic_time_before) { 11 }
- let(:monotonic_time_after) { 20 }
- let(:monotonic_time_duration) { monotonic_time_after - monotonic_time_before }
-
- let(:queue_duration_for_job) { 0.01 }
-
- let(:db_duration) { 3 }
- let(:gitaly_duration) { 4 }
-
- let(:redis_calls) { 2 }
- let(:redis_duration) { 0.01 }
-
- let(:elasticsearch_calls) { 8 }
- let(:elasticsearch_duration) { 0.54 }
-
- before do
- allow(subject).to receive(:get_thread_cputime).and_return(thread_cputime_before, thread_cputime_after)
- allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
- allow(Gitlab::InstrumentationHelper).to receive(:queue_duration_for_job).with(job).and_return(queue_duration_for_job)
- allow(ActiveRecord::LogSubscriber).to receive(:runtime).and_return(db_duration * 1000)
-
- job[:gitaly_duration_s] = gitaly_duration
- job[:redis_calls] = redis_calls
- job[:redis_duration_s] = redis_duration
-
- job[:elasticsearch_calls] = elasticsearch_calls
- job[:elasticsearch_duration_s] = elasticsearch_duration
-
- allow(running_jobs_metric).to receive(:increment)
- allow(redis_requests_total).to receive(:increment)
- allow(elasticsearch_requests_total).to receive(:increment)
- allow(queue_duration_seconds).to receive(:observe)
- allow(user_execution_seconds_metric).to receive(:observe)
- allow(db_seconds_metric).to receive(:observe)
- allow(gitaly_seconds_metric).to receive(:observe)
- allow(completion_seconds_metric).to receive(:observe)
- allow(redis_seconds_metric).to receive(:observe)
- allow(elasticsearch_seconds_metric).to receive(:observe)
+ it 'calls BackgroundTransaction' do
+ expect_next_instance_of(Gitlab::Metrics::BackgroundTransaction) do |instance|
+ expect(instance).to receive(:run)
end
- it 'yields block' do
- expect { |b| subject.call(worker, job, :test, &b) }.to yield_control.once
- end
+ subject.call(worker, job, :test) {}
+ end
- it 'calls BackgroundTransaction' do
- expect_next_instance_of(Gitlab::Metrics::BackgroundTransaction) do |instance|
- expect(instance).to receive(:run)
- end
+ it 'sets queue specific metrics' do
+ expect(running_jobs_metric).to receive(:increment).with(labels, -1)
+ expect(running_jobs_metric).to receive(:increment).with(labels, 1)
+ expect(queue_duration_seconds).to receive(:observe).with(labels, queue_duration_for_job) if queue_duration_for_job
+ expect(user_execution_seconds_metric).to receive(:observe).with(labels_with_job_status, thread_cputime_duration)
+ expect(db_seconds_metric).to receive(:observe).with(labels_with_job_status, db_duration)
+ expect(gitaly_seconds_metric).to receive(:observe).with(labels_with_job_status, gitaly_duration)
+ expect(completion_seconds_metric).to receive(:observe).with(labels_with_job_status, monotonic_time_duration)
+ expect(redis_seconds_metric).to receive(:observe).with(labels_with_job_status, redis_duration)
+ expect(elasticsearch_seconds_metric).to receive(:observe).with(labels_with_job_status, elasticsearch_duration)
+ expect(redis_requests_total).to receive(:increment).with(labels_with_job_status, redis_calls)
+ expect(elasticsearch_requests_total).to receive(:increment).with(labels_with_job_status, elasticsearch_calls)
+
+ subject.call(worker, job, :test) { nil }
+ end
- subject.call(worker, job, :test) {}
- end
+ it 'sets the thread name if it was nil' do
+ allow(Thread.current).to receive(:name).and_return(nil)
+ expect(Thread.current).to receive(:name=).with(Gitlab::Metrics::Samplers::ThreadsSampler::SIDEKIQ_WORKER_THREAD_NAME)
- it 'sets queue specific metrics' do
- expect(running_jobs_metric).to receive(:increment).with(labels, -1)
- expect(running_jobs_metric).to receive(:increment).with(labels, 1)
- expect(queue_duration_seconds).to receive(:observe).with(labels, queue_duration_for_job) if queue_duration_for_job
- expect(user_execution_seconds_metric).to receive(:observe).with(labels_with_job_status, thread_cputime_duration)
- expect(db_seconds_metric).to receive(:observe).with(labels_with_job_status, db_duration)
- expect(gitaly_seconds_metric).to receive(:observe).with(labels_with_job_status, gitaly_duration)
- expect(completion_seconds_metric).to receive(:observe).with(labels_with_job_status, monotonic_time_duration)
- expect(redis_seconds_metric).to receive(:observe).with(labels_with_job_status, redis_duration)
- expect(elasticsearch_seconds_metric).to receive(:observe).with(labels_with_job_status, elasticsearch_duration)
- expect(redis_requests_total).to receive(:increment).with(labels_with_job_status, redis_calls)
- expect(elasticsearch_requests_total).to receive(:increment).with(labels_with_job_status, elasticsearch_calls)
+ subject.call(worker, job, :test) { nil }
+ end
- subject.call(worker, job, :test) { nil }
- end
+ context 'when job_duration is not available' do
+ let(:queue_duration_for_job) { nil }
- it 'sets the thread name if it was nil' do
- allow(Thread.current).to receive(:name).and_return(nil)
- expect(Thread.current).to receive(:name=).with(Gitlab::Metrics::Samplers::ThreadsSampler::SIDEKIQ_WORKER_THREAD_NAME)
+ it 'does not set the queue_duration_seconds histogram' do
+ expect(queue_duration_seconds).not_to receive(:observe)
subject.call(worker, job, :test) { nil }
end
+ end
- context 'when job_duration is not available' do
- let(:queue_duration_for_job) { nil }
-
- it 'does not set the queue_duration_seconds histogram' do
- expect(queue_duration_seconds).not_to receive(:observe)
-
- subject.call(worker, job, :test) { nil }
- end
- end
-
- context 'when error is raised' do
- let(:job_status) { :fail }
-
- it 'sets sidekiq_jobs_failed_total and reraises' do
- expect(failed_total_metric).to receive(:increment).with(labels, 1)
-
- expect { subject.call(worker, job, :test) { raise StandardError, "Failed" } }.to raise_error(StandardError, "Failed")
- end
- end
-
- context 'when job is retried' do
- let(:job) { { 'retry_count' => 1 } }
+ context 'when error is raised' do
+ let(:job_status) { :fail }
- it 'sets sidekiq_jobs_retried_total metric' do
- expect(retried_total_metric).to receive(:increment)
+ it 'sets sidekiq_jobs_failed_total and reraises' do
+ expect(failed_total_metric).to receive(:increment).with(labels, 1)
- subject.call(worker, job, :test) { nil }
- end
+ expect { subject.call(worker, job, :test) { raise StandardError, "Failed" } }.to raise_error(StandardError, "Failed")
end
end
- end
- context "with prometheus integrated" do
- describe '#call' do
- it 'yields block' do
- expect { |b| subject.call(worker, job, :test, &b) }.to yield_control.once
- end
+ context 'when job is retried' do
+ let(:job) { { 'retry_count' => 1 } }
- context 'when error is raised' do
- let(:job_status) { :fail }
+ it 'sets sidekiq_jobs_retried_total metric' do
+ expect(retried_total_metric).to receive(:increment)
- it 'sets sidekiq_jobs_failed_total and reraises' do
- expect { subject.call(worker, job, :test) { raise StandardError, "Failed" } }.to raise_error(StandardError, "Failed")
- end
+ subject.call(worker, job, :test) { nil }
end
end
end
end
- context "when workers are not attributed" do
- before do
- stub_const('TestNonAttributedWorker', Class.new)
- TestNonAttributedWorker.class_eval do
- include Sidekiq::Worker
+ context "with prometheus integrated" do
+ describe '#call' do
+ it 'yields block' do
+ expect { |b| subject.call(worker, job, :test, &b) }.to yield_control.once
end
- end
- let(:worker) { TestNonAttributedWorker.new }
- let(:labels) { default_labels.merge(urgency: "") }
+ context 'when error is raised' do
+ let(:job_status) { :fail }
- it_behaves_like "a metrics middleware"
- end
-
- context "when a worker is wrapped into ActiveJob" do
- before do
- stub_const('TestWrappedWorker', Class.new)
- TestWrappedWorker.class_eval do
- include Sidekiq::Worker
+ it 'sets sidekiq_jobs_failed_total and reraises' do
+ expect { subject.call(worker, job, :test) { raise StandardError, "Failed" } }.to raise_error(StandardError, "Failed")
+ end
end
end
-
- let(:job) do
- {
- "class" => ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper,
- "wrapped" => TestWrappedWorker
- }
- end
-
- let(:worker) { TestWrappedWorker.new }
- let(:worker_class) { TestWrappedWorker }
- let(:labels) { default_labels.merge(urgency: "") }
-
- it_behaves_like "a metrics middleware"
end
+ end
- context "when workers are attributed" do
- def create_attributed_worker_class(urgency, external_dependencies, resource_boundary, category)
- Class.new do
- include Sidekiq::Worker
- include WorkerAttributes
-
- urgency urgency if urgency
- worker_has_external_dependencies! if external_dependencies
- worker_resource_boundary resource_boundary unless resource_boundary == :unknown
- feature_category category unless category.nil?
- end
- end
-
- let(:urgency) { nil }
- let(:external_dependencies) { false }
- let(:resource_boundary) { :unknown }
- let(:feature_category) { nil }
- let(:worker_class) { create_attributed_worker_class(urgency, external_dependencies, resource_boundary, feature_category) }
- let(:worker) { worker_class.new }
-
- context "high urgency" do
- let(:urgency) { :high }
- let(:labels) { default_labels.merge(urgency: "high") }
-
- it_behaves_like "a metrics middleware"
- end
-
- context "external dependencies" do
- let(:external_dependencies) { true }
- let(:labels) { default_labels.merge(external_dependencies: "yes") }
-
- it_behaves_like "a metrics middleware"
- end
-
- context "cpu boundary" do
- let(:resource_boundary) { :cpu }
- let(:labels) { default_labels.merge(boundary: "cpu") }
-
- it_behaves_like "a metrics middleware"
- end
-
- context "memory boundary" do
- let(:resource_boundary) { :memory }
- let(:labels) { default_labels.merge(boundary: "memory") }
-
- it_behaves_like "a metrics middleware"
- end
-
- context "feature category" do
- let(:feature_category) { :authentication }
- let(:labels) { default_labels.merge(feature_category: "authentication") }
-
- it_behaves_like "a metrics middleware"
- end
-
- context "combined" do
- let(:urgency) { :throttled }
- let(:external_dependencies) { true }
- let(:resource_boundary) { :cpu }
- let(:feature_category) { :authentication }
- let(:labels) { default_labels.merge(urgency: "throttled", external_dependencies: "yes", boundary: "cpu", feature_category: "authentication") }
-
- it_behaves_like "a metrics middleware"
- end
- end
+ it_behaves_like 'metrics middleware with worker attribution' do
+ let(:job_status) { :done }
+ let(:labels_with_job_status) { labels.merge(job_status: job_status.to_s) }
end
end
# rubocop: enable RSpec/MultipleMemoizedHelpers
diff --git a/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb b/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
index ca473462d2e..f736a7db774 100644
--- a/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware/worker_context/server_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::SidekiqMiddleware::WorkerContext::Server do
worker_context user: nil
def perform(identifier, *args)
- self.class.contexts.merge!(identifier => Labkit::Context.current.to_h)
+ self.class.contexts.merge!(identifier => Gitlab::ApplicationContext.current)
end
end
end
diff --git a/spec/lib/gitlab/sidekiq_middleware_spec.rb b/spec/lib/gitlab/sidekiq_middleware_spec.rb
index 755f6004e52..0efdef0c999 100644
--- a/spec/lib/gitlab/sidekiq_middleware_spec.rb
+++ b/spec/lib/gitlab/sidekiq_middleware_spec.rb
@@ -69,11 +69,13 @@ RSpec.describe Gitlab::SidekiqMiddleware do
shared_examples "a server middleware chain" do
it "passes through the right server middlewares" do
enabled_sidekiq_middlewares.each do |middleware|
- expect_any_instance_of(middleware).to receive(:call).with(*middleware_expected_args).once.and_call_original
+ expect_next_instance_of(middleware) do |middleware_instance|
+ expect(middleware_instance).to receive(:call).with(*middleware_expected_args).once.and_call_original
+ end
end
disabled_sidekiq_middlewares.each do |middleware|
- expect_any_instance_of(middleware).not_to receive(:call)
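+ # Disabled middlewares should not even be instantiated by the chain.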
+ expect(middleware).not_to receive(:new)
end
worker_class.perform_async(*job_args)
diff --git a/spec/lib/gitlab/slash_commands/presenters/issue_comment_spec.rb b/spec/lib/gitlab/slash_commands/presenters/issue_comment_spec.rb
index 109b4b8fee1..690ffb15a5d 100644
--- a/spec/lib/gitlab/slash_commands/presenters/issue_comment_spec.rb
+++ b/spec/lib/gitlab/slash_commands/presenters/issue_comment_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::SlashCommands::Presenters::IssueComment do
let_it_be(:project) { create(:project) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:note) { create(:note, project: project, noteable: issue) }
+
let(:author) { note.author }
describe '#present' do
diff --git a/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb b/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb
index a4d8e3957cf..7b3440b40a7 100644
--- a/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb
+++ b/spec/lib/gitlab/slash_commands/presenters/issue_move_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::SlashCommands::Presenters::IssueMove do
let_it_be(:project, reload: true) { create(:project) }
let_it_be(:other_project) { create(:project) }
let_it_be(:old_issue, reload: true) { create(:issue, project: project) }
+
let(:new_issue) { Issues::MoveService.new(project, user).execute(old_issue, other_project) }
let(:attachment) { subject[:attachments].first }
diff --git a/spec/lib/gitlab/slash_commands/presenters/issue_new_spec.rb b/spec/lib/gitlab/slash_commands/presenters/issue_new_spec.rb
index 03a94ea5e29..21a983090fb 100644
--- a/spec/lib/gitlab/slash_commands/presenters/issue_new_spec.rb
+++ b/spec/lib/gitlab/slash_commands/presenters/issue_new_spec.rb
@@ -1,19 +1,22 @@
# frozen_string_literal: true
-
require 'spec_helper'
RSpec.describe Gitlab::SlashCommands::Presenters::IssueNew do
+ include Gitlab::Routing
let(:project) { create(:project) }
let(:issue) { create(:issue, project: project) }
- let(:attachment) { subject[:attachments].first }
subject { described_class.new(issue).present }
it { is_expected.to be_a(Hash) }
it 'shows the issue' do
- expect(subject[:response_type]).to be(:in_channel)
- expect(subject).to have_key(:attachments)
- expect(attachment[:title]).to start_with(issue.title)
+ expected_text = "I created an issue on <#{url_for(issue.author)}|#{issue.author.to_reference}>'s behalf: *<#{project_issue_url(issue.project, issue)}|#{issue.to_reference}>* in <#{project.web_url}|#{project.full_name}>"
+
+ expect(subject).to eq(
+ response_type: :in_channel,
+ status: 200,
+ text: expected_text
+ )
end
end
diff --git a/spec/lib/gitlab/slash_commands/run_spec.rb b/spec/lib/gitlab/slash_commands/run_spec.rb
index c9ff580d586..9d204228d21 100644
--- a/spec/lib/gitlab/slash_commands/run_spec.rb
+++ b/spec/lib/gitlab/slash_commands/run_spec.rb
@@ -3,6 +3,26 @@
require 'spec_helper'
RSpec.describe Gitlab::SlashCommands::Run do
+ describe '.match' do
+ it 'returns true for a run command' do
+ expect(described_class.match('run foo')).to be_an_instance_of(MatchData)
+ end
+
+ it 'returns true for a run command with arguments' do
+ expect(described_class.match('run foo bar baz'))
+ .to be_an_instance_of(MatchData)
+ end
+
+ it 'returns true for a command containing newlines' do
+ expect(described_class.match("run foo\nbar\nbaz"))
+ .to be_an_instance_of(MatchData)
+ end
+
+ it 'returns false for an unrelated command' do
+ expect(described_class.match('foo bar')).to be_nil
+ end
+ end
+
describe '.available?' do
it 'returns true when builds are enabled for the project' do
project = double(:project, builds_enabled?: true)
diff --git a/spec/lib/gitlab/snippet_search_results_spec.rb b/spec/lib/gitlab/snippet_search_results_spec.rb
index 2177b2be6d6..fc342b7e9b1 100644
--- a/spec/lib/gitlab/snippet_search_results_spec.rb
+++ b/spec/lib/gitlab/snippet_search_results_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Gitlab::SnippetSearchResults do
include SearchHelpers
let_it_be(:snippet) { create(:snippet, content: 'foo', file_name: 'foo') }
+
let(:results) { described_class.new(snippet.author, 'foo') }
describe '#snippet_titles_count' do
diff --git a/spec/lib/gitlab/sourcegraph_spec.rb b/spec/lib/gitlab/sourcegraph_spec.rb
index ad947475f06..6bebd1ca3e6 100644
--- a/spec/lib/gitlab/sourcegraph_spec.rb
+++ b/spec/lib/gitlab/sourcegraph_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Sourcegraph do
let_it_be(:user) { create(:user) }
+
let(:feature_scope) { true }
before do
diff --git a/spec/lib/gitlab/sql/cte_spec.rb b/spec/lib/gitlab/sql/cte_spec.rb
index fdc150cd4b9..4cf94f4dcab 100644
--- a/spec/lib/gitlab/sql/cte_spec.rb
+++ b/spec/lib/gitlab/sql/cte_spec.rb
@@ -14,7 +14,14 @@ RSpec.describe Gitlab::SQL::CTE do
relation.except(:order).to_sql
end
- expect(sql).to eq("#{name} AS (#{sql1})")
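+ # The expected SQL now accounts for the MATERIALIZED keyword when the PostgreSQL version supports it.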
+ expected = [
+ "#{name} AS ",
+ Gitlab::Database::AsWithMaterialized.materialized_if_supported,
+ (' ' unless Gitlab::Database::AsWithMaterialized.materialized_if_supported.blank?),
+ "(#{sql1})"
+ ].join
+
+ expect(sql).to eq(expected)
end
end
@@ -41,4 +48,15 @@ RSpec.describe Gitlab::SQL::CTE do
expect(relation.to_a).to eq(User.where(id: user.id).to_a)
end
end
+
+ it_behaves_like 'CTE with MATERIALIZED keyword examples' do
+ let(:expected_query_block_with_materialized) { 'WITH "some_cte" AS MATERIALIZED (' }
+ let(:expected_query_block_without_materialized) { 'WITH "some_cte" AS (' }
+
+ let(:query) do
+ cte = described_class.new(:some_cte, User.active, **options)
+
+ User.with(cte.to_arel).to_sql
+ end
+ end
end
diff --git a/spec/lib/gitlab/sql/recursive_cte_spec.rb b/spec/lib/gitlab/sql/recursive_cte_spec.rb
index 02611620989..edcacd404c2 100644
--- a/spec/lib/gitlab/sql/recursive_cte_spec.rb
+++ b/spec/lib/gitlab/sql/recursive_cte_spec.rb
@@ -57,4 +57,17 @@ RSpec.describe Gitlab::SQL::RecursiveCTE do
expect(relation.to_a).to eq(User.where(id: user.id).to_a)
end
end
+
+ it_behaves_like 'CTE with MATERIALIZED keyword examples' do
+ # MATERIALIZED keyword is not needed for recursive queries
+ let(:expected_query_block_with_materialized) { 'WITH RECURSIVE "some_cte" AS (' }
+ let(:expected_query_block_without_materialized) { 'WITH RECURSIVE "some_cte" AS (' }
+
+ let(:query) do
+ recursive_cte = described_class.new(:some_cte)
+ recursive_cte << User.active
+
+ User.with.recursive(recursive_cte.to_arel).to_sql
+ end
+ end
end
diff --git a/spec/lib/gitlab/subscription_portal_spec.rb b/spec/lib/gitlab/subscription_portal_spec.rb
index 351af3c07d2..ad1affdac0b 100644
--- a/spec/lib/gitlab/subscription_portal_spec.rb
+++ b/spec/lib/gitlab/subscription_portal_spec.rb
@@ -3,39 +3,41 @@
require 'spec_helper'
RSpec.describe ::Gitlab::SubscriptionPortal do
- describe '.default_subscriptions_url' do
- subject { described_class.default_subscriptions_url }
-
- context 'on non test and non dev environments' do
- before do
- allow(Rails).to receive_message_chain(:env, :test?).and_return(false)
- allow(Rails).to receive_message_chain(:env, :development?).and_return(false)
+ unless Gitlab.jh?
+ describe '.default_subscriptions_url' do
+ subject { described_class.default_subscriptions_url }
+
+ context 'on non test and non dev environments' do
+ before do
+ allow(Rails).to receive_message_chain(:env, :test?).and_return(false)
+ allow(Rails).to receive_message_chain(:env, :development?).and_return(false)
+ end
+
+ it 'returns production subscriptions app URL' do
+ is_expected.to eq('https://customers.gitlab.com')
+ end
end
- it 'returns production subscriptions app URL' do
- is_expected.to eq('https://customers.gitlab.com')
- end
- end
+ context 'on dev environment' do
+ before do
+ allow(Rails).to receive_message_chain(:env, :test?).and_return(false)
+ allow(Rails).to receive_message_chain(:env, :development?).and_return(true)
+ end
- context 'on dev environment' do
- before do
- allow(Rails).to receive_message_chain(:env, :test?).and_return(false)
- allow(Rails).to receive_message_chain(:env, :development?).and_return(true)
+ it 'returns staging subscriptions app url' do
+ is_expected.to eq('https://customers.stg.gitlab.com')
+ end
end
- it 'returns staging subscriptions app url' do
- is_expected.to eq('https://customers.stg.gitlab.com')
- end
- end
-
- context 'on test environment' do
- before do
- allow(Rails).to receive_message_chain(:env, :test?).and_return(true)
- allow(Rails).to receive_message_chain(:env, :development?).and_return(false)
- end
+ context 'on test environment' do
+ before do
+ allow(Rails).to receive_message_chain(:env, :test?).and_return(true)
+ allow(Rails).to receive_message_chain(:env, :development?).and_return(false)
+ end
- it 'returns staging subscriptions app url' do
- is_expected.to eq('https://customers.stg.gitlab.com')
+ it 'returns staging subscriptions app url' do
+ is_expected.to eq('https://customers.stg.gitlab.com')
+ end
end
end
end
diff --git a/spec/lib/gitlab/template/finders/repo_template_finders_spec.rb b/spec/lib/gitlab/template/finders/repo_template_finders_spec.rb
index 05f351be702..793ad1c1959 100644
--- a/spec/lib/gitlab/template/finders/repo_template_finders_spec.rb
+++ b/spec/lib/gitlab/template/finders/repo_template_finders_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Template::Finders::RepoTemplateFinder do
let_it_be(:project) { create(:project, :repository) }
+
let(:categories) { { 'HTML' => 'html' } }
subject(:finder) { described_class.new(project, 'files/', '.html', categories) }
diff --git a/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb b/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
index 0e8647ad78a..65597e6568d 100644
--- a/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
+++ b/spec/lib/gitlab/tracking/destinations/snowplow_spec.rb
@@ -41,21 +41,6 @@ RSpec.describe Gitlab::Tracking::Destinations::Snowplow do
.with('category', 'action', 'label', 'property', 1.5, nil, (Time.now.to_f * 1000).to_i)
end
end
-
- describe '#self_describing_event' do
- it 'sends event to tracker' do
- allow(tracker).to receive(:track_self_describing_event).and_call_original
-
- subject.self_describing_event('iglu:com.gitlab/foo/jsonschema/1-0-0', data: { foo: 'bar' })
-
- expect(tracker).to have_received(:track_self_describing_event) do |event, context, timestamp|
- expect(event.to_json[:schema]).to eq('iglu:com.gitlab/foo/jsonschema/1-0-0')
- expect(event.to_json[:data]).to eq(foo: 'bar')
- expect(context).to eq(nil)
- expect(timestamp).to eq((Time.now.to_f * 1000).to_i)
- end
- end
- end
end
context 'when snowplow is not enabled' do
@@ -66,13 +51,5 @@ RSpec.describe Gitlab::Tracking::Destinations::Snowplow do
subject.event('category', 'action', label: 'label', property: 'property', value: 1.5)
end
end
-
- describe '#self_describing_event' do
- it 'does not send event to tracker' do
- expect_any_instance_of(SnowplowTracker::Tracker).not_to receive(:track_self_describing_event)
-
- subject.self_describing_event('iglu:com.gitlab/foo/jsonschema/1-0-0', data: { foo: 'bar' })
- end
- end
end
end
diff --git a/spec/lib/gitlab/tracking/standard_context_spec.rb b/spec/lib/gitlab/tracking/standard_context_spec.rb
index 561edbd38f8..dacd08cf12b 100644
--- a/spec/lib/gitlab/tracking/standard_context_spec.rb
+++ b/spec/lib/gitlab/tracking/standard_context_spec.rb
@@ -58,10 +58,16 @@ RSpec.describe Gitlab::Tracking::StandardContext do
end
context 'with extra data' do
- subject { described_class.new(foo: 'bar') }
+ subject { described_class.new(extra_key_1: 'extra value 1', extra_key_2: 'extra value 2') }
- it 'creates a Snowplow context with the given data' do
- expect(snowplow_context.to_json.dig(:data, :foo)).to eq('bar')
+ it 'includes extra data in `extra` hash' do
+ expect(snowplow_context.to_json.dig(:data, :extra)).to eq(extra_key_1: 'extra value 1', extra_key_2: 'extra value 2')
+ end
+ end
+
+ context 'without extra data' do
+ it 'contains an empty `extra` hash' do
+ expect(snowplow_context.to_json.dig(:data, :extra)).to be_empty
end
end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index ac052bd7a80..4d856205609 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -36,12 +36,12 @@ RSpec.describe Gitlab::Tracking do
end
describe '.event' do
- before do
- allow_any_instance_of(Gitlab::Tracking::Destinations::Snowplow).to receive(:event)
- allow_any_instance_of(Gitlab::Tracking::Destinations::ProductAnalytics).to receive(:event)
- end
-
shared_examples 'delegates to destination' do |klass|
+ before do
+ allow_any_instance_of(Gitlab::Tracking::Destinations::Snowplow).to receive(:event)
+ allow_any_instance_of(Gitlab::Tracking::Destinations::ProductAnalytics).to receive(:event)
+ end
+
it "delegates to #{klass} destination" do
other_context = double(:context)
@@ -51,7 +51,7 @@ RSpec.describe Gitlab::Tracking do
expect(Gitlab::Tracking::StandardContext)
.to receive(:new)
- .with(project: project, user: user, namespace: namespace)
+ .with(project: project, user: user, namespace: namespace, extra_key_1: 'extra value 1', extra_key_2: 'extra value 2')
.and_call_original
expect_any_instance_of(klass).to receive(:event) do |_, category, action, args|
@@ -66,21 +66,21 @@ RSpec.describe Gitlab::Tracking do
end
described_class.event('category', 'action', label: 'label', property: 'property', value: 1.5,
- context: [other_context], project: project, user: user, namespace: namespace)
+ context: [other_context], project: project, user: user, namespace: namespace,
+ extra_key_1: 'extra value 1', extra_key_2: 'extra value 2')
end
end
- include_examples 'delegates to destination', Gitlab::Tracking::Destinations::Snowplow
- include_examples 'delegates to destination', Gitlab::Tracking::Destinations::ProductAnalytics
- end
+ it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::Snowplow
+ it_behaves_like 'delegates to destination', Gitlab::Tracking::Destinations::ProductAnalytics
- describe '.self_describing_event' do
- it 'delegates to snowplow destination' do
- expect_any_instance_of(Gitlab::Tracking::Destinations::Snowplow)
- .to receive(:self_describing_event)
- .with('iglu:com.gitlab/foo/jsonschema/1-0-0', data: { foo: 'bar' }, context: nil)
+ it 'tracks errors' do
+ expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).with(
+ an_instance_of(ContractError),
+ snowplow_category: nil, snowplow_action: 'some_action'
+ )
- described_class.self_describing_event('iglu:com.gitlab/foo/jsonschema/1-0-0', data: { foo: 'bar' })
+ described_class.event(nil, 'some_action')
end
end
end
diff --git a/spec/lib/gitlab/tree_summary_spec.rb b/spec/lib/gitlab/tree_summary_spec.rb
index 661ef507a82..a86afa9cba5 100644
--- a/spec/lib/gitlab/tree_summary_spec.rb
+++ b/spec/lib/gitlab/tree_summary_spec.rb
@@ -226,6 +226,7 @@ RSpec.describe Gitlab::TreeSummary do
describe 'References in commit messages' do
let_it_be(:project) { create(:project, :empty_repo) }
let_it_be(:issue) { create(:issue, project: project) }
+
let(:entries) { summary.summarize.first }
let(:entry) { entries.find { |entry| entry[:file_name] == 'issue.txt' } }
diff --git a/spec/lib/gitlab/untrusted_regexp_spec.rb b/spec/lib/gitlab/untrusted_regexp_spec.rb
index aac3d5e27f5..270c4beec97 100644
--- a/spec/lib/gitlab/untrusted_regexp_spec.rb
+++ b/spec/lib/gitlab/untrusted_regexp_spec.rb
@@ -136,4 +136,22 @@ RSpec.describe Gitlab::UntrustedRegexp do
end
end
end
+
+ describe '#match' do
+ context 'when there are matches' do
+ it 'returns a match object' do
+ result = described_class.new('(?P<number>\d+)').match('hello 10')
+
+ expect(result[:number]).to eq('10')
+ end
+ end
+
+ context 'when there are no matches' do
+ it 'returns nil' do
+ result = described_class.new('(?P<number>\d+)').match('hello')
+
+ expect(result).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/url_builder_spec.rb b/spec/lib/gitlab/url_builder_spec.rb
index 6d055fe3643..b359eb422d7 100644
--- a/spec/lib/gitlab/url_builder_spec.rb
+++ b/spec/lib/gitlab/url_builder_spec.rb
@@ -92,6 +92,7 @@ RSpec.describe Gitlab::UrlBuilder do
context 'when passing a Snippet' do
let_it_be(:personal_snippet) { create(:personal_snippet, :repository) }
let_it_be(:project_snippet) { create(:project_snippet, :repository) }
+
let(:blob) { snippet.blobs.first }
let(:ref) { blob.repository.root_ref }
diff --git a/spec/lib/gitlab/usage/metric_definition_spec.rb b/spec/lib/gitlab/usage/metric_definition_spec.rb
index 8b592838f5d..e99d720058a 100644
--- a/spec/lib/gitlab/usage/metric_definition_spec.rb
+++ b/spec/lib/gitlab/usage/metric_definition_spec.rb
@@ -16,7 +16,8 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
time_frame: 'none',
data_source: 'database',
distribution: %w(ee ce),
- tier: %w(free starter premium ultimate bronze silver gold)
+ tier: %w(free starter premium ultimate bronze silver gold),
+ name: 'count_boards'
}
end
@@ -24,6 +25,13 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
let(:definition) { described_class.new(path, attributes) }
let(:yaml_content) { attributes.deep_stringify_keys.to_yaml }
+ def write_metric(metric, path, content)
+ path = File.join(metric, path)
+ dir = File.dirname(path)
+ FileUtils.mkdir_p(dir)
+ File.write(path, content)
+ end
+
it 'has all definitions valid' do
expect { described_class.definitions }.not_to raise_error(Gitlab::Usage::Metric::InvalidMetricError)
end
@@ -53,6 +61,7 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
:distribution | nil
:distribution | 'test'
:tier | %w(test ee)
+ :name | 'count_<adjective_describing>_boards'
end
with_them do
@@ -82,6 +91,28 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
end
end
+ describe 'statuses' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:status, :skip_validation?) do
+ 'deprecated' | true
+ 'removed' | true
+ 'data_available' | false
+ 'implemented' | false
+ 'not_used' | false
+ end
+
+ with_them do
+ subject(:validation) do
+ described_class.new(path, attributes.merge( { status: status } )).send(:skip_validation?)
+ end
+
+ it 'returns true/false for skip_validation' do
+ expect(validation).to eq(skip_validation?)
+ end
+ end
+ end
+
describe '.load_all!' do
let(:metric1) { Dir.mktmpdir('metric1') }
let(:metric2) { Dir.mktmpdir('metric2') }
@@ -121,12 +152,54 @@ RSpec.describe Gitlab::Usage::MetricDefinition do
FileUtils.rm_rf(metric1)
FileUtils.rm_rf(metric2)
end
+ end
+
+ describe 'dump_metrics_yaml' do
+ let(:other_attributes) do
+ {
+ description: 'Test metric definition',
+ value_type: 'string',
+ product_category: 'collection',
+ product_stage: 'growth',
+ status: 'data_available',
+ default_generation: 'generation_1',
+ key_path: 'counter.category.event',
+ product_group: 'group::product analytics',
+ time_frame: 'none',
+ data_source: 'database',
+ distribution: %w(ee ce),
+ tier: %w(free starter premium ultimate bronze silver gold)
+ }
+ end
+
+ let(:other_yaml_content) { other_attributes.deep_stringify_keys.to_yaml }
+ let(:other_path) { File.join('metrics', 'test_metric.yml') }
+ let(:metric1) { Dir.mktmpdir('metric1') }
+ let(:metric2) { Dir.mktmpdir('metric2') }
+
+ before do
+ allow(described_class).to receive(:paths).and_return(
+ [
+ File.join(metric1, '**', '*.yml'),
+ File.join(metric2, '**', '*.yml')
+ ]
+ )
+ # Reset memoized `definitions` result
+ described_class.instance_variable_set(:@definitions, nil)
+ end
+
+ after do
+ FileUtils.rm_rf(metric1)
+ FileUtils.rm_rf(metric2)
+ end
+
+ subject { described_class.dump_metrics_yaml }
+
+ it 'returns a YAML with both metrics in a sequence' do
+ write_metric(metric1, path, yaml_content)
+ write_metric(metric2, other_path, other_yaml_content)
- def write_metric(metric, path, content)
- path = File.join(metric, path)
- dir = File.dirname(path)
- FileUtils.mkdir_p(dir)
- File.write(path, content)
+ is_expected.to eq([attributes, other_attributes].map(&:deep_stringify_keys).to_yaml)
end
end
end
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
index a2a40f17269..db878828cd6 100644
--- a/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/aggregates/sources/postgres_hll_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Sources::PostgresHll, :clean_
let_it_be(:end_date) { Date.current }
let_it_be(:recorded_at) { Time.current }
let_it_be(:time_period) { { created_at: (start_date..end_date) } }
+
let(:metric_1) { 'metric_1' }
let(:metric_2) { 'metric_2' }
let(:metric_names) { [metric_1, metric_2] }
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
index cd0413feab4..34b073b4729 100644
--- a/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do
describe '#generate' do
shared_examples 'name suggestion' do
it 'return correct name' do
- expect(described_class.generate(key_path)).to eq name_suggestion
+ expect(described_class.generate(key_path)).to match name_suggestion
end
end
@@ -20,7 +20,7 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do
it_behaves_like 'name suggestion' do
# corresponding metric is collected with count(Board)
let(:key_path) { 'counts.boards' }
- let(:name_suggestion) { 'count_boards' }
+ let(:name_suggestion) { /count_boards/ }
end
end
@@ -28,7 +28,44 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do
it_behaves_like 'name suggestion' do
# corresponding metric is collected with distinct_count(ZoomMeeting, :issue_id)
let(:key_path) { 'counts.issues_using_zoom_quick_actions' }
- let(:name_suggestion) { 'count_distinct_issue_id_from_zoom_meetings' }
+ let(:name_suggestion) { /count_distinct_issue_id_from_zoom_meetings/ }
+ end
+ end
+
+ context 'joined relations' do
+ context 'counted attribute comes from joined relation' do
+ it_behaves_like 'name suggestion' do
+ # corresponding metric is collected with:
+ # distinct_count(
+ # ::Clusters::Applications::Ingress.modsecurity_enabled.logging
+ # .joins(cluster: :deployments)
+ # .merge(::Clusters::Cluster.enabled)
+ # .merge(Deployment.success),
+ # ::Deployment.arel_table[:environment_id]
+ # )
+ let(:key_path) { 'counts.ingress_modsecurity_logging' }
+ let(:name_suggestion) do
+ constraints = /'\(clusters_applications_ingress\.modsecurity_enabled = TRUE AND clusters_applications_ingress\.modsecurity_mode = \d+ AND clusters.enabled = TRUE AND deployments.status = \d+\)'/
+ /count_distinct_environment_id_from_<adjective describing\: #{constraints}>_deployments_<with>_<adjective describing\: #{constraints}>_clusters_<having>_<adjective describing\: #{constraints}>_clusters_applications_ingress/
+ end
+ end
+ end
+
+ context 'counted attribute comes from source relation' do
+ it_behaves_like 'name suggestion' do
+ # corresponding metric is collected with count(Issue.with_alert_management_alerts.not_authored_by(::User.alert_bot), start: issue_minimum_id, finish: issue_maximum_id)
+ let(:key_path) { 'counts.issues_created_manually_from_alerts' }
+ let(:name_suggestion) { /count_<adjective describing\: '\(issues\.author_id != \d+\)'>_issues_<with>_alert_management_alerts/ }
+ end
+ end
+ end
+
+ context 'strips off time period constraint' do
+ it_behaves_like 'name suggestion' do
+ # corresponding metric is collected with distinct_count(::Clusters::Cluster.aws_installed.enabled.where(time_period), :user_id)
+ let(:key_path) { 'usage_activity_by_stage_monthly.configure.clusters_platforms_eks' }
+ let(:constraints) { /<adjective describing\: '\(clusters.provider_type = \d+ AND \(cluster_providers_aws\.status IN \(\d+\)\) AND clusters\.enabled = TRUE\)'>/ }
+ let(:name_suggestion) { /count_distinct_user_id_from_#{constraints}_clusters_<with>_#{constraints}_cluster_providers_aws/ }
end
end
@@ -36,7 +73,7 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do
it_behaves_like 'name suggestion' do
# corresponding metric is collected with sum(JiraImportState.finished, :imported_issues_count)
let(:key_path) { 'counts.jira_imports_total_imported_issues_count' }
- let(:name_suggestion) { "sum_imported_issues_count_from_<adjective describing: '(jira_imports.status = 4)'>_jira_imports" }
+ let(:name_suggestion) { /sum_imported_issues_count_from_<adjective describing\: '\(jira_imports\.status = \d+\)'>_jira_imports/ }
end
end
@@ -44,7 +81,7 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do
it_behaves_like 'name suggestion' do
# corresponding metric is collected with add(data[:personal_snippets], data[:project_snippets])
let(:key_path) { 'counts.snippets' }
- let(:name_suggestion) { "add_count_<adjective describing: '(snippets.type = 'PersonalSnippet')'>_snippets_and_count_<adjective describing: '(snippets.type = 'ProjectSnippet')'>_snippets" }
+ let(:name_suggestion) { /add_count_<adjective describing\: '\(snippets\.type = 'PersonalSnippet'\)'>_snippets_and_count_<adjective describing\: '\(snippets\.type = 'ProjectSnippet'\)'>_snippets/ }
end
end
@@ -52,7 +89,7 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do
it_behaves_like 'name suggestion' do
# corresponding metric is collected with redis_usage_data { unique_visit_service.unique_visits_for(targets: :analytics) }
let(:key_path) { 'analytics_unique_visits.analytics_unique_visits_for_any_target' }
- let(:name_suggestion) { '<please fill metric name>' }
+ let(:name_suggestion) { /<please fill metric name, suggested format is: {subject}_{verb}{ing|ed}_{object} eg: users_creating_epics or merge_requests_viewed_in_single_file_mode>/ }
end
end
@@ -60,7 +97,7 @@ RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::Generator do
it_behaves_like 'name suggestion' do
# corresponding metric is collected with alt_usage_data(fallback: nil) { operating_system }
let(:key_path) { 'settings.operating_system' }
- let(:name_suggestion) { '<please fill metric name>' }
+ let(:name_suggestion) { /<please fill metric name>/ }
end
end
end
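# Illustrative aside (standalone plain-Ruby sketch, not part of the committed
# spec): the string expectations above were loosened into regexes, apparently
# so enum integers such as `jira_imports.status = 4` need not be hard-coded
# and are matched with \d+ instead. The matching style in miniature:
suggestion = "sum_imported_issues_count_from_<adjective describing: '(jira_imports.status = 4)'>_jira_imports"
pattern    = /sum_imported_issues_count_from_<adjective describing\: '\(jira_imports\.status = \d+\)'>_jira_imports/
puts suggestion.match?(pattern) # => true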
diff --git a/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/joins_spec.rb b/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/joins_spec.rb
new file mode 100644
index 00000000000..fb3bd564e34
--- /dev/null
+++ b/spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/joins_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Usage::Metrics::NamesSuggestions::RelationParsers::Joins do
+ describe '#accept' do
+ let(:collector) { Arel::Collectors::SubstituteBinds.new(ActiveRecord::Base.connection, Arel::Collectors::SQLString.new) }
+
+ context 'with join added via string' do
+ it 'collects join parts' do
+ arel = Issue.joins('LEFT JOIN projects ON projects.id = issue.project_id')
+
+ arel = arel.arel
+ result = described_class.new(ApplicationRecord.connection).accept(arel)
+
+ expect(result).to match_array [{ source: "projects", constraints: "projects.id = issue.project_id" }]
+ end
+ end
+
+ context 'with join added via arel node' do
+ it 'collects join parts' do
+ source_table = Arel::Table.new('records')
+ joined_table = Arel::Table.new('joins')
+ second_level_joined_table = Arel::Table.new('second_level_joins')
+
+ arel = source_table
+ .from
+ .project(source_table['id'].count)
+ .join(joined_table, Arel::Nodes::OuterJoin)
+ .on(source_table[:id].eq(joined_table[:records_id]))
+ .join(second_level_joined_table, Arel::Nodes::OuterJoin)
+ .on(joined_table[:id].eq(second_level_joined_table[:joins_id]))
+
+ result = described_class.new(ApplicationRecord.connection).accept(arel)
+
+ expect(result).to match_array [{ source: "joins", constraints: "records.id = joins.records_id" }, { source: "second_level_joins", constraints: "joins.id = second_level_joins.joins_id" }]
+ end
+ end
+ end
+end
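# Illustrative aside (standalone sketch, not the committed spec or parser):
# the parser above walks Arel join nodes, where each OuterJoin's `left` is the
# joined table and its `right` carries the ON constraint. Assumes only that
# activerecord (which ships Arel) is installed.
require 'active_record'

records = Arel::Table.new('records')
joins   = Arel::Table.new('joins')

query = records
  .project(records['id'].count)
  .join(joins, Arel::Nodes::OuterJoin)
  .on(records[:id].eq(joins[:records_id]))

query.join_sources.each do |join|
  puts join.class      # => Arel::Nodes::OuterJoin
  puts join.left.name  # => "joins" (the joined table reported as the :source)
end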
diff --git a/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb b/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
index 664e7938a7e..a1dee442131 100644
--- a/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/code_review_events_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
# If this spec fails, we need to add the new code review event to the correct aggregated metric
RSpec.describe 'Code review events' do
it 'the aggregated metrics contain all the code review metrics' do
- path = Rails.root.join('lib/gitlab/usage_data_counters/aggregated_metrics/code_review.yml')
+ path = Rails.root.join('config/metrics/aggregates/code_review.yml')
aggregated_events = YAML.safe_load(File.read(path), aliases: true)&.map(&:with_indifferent_access)
code_review_aggregated_events = aggregated_events
diff --git a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
index d12dcdae955..9fc28f6c4ec 100644
--- a/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/hll_redis_counter_spec.rb
@@ -34,6 +34,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'source_code',
'incident_management',
'incident_management_alerts',
+ 'incident_management_oncall',
'testing',
'issues_edit',
'ci_secrets_management',
@@ -43,7 +44,8 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
'ci_templates',
'quickactions',
'pipeline_authoring',
- 'epics_usage'
+ 'epics_usage',
+ 'secure'
)
end
end
@@ -93,7 +95,25 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
describe '.track_event' do
- context 'with feature flag set' do
+ context 'with redis_hll_tracking' do
+ it 'tracks the event when feature enabled' do
+ stub_feature_flags(redis_hll_tracking: true)
+
+ expect(Gitlab::Redis::HLL).to receive(:add)
+
+ described_class.track_event(weekly_event, values: 1)
+ end
+
+ it 'does not track the event with feature flag disabled' do
+ stub_feature_flags(redis_hll_tracking: false)
+
+ expect(Gitlab::Redis::HLL).not_to receive(:add)
+
+ described_class.track_event(weekly_event, values: 1)
+ end
+ end
+
+ context 'with event feature flag set' do
it 'tracks the event when feature enabled' do
stub_feature_flags(feature => true)
@@ -111,7 +131,7 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
end
end
- context 'with no feature flag set' do
+ context 'with no event feature flag set' do
it 'tracks the event' do
expect(Gitlab::Redis::HLL).to receive(:add)
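# Illustrative aside (standalone Ruby sketch, not the HLLRedisCounter
# implementation): the new redis_hll_tracking flag acts as a global kill
# switch checked before any per-event flag. The in-memory store and the event
# name below are invented stand-ins so the sketch runs on its own.
require 'set'

STORE = Hash.new { |hash, key| hash[key] = Set.new }

def track_event(event_name, values:, flag_enabled: true)
  return unless flag_enabled                               # global kill switch first
  Array(values).each { |value| STORE[event_name] << value }
end

track_event('sample_weekly_event', values: 1, flag_enabled: false)
track_event('sample_weekly_event', values: 1, flag_enabled: true)
STORE['sample_weekly_event'].size # => 1 (only the enabled call was recorded)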
@@ -289,6 +309,11 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
described_class.track_event(daily_event, values: entity4, time: 29.days.ago)
end
+ it 'returns -1 if there are no keys for the given events' do
+ expect(Gitlab::Redis::HLL).not_to receive(:count)
+ expect(described_class.unique_events(event_names: [weekly_event], start_date: Date.current, end_date: 4.weeks.ago)).to eq(-1)
+ end
+
it 'raise error if metrics are not in the same slot' do
expect do
described_class.unique_events(event_names: [compliance_slot_event, analytics_slot_event], start_date: 4.weeks.ago, end_date: Date.current)
@@ -508,6 +533,11 @@ RSpec.describe Gitlab::UsageDataCounters::HLLRedisCounter, :clean_gitlab_redis_s
expect { described_class.calculate_events_union(**time_range.merge(event_names: %w[event1_slot event4])) }.to raise_error described_class::SlotMismatch
expect { described_class.calculate_events_union(**time_range.merge(event_names: %w[event5_slot event3_slot])) }.to raise_error described_class::AggregationMismatch
end
+
+ it 'returns -1 if there are no keys for given events' do
+ expect(Gitlab::Redis::HLL).not_to receive(:count)
+ expect(described_class.calculate_events_union(event_names: %w[event1_slot event2_slot event3_slot], start_date: Date.current, end_date: 4.weeks.ago)).to eq(-1)
+ end
end
describe '.weekly_time_range' do
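# Illustrative aside (standalone sketch, not the real counter): the new
# examples assert that when the requested window yields no Redis keys the
# counter answers with -1 without calling Gitlab::Redis::HLL at all. The
# key-naming scheme below is invented purely for the illustration.
require 'date'

FALLBACK = -1

def weekly_keys(event, start_date, end_date)
  return [] if start_date > end_date                        # reversed range => no keys
  (start_date..end_date).step(7).map { |day| "#{event}-#{day.strftime('%G-%V')}" }
end

def unique_events(event, start_date:, end_date:)
  keys = weekly_keys(event, start_date, end_date)
  return FALLBACK if keys.empty?                             # skip the Redis round-trip
  keys.size                                                  # placeholder for the HLL count
end

unique_events('sample_weekly_event',
              start_date: Date.today, end_date: Date.today - 28) # => -1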
diff --git a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
index f8f6494b92e..1b73e5269d7 100644
--- a/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/issue_activity_unique_counter_spec.rb
@@ -3,9 +3,10 @@
require 'spec_helper'
RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_gitlab_redis_shared_state do
- let(:user1) { build(:user, id: 1) }
- let(:user2) { build(:user, id: 2) }
- let(:user3) { build(:user, id: 3) }
+ let_it_be(:user1) { build(:user, id: 1) }
+ let_it_be(:user2) { build(:user, id: 2) }
+ let_it_be(:user3) { build(:user, id: 3) }
+
let(:time) { Time.zone.now }
context 'for Issue title edit actions' do
@@ -272,10 +273,13 @@ RSpec.describe Gitlab::UsageDataCounters::IssueActivityUniqueCounter, :clean_git
described_class.track_issue_title_changed_action(author: user1)
described_class.track_issue_description_changed_action(author: user1)
described_class.track_issue_assignee_changed_action(author: user1)
- described_class.track_issue_title_changed_action(author: user2, time: time - 2.days)
- described_class.track_issue_title_changed_action(author: user3, time: time - 3.days)
- described_class.track_issue_description_changed_action(author: user3, time: time - 3.days)
- described_class.track_issue_assignee_changed_action(author: user3, time: time - 3.days)
+
+ travel_to(2.days.ago) do
+ described_class.track_issue_title_changed_action(author: user2)
+ described_class.track_issue_title_changed_action(author: user3)
+ described_class.track_issue_description_changed_action(author: user3)
+ described_class.track_issue_assignee_changed_action(author: user3)
+ end
events = Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category(described_class::ISSUE_CATEGORY)
today_count = Gitlab::UsageDataCounters::HLLRedisCounter.unique_events(event_names: events, start_date: time, end_date: time)
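# Illustrative aside (standalone sketch assuming only activesupport): the spec
# above now uses travel_to instead of passing explicit `time:` arguments, so
# anything recorded inside the block automatically carries the shifted time.
require 'active_support/all'
require 'active_support/testing/time_helpers'

include ActiveSupport::Testing::TimeHelpers

events = []

travel_to(2.days.ago) do
  events << { name: 'issue_title_changed', at: Time.now }   # recorded as two days ago
end
events << { name: 'issue_title_changed', at: Time.now }     # recorded as now

events.count { |event| event[:at].to_date == Date.today }   # => 1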
diff --git a/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb b/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb
index 2df0f331f73..1940442d2ad 100644
--- a/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb
+++ b/spec/lib/gitlab/usage_data_counters/quick_action_activity_unique_counter_spec.rb
@@ -115,6 +115,26 @@ RSpec.describe Gitlab::UsageDataCounters::QuickActionActivityUniqueCounter, :cle
end
end
+ context 'tracking spent' do
+ let(:quickaction_name) { 'spent' }
+
+ context 'adding time' do
+ let(:args) { '1d' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_spend_add' }
+ end
+ end
+
+ context 'removing time' do
+ let(:args) { '-1d' }
+
+ it_behaves_like 'a tracked quick action unique event' do
+ let(:action) { 'i_quickactions_spend_subtract' }
+ end
+ end
+ end
+
context 'tracking unassign' do
let(:quickaction_name) { 'unassign' }
diff --git a/spec/lib/gitlab/usage_data_non_sql_metrics_spec.rb b/spec/lib/gitlab/usage_data_non_sql_metrics_spec.rb
new file mode 100644
index 00000000000..32d1288c59c
--- /dev/null
+++ b/spec/lib/gitlab/usage_data_non_sql_metrics_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::UsageDataNonSqlMetrics do
+ let(:default_count) { Gitlab::UsageDataNonSqlMetrics::SQL_METRIC_DEFAULT }
+
+ describe '.count' do
+ it 'returns default value for count' do
+ expect(described_class.count(User)).to eq(default_count)
+ end
+ end
+
+ describe '.distinct_count' do
+ it 'returns default value for distinct count' do
+ expect(described_class.distinct_count(User)).to eq(default_count)
+ end
+ end
+
+ describe '.estimate_batch_distinct_count' do
+ it 'returns default value for estimate_batch_distinct_count' do
+ expect(described_class.estimate_batch_distinct_count(User)).to eq(default_count)
+ end
+ end
+
+ describe '.sum' do
+ it 'returns default value for sum' do
+ expect(described_class.sum(JiraImportState.finished, :imported_issues_count)).to eq(default_count)
+ end
+ end
+
+ describe '.histogram' do
+ it 'returns default value for histogram' do
+ expect(described_class.histogram(JiraImportState.finished, :imported_issues_count, buckets: [], bucket_size: 0)).to eq(default_count)
+ end
+ end
+
+ describe 'min/max methods' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:model, :result) do
+ User | nil
+ Issue | nil
+ Deployment | nil
+ Project | nil
+ end
+
+ with_them do
+ it 'returns nil' do
+ expect(described_class.minimum_id(model)).to eq(result)
+ expect(described_class.maximum_id(model)).to eq(result)
+ end
+ end
+ end
+end
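# Illustrative aside (standalone sketch, not the class under test): the new
# spec documents a collector that answers every SQL-backed question with a
# sentinel and never touches the database. The sentinel value and module name
# below are placeholders, not the real constants.
module NonSqlMetricsSketch
  SQL_METRIC_DEFAULT = -1 # placeholder; the real constant lives with the class under test

  def self.count(_relation, _column = nil, **_options)
    SQL_METRIC_DEFAULT
  end

  def self.distinct_count(_relation, _column = nil, **_options)
    SQL_METRIC_DEFAULT
  end

  def self.minimum_id(_model)
    nil
  end

  def self.maximum_id(_model)
    nil
  end
end

NonSqlMetricsSketch.count(:users)      # => -1, no SQL is ever issued
NonSqlMetricsSketch.minimum_id(:users) # => nil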
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
index 12eac643383..718ab3b2d95 100644
--- a/spec/lib/gitlab/usage_data_queries_spec.rb
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -11,12 +11,24 @@ RSpec.describe Gitlab::UsageDataQueries do
it 'returns the raw SQL' do
expect(described_class.count(User)).to start_with('SELECT COUNT("users"."id") FROM "users"')
end
+
+ it 'does not mix a nil column with keyword arguments' do
+ expect(described_class).to receive(:raw_sql).with(User, nil)
+
+ described_class.count(User, start: 1, finish: 2)
+ end
end
describe '.distinct_count' do
it 'returns the raw SQL' do
expect(described_class.distinct_count(Issue, :author_id)).to eq('SELECT COUNT(DISTINCT "issues"."author_id") FROM "issues"')
end
+
+ it 'does not mix a nil column with keyword arguments' do
+ expect(described_class).to receive(:raw_sql).with(Issue, nil, :distinct)
+
+ described_class.distinct_count(Issue, nil, start: 1, finish: 2)
+ end
end
describe '.redis_usage_data' do
@@ -46,4 +58,24 @@ RSpec.describe Gitlab::UsageDataQueries do
.to eq('SELECT (SELECT COUNT("users"."id") FROM "users") + (SELECT COUNT("issues"."id") FROM "issues")')
end
end
+
+ describe 'min/max methods' do
+ it 'returns nil' do
+ # user min/max
+ expect(described_class.minimum_id(User)).to eq(nil)
+ expect(described_class.maximum_id(User)).to eq(nil)
+
+ # issue min/max
+ expect(described_class.minimum_id(Issue)).to eq(nil)
+ expect(described_class.maximum_id(Issue)).to eq(nil)
+
+ # deployment min/max
+ expect(described_class.minimum_id(Deployment)).to eq(nil)
+ expect(described_class.maximum_id(Deployment)).to eq(nil)
+
+ # project min/max
+ expect(described_class.minimum_id(Project)).to eq(nil)
+ expect(described_class.maximum_id(Project)).to eq(nil)
+ end
+ end
end
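# Illustrative aside (plain-Ruby sketch, not the UsageDataQueries
# implementation): instead of executing counts, the class under test returns
# the SQL it would run. A simplified string-building version of that idea:
def raw_count_sql(table, column = 'id', distinct: false)
  count = distinct ? %(COUNT(DISTINCT "#{table}"."#{column}")) : %(COUNT("#{table}"."#{column}"))
  %(SELECT #{count} FROM "#{table}")
end

raw_count_sql('users')
# => SELECT COUNT("users"."id") FROM "users"
raw_count_sql('issues', 'author_id', distinct: true)
# => SELECT COUNT(DISTINCT "issues"."author_id") FROM "issues"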
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index b1581bf02a6..01701f7aebd 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -167,7 +167,10 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
create(:key, user: user)
create(:project, creator: user, disable_overriding_approvers_per_merge_request: true)
create(:project, creator: user, disable_overriding_approvers_per_merge_request: false)
- create(:remote_mirror, project: project)
+ create(:remote_mirror, project: project, enabled: true)
+ another_user = create(:user)
+ another_project = create(:project, :repository, creator: another_user)
+ create(:remote_mirror, project: another_project, enabled: false)
create(:snippet, author: user)
end
@@ -176,7 +179,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
keys: 2,
merge_requests: 2,
projects_with_disable_overriding_approvers_per_merge_request: 2,
- projects_without_disable_overriding_approvers_per_merge_request: 4,
+ projects_without_disable_overriding_approvers_per_merge_request: 6,
remote_mirrors: 2,
snippets: 2
)
@@ -185,7 +188,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
keys: 1,
merge_requests: 1,
projects_with_disable_overriding_approvers_per_merge_request: 1,
- projects_without_disable_overriding_approvers_per_merge_request: 2,
+ projects_without_disable_overriding_approvers_per_merge_request: 3,
remote_mirrors: 1,
snippets: 1
)
@@ -1288,6 +1291,7 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
'p_analytics_repo' => 123,
'i_analytics_cohorts' => 123,
'i_analytics_dev_ops_score' => 123,
+ 'i_analytics_dev_ops_adoption' => 123,
'i_analytics_instance_statistics' => 123,
'p_analytics_merge_request' => 123,
'g_analytics_merge_request' => 123,
@@ -1358,24 +1362,36 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
let(:categories) { ::Gitlab::UsageDataCounters::HLLRedisCounter.categories }
let(:ineligible_total_categories) do
- %w[source_code ci_secrets_management incident_management_alerts snippets terraform epics_usage]
+ %w[source_code ci_secrets_management incident_management_alerts snippets terraform incident_management_oncall secure]
end
- it 'has all known_events' do
- expect(subject).to have_key(:redis_hll_counters)
+ context 'with redis_hll_tracking feature enabled' do
+ it 'has all known_events' do
+ stub_feature_flags(redis_hll_tracking: true)
- expect(subject[:redis_hll_counters].keys).to match_array(categories)
+ expect(subject).to have_key(:redis_hll_counters)
- categories.each do |category|
- keys = ::Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category(category)
+ expect(subject[:redis_hll_counters].keys).to match_array(categories)
- metrics = keys.map { |key| "#{key}_weekly" } + keys.map { |key| "#{key}_monthly" }
+ categories.each do |category|
+ keys = ::Gitlab::UsageDataCounters::HLLRedisCounter.events_for_category(category)
- if ineligible_total_categories.exclude?(category)
- metrics.append("#{category}_total_unique_counts_weekly", "#{category}_total_unique_counts_monthly")
+ metrics = keys.map { |key| "#{key}_weekly" } + keys.map { |key| "#{key}_monthly" }
+
+ if ineligible_total_categories.exclude?(category)
+ metrics.append("#{category}_total_unique_counts_weekly", "#{category}_total_unique_counts_monthly")
+ end
+
+ expect(subject[:redis_hll_counters][category].keys).to match_array(metrics)
end
+ end
+ end
+
+ context 'with redis_hll_tracking disabled' do
+ it 'does not have redis_hll_tracking key' do
+ stub_feature_flags(redis_hll_tracking: false)
- expect(subject[:redis_hll_counters][category].keys).to match_array(metrics)
+ expect(subject).not_to have_key(:redis_hll_counters)
end
end
end
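# Illustrative aside (standalone sketch, not the real payload builder): the
# rewritten examples assert that the redis_hll_counters key appears in the
# usage payload only while the redis_hll_tracking flag is enabled.
def usage_payload(redis_hll_tracking_enabled:, counters: {})
  payload = { active_user_count: 42 }                             # illustrative base data
  payload[:redis_hll_counters] = counters if redis_hll_tracking_enabled
  payload
end

usage_payload(redis_hll_tracking_enabled: false).key?(:redis_hll_counters) # => false
usage_payload(redis_hll_tracking_enabled: true).key?(:redis_hll_counters)  # => true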
diff --git a/spec/lib/gitlab/utils/lazy_attributes_spec.rb b/spec/lib/gitlab/utils/lazy_attributes_spec.rb
index dfffe70defb..1ebc9b0d711 100644
--- a/spec/lib/gitlab/utils/lazy_attributes_spec.rb
+++ b/spec/lib/gitlab/utils/lazy_attributes_spec.rb
@@ -13,8 +13,10 @@ RSpec.describe Gitlab::Utils::LazyAttributes do
def initialize
@number = -> { 1 }
- @reader_1, @reader_2 = 'reader_1', -> { 'reader_2' }
- @incorrect_type, @accessor_2 = -> { :incorrect_type }, -> { 'accessor_2' }
+ @reader_1 = 'reader_1'
+ @reader_2 = -> { 'reader_2' }
+ @incorrect_type = -> { :incorrect_type }
+ @accessor_2 = -> { 'accessor_2' }
end
end
end
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index 6e1904c43e1..11b2a12f228 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -187,6 +187,7 @@ RSpec.describe Gitlab::Utils::UsageData do
describe '#histogram' do
let_it_be(:projects) { create_list(:project, 3) }
+
let(:project1) { projects.first }
let(:project2) { projects.second }
let(:project3) { projects.third }
@@ -478,4 +479,22 @@ RSpec.describe Gitlab::Utils::UsageData do
expect { described_class.track_usage_event(unknown_event, value) }.to raise_error(Gitlab::UsageDataCounters::HLLRedisCounter::UnknownEvent)
end
end
+
+ describe 'min/max' do
+ let(:model) { double(:relation) }
+
+ it 'returns min from the model' do
+ allow(model).to receive(:minimum).and_return(2)
+ allow(model).to receive(:name).and_return('sample_min_model')
+
+ expect(described_class.minimum_id(model)).to eq(2)
+ end
+
+ it 'returns max from the model' do
+ allow(model).to receive(:maximum).and_return(100)
+ allow(model).to receive(:name).and_return('sample_max_model')
+
+ expect(described_class.maximum_id(model)).to eq(100)
+ end
+ end
end
diff --git a/spec/lib/gitlab/utils_spec.rb b/spec/lib/gitlab/utils_spec.rb
index 665eebdfd9e..11dba610faf 100644
--- a/spec/lib/gitlab/utils_spec.rb
+++ b/spec/lib/gitlab/utils_spec.rb
@@ -162,7 +162,7 @@ RSpec.describe Gitlab::Utils do
describe '.nlbr' do
it 'replaces new lines with <br>' do
- expect(described_class.nlbr("<b>hello</b>\n<i>world</i>".freeze)).to eq("hello<br>world")
+ expect(described_class.nlbr("<b>hello</b>\n<i>world</i>")).to eq("hello<br>world")
end
end
@@ -192,6 +192,7 @@ RSpec.describe Gitlab::Utils do
expect(to_boolean('YeS')).to be(true)
expect(to_boolean('t')).to be(true)
expect(to_boolean('1')).to be(true)
+ expect(to_boolean(1)).to be(true)
expect(to_boolean('ON')).to be(true)
expect(to_boolean('FaLse')).to be(false)
@@ -199,6 +200,7 @@ RSpec.describe Gitlab::Utils do
expect(to_boolean('NO')).to be(false)
expect(to_boolean('n')).to be(false)
expect(to_boolean('0')).to be(false)
+ expect(to_boolean(0)).to be(false)
expect(to_boolean('oFF')).to be(false)
end
@@ -388,8 +390,8 @@ RSpec.describe Gitlab::Utils do
describe ".safe_downcase!" do
where(:str, :result) do
- "test".freeze | "test"
- "Test".freeze | "test"
+ "test" | "test"
+ "Test" | "test"
"test" | "test"
"Test" | "test"
end
diff --git a/spec/lib/gitlab/web_ide/config/entry/global_spec.rb b/spec/lib/gitlab/web_ide/config/entry/global_spec.rb
index 3e29bf89785..8dbe64af1c7 100644
--- a/spec/lib/gitlab/web_ide/config/entry/global_spec.rb
+++ b/spec/lib/gitlab/web_ide/config/entry/global_spec.rb
@@ -83,6 +83,7 @@ RSpec.describe Gitlab::WebIde::Config::Entry::Global do
expect(global.terminal_value).to eq({
tag_list: [],
yaml_variables: [],
+ job_variables: [],
options: {
before_script: ['ls'],
script: ['sleep 10s'],
diff --git a/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb b/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb
index 0df0f56f440..d6d0fc4224d 100644
--- a/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb
+++ b/spec/lib/gitlab/web_ide/config/entry/terminal_spec.rb
@@ -132,7 +132,7 @@ RSpec.describe Gitlab::WebIde::Config::Entry::Terminal do
{ before_script: %w[ls pwd],
script: 'sleep 100',
tags: ['webide'],
- image: 'ruby:2.5',
+ image: 'ruby:3.0',
services: ['mysql'],
variables: { KEY: 'value' } }
end
@@ -142,8 +142,9 @@ RSpec.describe Gitlab::WebIde::Config::Entry::Terminal do
.to eq(
tag_list: ['webide'],
yaml_variables: [{ key: 'KEY', value: 'value', public: true }],
+ job_variables: [{ key: 'KEY', value: 'value', public: true }],
options: {
- image: { name: "ruby:2.5" },
+ image: { name: "ruby:3.0" },
services: [{ name: "mysql" }],
before_script: %w[ls pwd],
script: ['sleep 100']
diff --git a/spec/lib/gitlab/word_diff/chunk_collection_spec.rb b/spec/lib/gitlab/word_diff/chunk_collection_spec.rb
index aa837f760c1..73e9ff3974a 100644
--- a/spec/lib/gitlab/word_diff/chunk_collection_spec.rb
+++ b/spec/lib/gitlab/word_diff/chunk_collection_spec.rb
@@ -41,4 +41,27 @@ RSpec.describe Gitlab::WordDiff::ChunkCollection do
expect(collection.content).to eq('')
end
end
+
+ describe '#marker_ranges' do
+ let(:chunks) do
+ [
+ Gitlab::WordDiff::Segments::Chunk.new(' Hello '),
+ Gitlab::WordDiff::Segments::Chunk.new('-World'),
+ Gitlab::WordDiff::Segments::Chunk.new('+GitLab'),
+ Gitlab::WordDiff::Segments::Chunk.new('+!!!')
+ ]
+ end
+
+ it 'returns marker ranges for every chunk with changes' do
+ chunks.each { |chunk| collection.add(chunk) }
+
+ expect(collection.marker_ranges).to eq(
+ [
+ Gitlab::MarkerRange.new(6, 10, mode: :deletion),
+ Gitlab::MarkerRange.new(11, 16, mode: :addition),
+ Gitlab::MarkerRange.new(17, 19, mode: :addition)
+ ]
+ )
+ end
+ end
end
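# Illustrative aside (standalone sketch, not ChunkCollection itself): marker
# ranges are derived by walking the chunks, advancing an offset by each
# chunk's text length and emitting a range only for removed or added chunks.
Chunk = Struct.new(:raw) do
  def text
    raw[1..]
  end

  def removed?
    raw.start_with?('-')
  end

  def added?
    raw.start_with?('+')
  end
end

def marker_ranges(chunks)
  offset = 0
  chunks.filter_map do |chunk|
    from = offset
    to   = offset + chunk.text.length - 1
    offset += chunk.text.length
    next unless chunk.removed? || chunk.added?

    { from: from, to: to, mode: chunk.removed? ? :deletion : :addition }
  end
end

marker_ranges([Chunk.new(' Hello '), Chunk.new('-World'), Chunk.new('+GitLab')])
# => [{ from: 6, to: 10, mode: :deletion }, { from: 11, to: 16, mode: :addition }]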
diff --git a/spec/lib/gitlab/word_diff/parser_spec.rb b/spec/lib/gitlab/word_diff/parser_spec.rb
index 3aeefb57a02..e793e44fd45 100644
--- a/spec/lib/gitlab/word_diff/parser_spec.rb
+++ b/spec/lib/gitlab/word_diff/parser_spec.rb
@@ -36,15 +36,26 @@ RSpec.describe Gitlab::WordDiff::Parser do
aggregate_failures do
expect(diff_lines.count).to eq(7)
- expect(diff_lines.map(&:to_hash)).to match_array(
+ expect(diff_lines.map { |line| diff_line_attributes(line) }).to eq(
[
- a_hash_including(index: 0, old_pos: 1, new_pos: 1, text: '', type: nil),
- a_hash_including(index: 1, old_pos: 2, new_pos: 2, text: 'Unchanged line', type: nil),
- a_hash_including(index: 2, old_pos: 3, new_pos: 3, text: '', type: nil),
- a_hash_including(index: 3, old_pos: 4, new_pos: 4, text: 'Old changeNew addition unchanged content', type: nil),
- a_hash_including(index: 4, old_pos: 50, new_pos: 50, text: '@@ -50,14 +50,13 @@', type: 'match'),
- a_hash_including(index: 5, old_pos: 50, new_pos: 50, text: 'First change same same same_removed_added_end of the line', type: nil),
- a_hash_including(index: 6, old_pos: 51, new_pos: 51, text: '', type: nil)
+ { index: 0, old_pos: 1, new_pos: 1, text: '', type: nil, marker_ranges: [] },
+ { index: 1, old_pos: 2, new_pos: 2, text: 'Unchanged line', type: nil, marker_ranges: [] },
+ { index: 2, old_pos: 3, new_pos: 3, text: '', type: nil, marker_ranges: [] },
+ { index: 3, old_pos: 4, new_pos: 4, text: 'Old changeNew addition unchanged content', type: nil,
+ marker_ranges: [
+ Gitlab::MarkerRange.new(0, 9, mode: :deletion),
+ Gitlab::MarkerRange.new(10, 21, mode: :addition)
+ ] },
+
+ { index: 4, old_pos: 50, new_pos: 50, text: '@@ -50,14 +50,13 @@', type: 'match', marker_ranges: [] },
+ { index: 5, old_pos: 50, new_pos: 50, text: 'First change same same same_removed_added_end of the line', type: nil,
+ marker_ranges: [
+ Gitlab::MarkerRange.new(0, 11, mode: :addition),
+ Gitlab::MarkerRange.new(28, 35, mode: :deletion),
+ Gitlab::MarkerRange.new(36, 41, mode: :addition)
+ ] },
+
+ { index: 6, old_pos: 51, new_pos: 51, text: '', type: nil, marker_ranges: [] }
]
)
end
@@ -64,4 +75,17 @@ RSpec.describe Gitlab::WordDiff::Parser do
it { is_expected.to eq([]) }
end
end
+
+ private
+
+ def diff_line_attributes(diff_line)
+ {
+ index: diff_line.index,
+ old_pos: diff_line.old_pos,
+ new_pos: diff_line.new_pos,
+ text: diff_line.text,
+ type: diff_line.type,
+ marker_ranges: diff_line.marker_ranges
+ }
+ end
end
diff --git a/spec/lib/gitlab/workhorse_spec.rb b/spec/lib/gitlab/workhorse_spec.rb
index c22df5dd063..d40ecc7e04e 100644
--- a/spec/lib/gitlab/workhorse_spec.rb
+++ b/spec/lib/gitlab/workhorse_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Workhorse do
let_it_be(:project) { create(:project, :repository) }
+
let(:repository) { project.repository }
def decode_workhorse_header(array)
diff --git a/spec/lib/gitlab_spec.rb b/spec/lib/gitlab_spec.rb
index c5738ae730f..4df00eaa439 100644
--- a/spec/lib/gitlab_spec.rb
+++ b/spec/lib/gitlab_spec.rb
@@ -247,75 +247,117 @@ RSpec.describe Gitlab do
end
end
- describe '.ee?' do
+ describe 'ee? and jh?' do
before do
- stub_env('FOSS_ONLY', nil) # Make sure the ENV is clean
+ # Make sure the ENV is clean
+ stub_env('FOSS_ONLY', nil)
+ stub_env('EE_ONLY', nil)
+
described_class.instance_variable_set(:@is_ee, nil)
+ described_class.instance_variable_set(:@is_jh, nil)
end
after do
described_class.instance_variable_set(:@is_ee, nil)
+ described_class.instance_variable_set(:@is_jh, nil)
end
- context 'for EE' do
- before do
- root = Pathname.new('dummy')
- license_path = double(:path, exist?: true)
+ def stub_path(*paths, **arguments)
+ root = Pathname.new('dummy')
+ pathname = double(:path, **arguments)
- allow(described_class)
- .to receive(:root)
- .and_return(root)
+ allow(described_class)
+ .to receive(:root)
+ .and_return(root)
+ allow(root).to receive(:join)
+
+ paths.each do |path|
allow(root)
.to receive(:join)
- .with('ee/app/models/license.rb')
- .and_return(license_path)
+ .with(path)
+ .and_return(pathname)
end
+ end
- context 'when using FOSS_ONLY=1' do
+ describe '.ee?' do
+ context 'for EE' do
before do
- stub_env('FOSS_ONLY', '1')
+ stub_path('ee/app/models/license.rb', exist?: true)
end
- it 'returns not to be EE' do
- expect(described_class).not_to be_ee
+ context 'when using FOSS_ONLY=1' do
+ before do
+ stub_env('FOSS_ONLY', '1')
+ end
+
+ it 'returns not to be EE' do
+ expect(described_class).not_to be_ee
+ end
end
- end
- context 'when using FOSS_ONLY=0' do
- before do
- stub_env('FOSS_ONLY', '0')
+ context 'when using FOSS_ONLY=0' do
+ before do
+ stub_env('FOSS_ONLY', '0')
+ end
+
+ it 'returns to be EE' do
+ expect(described_class).to be_ee
+ end
end
- it 'returns to be EE' do
- expect(described_class).to be_ee
+ context 'when using default FOSS_ONLY' do
+ it 'returns to be EE' do
+ expect(described_class).to be_ee
+ end
end
end
- context 'when using default FOSS_ONLY' do
- it 'returns to be EE' do
- expect(described_class).to be_ee
+ context 'for CE' do
+ before do
+ stub_path('ee/app/models/license.rb', exist?: false)
+ end
+
+ it 'returns not to be EE' do
+ expect(described_class).not_to be_ee
end
end
end
- context 'for CE' do
- before do
- root = double(:path)
- license_path = double(:path, exists?: false)
+ describe '.jh?' do
+ context 'for JH' do
+ before do
+ stub_path(
+ 'ee/app/models/license.rb',
+ 'jh',
+ exist?: true)
+ end
- allow(described_class)
- .to receive(:root)
- .and_return(Pathname.new('dummy'))
+ context 'when using default FOSS_ONLY and EE_ONLY' do
+ it 'returns to be JH' do
+ expect(described_class).to be_jh
+ end
+ end
- allow(root)
- .to receive(:join)
- .with('ee/app/models/license.rb')
- .and_return(license_path)
- end
+ context 'when using FOSS_ONLY=1' do
+ before do
+ stub_env('FOSS_ONLY', '1')
+ end
+
+ it 'returns not to be JH' do
+ expect(described_class).not_to be_jh
+ end
+ end
+
+ context 'when using EE_ONLY=1' do
+ before do
+ stub_env('EE_ONLY', '1')
+ end
- it 'returns not to be EE' do
- expect(described_class).not_to be_ee
+ it 'returns not to be JH' do
+ expect(described_class).not_to be_jh
+ end
+ end
end
end
end
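# Illustrative aside (standalone sketch of the behaviour the rewritten specs
# pin down, not the memoized methods themselves): edition detection combines
# marker paths on disk with the FOSS_ONLY / EE_ONLY environment overrides.
require 'pathname'

def ee?(root: Pathname.new('.'), env: ENV)
  env['FOSS_ONLY'].to_s != '1' && root.join('ee/app/models/license.rb').exist?
end

def jh?(root: Pathname.new('.'), env: ENV)
  env['EE_ONLY'].to_s != '1' && ee?(root: root, env: env) && root.join('jh').exist?
end

jh?(env: { 'FOSS_ONLY' => '1' }) # => false, FOSS_ONLY wins even if jh/ exists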
diff --git a/spec/initializers/kramdown_patch_spec.rb b/spec/lib/kramdown/kramdown_spec.rb
index 49dda9252bb..986a8d9959e 100644
--- a/spec/initializers/kramdown_patch_spec.rb
+++ b/spec/lib/kramdown/kramdown_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Kramdown patch for syntax highlighting formatters' do
+RSpec.describe 'Ensure kramdown detects invalid syntax highlighting formatters' do
subject { Kramdown::Document.new(options + "\n" + code).to_html }
let(:code) do
diff --git a/spec/lib/marginalia_spec.rb b/spec/lib/marginalia_spec.rb
index 2ee27fbe20c..040f70236c6 100644
--- a/spec/lib/marginalia_spec.rb
+++ b/spec/lib/marginalia_spec.rb
@@ -3,18 +3,28 @@
require 'spec_helper'
RSpec.describe 'Marginalia spec' do
- class MarginaliaTestController < ActionController::Base
+ class MarginaliaTestController < ApplicationController
+ skip_before_action :authenticate_user!, :check_two_factor_requirement
+
def first_user
User.first
render body: nil
end
+
+ private
+
+ [:auth_user, :current_user, :set_experimentation_subject_id_cookie, :signed_in?].each do |method|
+ define_method(method) { }
+ end
end
class MarginaliaTestJob
include Sidekiq::Worker
def perform
- User.first
+ Gitlab::ApplicationContext.with_context(caller_id: self.class.name) do
+ User.first
+ end
end
end
@@ -30,10 +40,9 @@ RSpec.describe 'Marginalia spec' do
let(:component_map) do
{
- "application" => "test",
- "controller" => "marginalia_test",
- "action" => "first_user",
- "correlation_id" => correlation_id
+ "application" => "test",
+ "endpoint_id" => "MarginaliaTestController#first_user",
+ "correlation_id" => correlation_id
}
end
@@ -47,6 +56,7 @@ RSpec.describe 'Marginalia spec' do
describe 'for Sidekiq worker jobs' do
around do |example|
with_sidekiq_server_middleware do |chain|
+ chain.add Labkit::Middleware::Sidekiq::Context::Server
chain.add Marginalia::SidekiqInstrumentation::Middleware
Marginalia.application_name = "sidekiq"
example.run
@@ -66,10 +76,10 @@ RSpec.describe 'Marginalia spec' do
let(:component_map) do
{
- "application" => "sidekiq",
- "job_class" => "MarginaliaTestJob",
- "correlation_id" => sidekiq_job['correlation_id'],
- "jid" => sidekiq_job['jid']
+ "application" => "sidekiq",
+ "endpoint_id" => "MarginaliaTestJob",
+ "correlation_id" => sidekiq_job['correlation_id'],
+ "jid" => sidekiq_job['jid']
}
end
@@ -80,19 +90,33 @@ RSpec.describe 'Marginalia spec' do
end
describe 'for ActionMailer delivery jobs' do
+ # We need to ensure that this runs through Sidekiq to take
+ # advantage of the middleware. There is a Rails bug that means we
+ # have to do some extra steps to make this happen:
+ # https://github.com/rails/rails/issues/37270#issuecomment-553927324
+ around do |example|
+ descendants = ActiveJob::Base.descendants + [ActiveJob::Base]
+ descendants.each(&:disable_test_adapter)
+ ActiveJob::Base.queue_adapter = :sidekiq
+
+ example.run
+
+ descendants.each { |a| a.queue_adapter = :test }
+ end
+
let(:delivery_job) { MarginaliaTestMailer.first_user.deliver_later }
let(:recorded) do
ActiveRecord::QueryRecorder.new do
- delivery_job.perform_now
+ Sidekiq::Worker.drain_all
end
end
let(:component_map) do
{
- "application" => "sidekiq",
- "jid" => delivery_job.job_id,
- "job_class" => delivery_job.arguments.first
+ "application" => "sidekiq",
+ "endpoint_id" => "ActionMailer::MailDeliveryJob",
+ "jid" => delivery_job.job_id
}
end
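# Illustrative aside (simplified sketch, not Marginalia's own formatter): the
# component maps above end up as a comment appended to each SQL query, with
# endpoint_id now replacing the separate controller/action and job_class keys.
# The correlation id here is a made-up value.
def marginalia_comment(components)
  "/*#{components.map { |key, value| "#{key}:#{value}" }.join(',')}*/"
end

marginalia_comment(
  'application'    => 'test',
  'endpoint_id'    => 'MarginaliaTestController#first_user',
  'correlation_id' => 'abc123'
)
# => "/*application:test,endpoint_id:MarginaliaTestController#first_user,correlation_id:abc123*/"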
diff --git a/spec/lib/mattermost/command_spec.rb b/spec/lib/mattermost/command_spec.rb
index 26d1ec32232..0f2711e0b11 100644
--- a/spec/lib/mattermost/command_spec.rb
+++ b/spec/lib/mattermost/command_spec.rb
@@ -19,7 +19,7 @@ RSpec.describe Mattermost::Command do
trigger: 'gitlab' }
end
- subject { described_class.new(nil).create(params) }
+ subject { described_class.new(nil).create(params) } # rubocop:disable Rails/SaveBang
context 'for valid trigger word' do
before do
diff --git a/spec/lib/mattermost/session_spec.rb b/spec/lib/mattermost/session_spec.rb
index 93422b01ca7..67ccb48e3a7 100644
--- a/spec/lib/mattermost/session_spec.rb
+++ b/spec/lib/mattermost/session_spec.rb
@@ -39,7 +39,7 @@ RSpec.describe Mattermost::Session, type: :request do
context 'with oauth_uri' do
let!(:doorkeeper) do
- Doorkeeper::Application.create(
+ Doorkeeper::Application.create!(
name: 'GitLab Mattermost',
redirect_uri: "#{mattermost_url}/signup/gitlab/complete\n#{mattermost_url}/login/gitlab/complete",
scopes: '')
diff --git a/spec/lib/mattermost/team_spec.rb b/spec/lib/mattermost/team_spec.rb
index 0870114ca28..e3ef5ff5377 100644
--- a/spec/lib/mattermost/team_spec.rb
+++ b/spec/lib/mattermost/team_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe Mattermost::Team do
end
describe '#create' do
- subject { described_class.new(nil).create(name: "devteam", display_name: "Dev Team", type: "O") }
+ subject { described_class.new(nil).create(name: "devteam", display_name: "Dev Team", type: "O") } # rubocop:disable Rails/SaveBang
context 'for a new team' do
let(:response) do
diff --git a/spec/lib/peek/views/active_record_spec.rb b/spec/lib/peek/views/active_record_spec.rb
index dad5a2bf461..9eeeca4de61 100644
--- a/spec/lib/peek/views/active_record_spec.rb
+++ b/spec/lib/peek/views/active_record_spec.rb
@@ -5,14 +5,16 @@ require 'spec_helper'
RSpec.describe Peek::Views::ActiveRecord, :request_store do
subject { Peek.views.find { |v| v.instance_of?(Peek::Views::ActiveRecord) } }
- let(:connection) { double(:connection) }
+ let(:connection_1) { double(:connection) }
+ let(:connection_2) { double(:connection) }
+ let(:connection_3) { double(:connection) }
let(:event_1) do
{
name: 'SQL',
sql: 'SELECT * FROM users WHERE id = 10',
cached: false,
- connection: connection
+ connection: connection_1
}
end
@@ -21,7 +23,7 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
name: 'SQL',
sql: 'SELECT * FROM users WHERE id = 10',
cached: true,
- connection: connection
+ connection: connection_2
}
end
@@ -30,12 +32,15 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
name: 'SQL',
sql: 'UPDATE users SET admin = true WHERE id = 10',
cached: false,
- connection: connection
+ connection: connection_3
}
end
before do
allow(Gitlab::PerformanceBar).to receive(:enabled_for_request?).and_return(true)
+ allow(connection_1).to receive(:transaction_open?).and_return(false)
+ allow(connection_2).to receive(:transaction_open?).and_return(false)
+ allow(connection_3).to receive(:transaction_open?).and_return(true)
end
it 'subscribes and store data into peek views' do
@@ -46,22 +51,32 @@ RSpec.describe Peek::Views::ActiveRecord, :request_store do
end
expect(subject.results).to match(
- calls: '3 (1 cached)',
+ calls: 3,
+ summary: {
+ "Cached" => 1,
+ "In a transaction" => 1
+ },
duration: '6000.00ms',
warnings: ["active-record duration: 6000.0 over 3000"],
details: contain_exactly(
a_hash_including(
+ start: be_a(Time),
cached: '',
+ transaction: '',
duration: 1000.0,
sql: 'SELECT * FROM users WHERE id = 10'
),
a_hash_including(
- cached: 'cached',
+ start: be_a(Time),
+ cached: 'Cached',
+ transaction: '',
duration: 2000.0,
sql: 'SELECT * FROM users WHERE id = 10'
),
a_hash_including(
+ start: be_a(Time),
cached: '',
+ transaction: 'In a transaction',
duration: 3000.0,
sql: 'UPDATE users SET admin = true WHERE id = 10'
)
diff --git a/spec/lib/peek/views/external_http_spec.rb b/spec/lib/peek/views/external_http_spec.rb
index 98c4f771f33..18ae1326493 100644
--- a/spec/lib/peek/views/external_http_spec.rb
+++ b/spec/lib/peek/views/external_http_spec.rb
@@ -11,6 +11,10 @@ RSpec.describe Peek::Views::ExternalHttp, :request_store do
allow(Gitlab::PerformanceBar).to receive(:enabled_for_request?).and_return(true)
end
+ around do |example|
+ freeze_time { example.run }
+ end
+
let(:event_1) do
{
method: 'POST', code: "200", duration: 0.03,
@@ -44,9 +48,9 @@ RSpec.describe Peek::Views::ExternalHttp, :request_store do
end
it 'returns aggregated results' do
- subscriber.request(double(:event, payload: event_1))
- subscriber.request(double(:event, payload: event_2))
- subscriber.request(double(:event, payload: event_3))
+ subscriber.request(double(:event, payload: event_1, time: Time.current))
+ subscriber.request(double(:event, payload: event_2, time: Time.current))
+ subscriber.request(double(:event, payload: event_3, time: Time.current))
results = subject.results
expect(results[:calls]).to eq(3)
@@ -55,6 +59,7 @@ RSpec.describe Peek::Views::ExternalHttp, :request_store do
expected = [
{
+ start: be_like_time(Time.current),
duration: 30.0,
label: "POST https://gitlab.com:80/api/v4/projects?current=true",
code: "Response status: 200",
@@ -63,6 +68,7 @@ RSpec.describe Peek::Views::ExternalHttp, :request_store do
warnings: []
},
{
+ start: be_like_time(Time.current),
duration: 1300,
label: "POST http://gitlab.com:80/api/v4/projects/2/issues?current=true",
code: nil,
@@ -71,6 +77,7 @@ RSpec.describe Peek::Views::ExternalHttp, :request_store do
warnings: ["1300.0 over 100"]
},
{
+ start: be_like_time(Time.current),
duration: 5.0,
label: "GET http://gitlab.com:80/api/v4/projects/2?current=true",
code: "Response status: 301",
@@ -81,7 +88,7 @@ RSpec.describe Peek::Views::ExternalHttp, :request_store do
]
expect(
- results[:details].map { |data| data.slice(:duration, :label, :code, :proxy, :error, :warnings) }
+ results[:details].map { |data| data.slice(:start, :duration, :label, :code, :proxy, :error, :warnings) }
).to match_array(expected)
end
@@ -91,10 +98,11 @@ RSpec.describe Peek::Views::ExternalHttp, :request_store do
end
it 'displays IPv4 in the label' do
- subscriber.request(double(:event, payload: event_1))
+ subscriber.request(double(:event, payload: event_1, time: Time.current))
expect(subject.results[:details]).to contain_exactly(
a_hash_including(
+ start: be_like_time(Time.current),
duration: 30.0,
label: "POST https://1.2.3.4:80/api/v4/projects?current=true",
code: "Response status: 200",
@@ -112,10 +120,11 @@ RSpec.describe Peek::Views::ExternalHttp, :request_store do
end
it 'displays IPv6 in the label' do
- subscriber.request(double(:event, payload: event_1))
+ subscriber.request(double(:event, payload: event_1, time: Time.current))
expect(subject.results[:details]).to contain_exactly(
a_hash_including(
+ start: be_like_time(Time.current),
duration: 30.0,
label: "POST https://[2606:4700:90:0:f22e:fbec:5bed:a9b9]:80/api/v4/projects?current=true",
code: "Response status: 200",
@@ -133,10 +142,11 @@ RSpec.describe Peek::Views::ExternalHttp, :request_store do
end
it 'converts query hash into a query string' do
- subscriber.request(double(:event, payload: event_1))
+ subscriber.request(double(:event, payload: event_1, time: Time.current))
expect(subject.results[:details]).to contain_exactly(
a_hash_including(
+ start: be_like_time(Time.current),
duration: 30.0,
label: "POST https://gitlab.com:80/api/v4/projects?current=true&item1=string&item2%5B%5D=1&item2%5B%5D=2",
code: "Response status: 200",
@@ -154,10 +164,11 @@ RSpec.describe Peek::Views::ExternalHttp, :request_store do
end
it 'displays unknown in the label' do
- subscriber.request(double(:event, payload: event_1))
+ subscriber.request(double(:event, payload: event_1, time: Time.current))
expect(subject.results[:details]).to contain_exactly(
a_hash_including(
+ start: be_like_time(Time.current),
duration: 30.0,
label: "POST unknown",
code: "Response status: 200",
@@ -176,10 +187,11 @@ RSpec.describe Peek::Views::ExternalHttp, :request_store do
end
it 'displays unknown in the label' do
- subscriber.request(double(:event, payload: event_1))
+ subscriber.request(double(:event, payload: event_1, time: Time.current))
expect(subject.results[:details]).to contain_exactly(
a_hash_including(
+ start: be_like_time(Time.current),
duration: 30.0,
label: "POST unknown",
code: "Response status: 200",
@@ -198,10 +210,11 @@ RSpec.describe Peek::Views::ExternalHttp, :request_store do
end
it 'displays unknown in the label' do
- subscriber.request(double(:event, payload: event_1))
+ subscriber.request(double(:event, payload: event_1, time: Time.current))
expect(subject.results[:details]).to contain_exactly(
a_hash_including(
+ start: be_like_time(Time.current),
duration: 30.0,
label: "POST unknown",
code: "Response status: 200",
diff --git a/spec/lib/rouge/formatters/html_gitlab_spec.rb b/spec/lib/rouge/formatters/html_gitlab_spec.rb
new file mode 100644
index 00000000000..d45c8c2a8c5
--- /dev/null
+++ b/spec/lib/rouge/formatters/html_gitlab_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Rouge::Formatters::HTMLGitlab do
+ describe '#format' do
+ subject { described_class.format(tokens, options) }
+
+ let(:lang) { 'ruby' }
+ let(:lexer) { Rouge::Lexer.find_fancy(lang) }
+ let(:tokens) { lexer.lex("def hello", continue: false) }
+ let(:options) { { tag: lang } }
+
+ it 'returns highlighted ruby code' do
+ code = %q{<span id="LC1" class="line" lang="ruby"><span class="k">def</span> <span class="nf">hello</span></span>}
+
+ is_expected.to eq(code)
+ end
+
+ context 'when options are empty' do
+ let(:options) { {} }
+
+ it 'returns highlighted code without language' do
+ code = %q{<span id="LC1" class="line" lang=""><span class="k">def</span> <span class="nf">hello</span></span>}
+
+ is_expected.to eq(code)
+ end
+ end
+
+ context 'when line number is provided' do
+ let(:options) { { tag: lang, line_number: 10 } }
+
+ it 'returns highlighted ruby code with correct line number' do
+ code = %q{<span id="LC10" class="line" lang="ruby"><span class="k">def</span> <span class="nf">hello</span></span>}
+
+ is_expected.to eq(code)
+ end
+ end
+ end
+end
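# Illustrative aside (assumes only the rouge gem and uses Rouge's stock HTML
# formatter rather than the HTMLGitlab subclass under test): the inner spans
# asserted above come straight from Rouge's token classes.
require 'rouge'

lexer  = Rouge::Lexer.find_fancy('ruby')
tokens = lexer.lex('def hello')

puts Rouge::Formatters::HTML.new.format(tokens)
# => <span class="k">def</span> <span class="nf">hello</span>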
diff --git a/spec/mailers/devise_mailer_spec.rb b/spec/mailers/devise_mailer_spec.rb
index c9dfee8255d..2634d7c722b 100644
--- a/spec/mailers/devise_mailer_spec.rb
+++ b/spec/mailers/devise_mailer_spec.rb
@@ -94,4 +94,36 @@ RSpec.describe DeviseMailer do
is_expected.to have_link(Gitlab.config.gitlab.url)
end
end
+
+ describe '#reset_password_instructions' do
+ subject { described_class.reset_password_instructions(user, 'faketoken') }
+
+ let_it_be(:user) { create(:user) }
+
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+
+ it 'is sent to the user' do
+ is_expected.to deliver_to user.email
+ end
+
+ it 'has the correct subject' do
+ is_expected.to have_subject 'Reset password instructions'
+ end
+
+ it 'greets the user' do
+ is_expected.to have_body_text /Hello, #{user.name}!/
+ end
+
+ it 'includes the correct content' do
+ is_expected.to have_text /Someone, hopefully you, has requested to reset the password for your GitLab account on #{Gitlab.config.gitlab.url}/
+ is_expected.to have_body_text /If you did not perform this request, you can safely ignore this email./
+ is_expected.to have_body_text /Otherwise, click the link below to complete the process./
+ end
+
+ it 'includes a link to reset the password' do
+ is_expected.to have_link("Reset password", href: "#{Gitlab.config.gitlab.url}/users/password/edit?reset_password_token=faketoken")
+ end
+ end
end
diff --git a/spec/mailers/emails/in_product_marketing_spec.rb b/spec/mailers/emails/in_product_marketing_spec.rb
index e4157eaf5dc..25735e64bdf 100644
--- a/spec/mailers/emails/in_product_marketing_spec.rb
+++ b/spec/mailers/emails/in_product_marketing_spec.rb
@@ -13,6 +13,38 @@ RSpec.describe Emails::InProductMarketing do
describe '#in_product_marketing_email' do
using RSpec::Parameterized::TableSyntax
+ let(:track) { :create }
+ let(:series) { 0 }
+
+ subject { Notify.in_product_marketing_email(user.id, group.id, track, series) }
+
+ include_context 'gitlab email notification'
+
+ it 'sends to the right user with a link to unsubscribe' do
+ aggregate_failures do
+ expect(subject).to deliver_to(user.notification_email)
+ expect(subject).to have_body_text(profile_notifications_url)
+ end
+ end
+
+ context 'when on gitlab.com' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ it 'has custom headers' do
+ aggregate_failures do
+ expect(subject).to deliver_from(described_class::FROM_ADDRESS)
+ expect(subject).to reply_to(described_class::FROM_ADDRESS)
+ expect(subject).to have_header('X-Mailgun-Track', 'yes')
+ expect(subject).to have_header('X-Mailgun-Track-Clicks', 'yes')
+ expect(subject).to have_header('X-Mailgun-Track-Opens', 'yes')
+ expect(subject).to have_header('X-Mailgun-Tag', 'marketing')
+ expect(subject).to have_body_text('%tag_unsubscribe_url%')
+ end
+ end
+ end
+
where(:track, :series) do
:create | 0
:create | 1
@@ -29,8 +61,6 @@ RSpec.describe Emails::InProductMarketing do
end
with_them do
- subject { Notify.in_product_marketing_email(user.id, group.id, track, series) }
-
it 'has the correct subject and content' do
aggregate_failures do
is_expected.to have_subject(subject_line(track, series))
diff --git a/spec/mailers/emails/merge_requests_spec.rb b/spec/mailers/emails/merge_requests_spec.rb
index 0c0dae6d7e6..dea54f7315d 100644
--- a/spec/mailers/emails/merge_requests_spec.rb
+++ b/spec/mailers/emails/merge_requests_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe Emails::MergeRequests do
aggregate_failures do
is_expected.to have_referable_subject(merge_request, reply: true)
is_expected.to have_body_text(project_merge_request_path(project, merge_request))
- is_expected.to have_body_text('You have been mentioned in Merge Request')
+ is_expected.to have_body_text('You have been mentioned in merge request')
is_expected.to have_link(merge_request.to_reference, href: project_merge_request_url(merge_request.target_project, merge_request))
is_expected.to have_text_part_content(assignee.name)
is_expected.to have_text_part_content(reviewer.name)
@@ -55,9 +55,7 @@ RSpec.describe Emails::MergeRequests do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the merge request author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(merge_request.author.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(merge_request.author)
end
it 'has the correct subject and body' do
@@ -85,9 +83,7 @@ RSpec.describe Emails::MergeRequests do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(current_user.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(current_user)
end
it 'has the correct subject and body' do
@@ -120,9 +116,7 @@ RSpec.describe Emails::MergeRequests do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the merge author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(merge_author.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(merge_author)
end
it 'has the correct subject and body' do
@@ -153,9 +147,7 @@ RSpec.describe Emails::MergeRequests do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(current_user.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(current_user)
end
it 'has the correct subject and body' do
@@ -229,4 +221,10 @@ RSpec.describe Emails::MergeRequests do
it { expect(subject).to have_content('attachment has been truncated to avoid exceeding the maximum allowed attachment size of 15 MB.') }
end
end
+
+ def expect_sender(user)
+ sender = subject.header[:from].addrs[0]
+ expect(sender.display_name).to eq("#{user.name} (@#{user.username})")
+ expect(sender.address).to eq(gitlab_sender)
+ end
end
diff --git a/spec/mailers/emails/profile_spec.rb b/spec/mailers/emails/profile_spec.rb
index a32e566fc90..8ac1f15d67e 100644
--- a/spec/mailers/emails/profile_spec.rb
+++ b/spec/mailers/emails/profile_spec.rb
@@ -212,6 +212,106 @@ RSpec.describe Emails::Profile do
end
end
+ describe 'SSH key notification' do
+ let_it_be_with_reload(:user) { create(:user) }
+ let_it_be(:fingerprints) { ["aa:bb:cc:dd:ee:zz"] }
+
+ shared_examples 'is sent to the user' do
+ it { is_expected.to deliver_to user.email }
+ end
+
+ shared_examples 'has the correct subject' do |subject_text|
+ it { is_expected.to have_subject subject_text }
+ end
+
+ shared_examples 'has the correct body text' do |body_text|
+ it { is_expected.to have_body_text body_text }
+ end
+
+ shared_examples 'includes a link to ssh key page' do
+ it { is_expected.to have_body_text /#{profile_keys_url}/ }
+ end
+
+ shared_examples 'includes the email reason' do
+ it { is_expected.to have_body_text /You're receiving this email because of your account on localhost/ }
+ end
+
+ shared_examples 'valid use case' do
+ it_behaves_like 'an email sent from GitLab'
+ it_behaves_like 'it should not have Gmail Actions links'
+ it_behaves_like 'a user cannot unsubscribe through footer link'
+ it_behaves_like 'is sent to the user'
+ it_behaves_like 'includes a link to ssh key page'
+ it_behaves_like 'includes the email reason'
+ end
+
+ shared_examples 'does not send email' do
+ it do
+ expect { subject }.not_to change { ActionMailer::Base.deliveries.count }
+ end
+ end
+
+ shared_context 'block user' do
+ before do
+ user.block!
+ end
+ end
+
+ context 'notification email for expired ssh key' do
+ context 'when valid' do
+ subject { Notify.ssh_key_expired_email(user, fingerprints) }
+
+ include_examples 'valid use case'
+
+ it_behaves_like 'has the correct subject', /Your SSH key has expired/
+ it_behaves_like 'has the correct body text', /Your SSH keys with the following fingerprints has expired/
+ end
+
+ context 'when invalid' do
+ context 'when user does not exist' do
+ subject { Notify.ssh_key_expired_email(nil, fingerprints) }
+
+ it_behaves_like 'does not send email'
+ end
+
+ context 'when user is not active' do
+ subject { Notify.ssh_key_expired_email(user, fingerprints) }
+
+ include_context 'block user'
+
+ it_behaves_like 'does not send email'
+ end
+ end
+ end
+
+ context 'notification email for expiring ssh key' do
+ context 'when valid' do
+ subject { Notify.ssh_key_expiring_soon_email(user, fingerprints) }
+
+ include_examples 'valid use case'
+
+ it_behaves_like 'has the correct subject', /Your SSH key is expiring soon/
+ it_behaves_like 'has the correct body text', /Your SSH keys with the following fingerprints are scheduled to expire soon/
+ end
+
+ context 'when invalid' do
+ context 'when user does not exist' do
+ subject { Notify.ssh_key_expiring_soon_email(nil, fingerprints) }
+
+ it_behaves_like 'does not send email'
+ end
+
+ context 'when user is not active' do
+ subject { Notify.ssh_key_expiring_soon_email(user, fingerprints) }
+
+ include_context 'block user'
+
+ it_behaves_like 'does not send email'
+ end
+ end
+ end
+ end
+
describe 'user unknown sign in email' do
let_it_be(:user) { create(:user) }
let_it_be(:ip) { '169.0.0.1' }
diff --git a/spec/mailers/emails/projects_spec.rb b/spec/mailers/emails/projects_spec.rb
index a1f19a972f1..a5b89d16bc2 100644
--- a/spec/mailers/emails/projects_spec.rb
+++ b/spec/mailers/emails/projects_spec.rb
@@ -81,6 +81,7 @@ RSpec.describe Emails::Projects do
context 'with environment' do
let_it_be(:environment) { create(:environment, project: project) }
+
let(:payload) { { 'gitlab_environment_name' => environment.name } }
let(:metrics_url) { metrics_project_environment_url(project, environment) }
diff --git a/spec/mailers/emails/releases_spec.rb b/spec/mailers/emails/releases_spec.rb
index 6ee87724c83..287971d35a8 100644
--- a/spec/mailers/emails/releases_spec.rb
+++ b/spec/mailers/emails/releases_spec.rb
@@ -10,6 +10,7 @@ RSpec.describe Emails::Releases do
describe '#new_release_email' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
+
let(:release) { create(:release, project: project) }
subject { Notify.new_release_email(user.id, release) }
diff --git a/spec/mailers/emails/service_desk_spec.rb b/spec/mailers/emails/service_desk_spec.rb
index cb74194020d..57fa990d399 100644
--- a/spec/mailers/emails/service_desk_spec.rb
+++ b/spec/mailers/emails/service_desk_spec.rb
@@ -14,6 +14,7 @@ RSpec.describe Emails::ServiceDesk do
let_it_be(:project) { create(:project) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:email) { 'someone@gitlab.com' }
+
let(:template) { double(content: template_content) }
before_all do
diff --git a/spec/mailers/notify_spec.rb b/spec/mailers/notify_spec.rb
index 79358d3e40c..94a081ae0c9 100644
--- a/spec/mailers/notify_spec.rb
+++ b/spec/mailers/notify_spec.rb
@@ -69,11 +69,8 @@ RSpec.describe Notify do
it_behaves_like 'an email sent to a user'
it 'is sent to the assignee as the author' do
- sender = subject.header[:from].addrs.first
-
aggregate_failures do
- expect(sender.display_name).to eq(current_user.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(current_user)
expect(subject).to deliver_to(recipient.notification_email)
end
end
@@ -146,9 +143,7 @@ RSpec.describe Notify do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(current_user.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(current_user)
end
it 'has the correct subject and body' do
@@ -187,9 +182,7 @@ RSpec.describe Notify do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(current_user.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(current_user)
end
it 'has the correct subject and body' do
@@ -251,9 +244,7 @@ RSpec.describe Notify do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(current_user.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(current_user)
end
it 'has the correct subject and body' do
@@ -389,9 +380,7 @@ RSpec.describe Notify do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(current_user.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(current_user)
end
it 'has the correct subject and body' do
@@ -456,9 +445,7 @@ RSpec.describe Notify do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(current_user.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(current_user)
end
it 'has the correct subject and body' do
@@ -486,10 +473,7 @@ RSpec.describe Notify do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the push user' do
- sender = subject.header[:from].addrs[0]
-
- expect(sender.display_name).to eq(push_user.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(push_user)
end
it 'has the correct subject and body' do
@@ -815,14 +799,14 @@ RSpec.describe Notify do
is_expected.to have_link('Join now', href: invite_url(project_member.invite_token, invite_type: Members::InviteEmailExperiment::INVITE_TYPE))
end
- it 'contains invite link for the avatar', :experiment do
+ it 'contains invite link for the avatar' do
stub_experiments('members/invite_email': :avatar)
is_expected.not_to have_content('You are invited!')
is_expected.not_to have_body_text 'What is a GitLab'
end
- it 'contains invite link for the avatar', :experiment do
+ it 'contains invite link for the avatar' do
stub_experiments('members/invite_email': :permission_info)
is_expected.not_to have_content('You are invited!')
@@ -1002,11 +986,8 @@ RSpec.describe Notify do
it_behaves_like 'it should have Gmail Actions links'
it 'is sent to the given recipient as the author' do
- sender = subject.header[:from].addrs[0]
-
aggregate_failures do
- expect(sender.display_name).to eq(note_author.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(note_author)
expect(subject).to deliver_to(recipient.notification_email)
end
end
@@ -1162,11 +1143,8 @@ RSpec.describe Notify do
it_behaves_like 'it should have Gmail Actions links'
it 'is sent to the given recipient as the author' do
- sender = subject.header[:from].addrs[0]
-
aggregate_failures do
- expect(sender.display_name).to eq(note_author.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(note_author)
expect(subject).to deliver_to(recipient.notification_email)
end
end
@@ -1221,12 +1199,6 @@ RSpec.describe Notify do
issue.issue_email_participants.create!(email: 'service.desk@example.com')
end
- def expect_sender(username)
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(username)
- expect(sender.address).to eq(gitlab_sender)
- end
-
describe 'thank you email' do
subject { described_class.service_desk_thank_you_email(issue.id) }
@@ -1244,14 +1216,16 @@ RSpec.describe Notify do
end
it 'uses service bot name by default' do
- expect_sender(User.support_bot.name)
+ expect_sender(User.support_bot)
end
context 'when custom outgoing name is set' do
let_it_be(:settings) { create(:service_desk_setting, project: project, outgoing_name: 'some custom name') }
it 'uses custom name in "from" header' do
- expect_sender('some custom name')
+ sender = subject.header[:from].addrs[0]
+ expect(sender.display_name).to eq('some custom name')
+ expect(sender.address).to eq(gitlab_sender)
end
end
@@ -1259,7 +1233,7 @@ RSpec.describe Notify do
let_it_be(:settings) { create(:service_desk_setting, project: project, outgoing_name: '') }
it 'uses service bot name' do
- expect_sender(User.support_bot.name)
+ expect_sender(User.support_bot)
end
end
end
@@ -1276,7 +1250,7 @@ RSpec.describe Notify do
end
it 'uses author\'s name in "from" header' do
- expect_sender(first_note.author.name)
+ expect_sender(first_note.author)
end
it 'has the correct subject and body' do
@@ -1672,9 +1646,7 @@ RSpec.describe Notify do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(user.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(user)
end
it 'has the correct subject and body' do
@@ -1699,9 +1671,7 @@ RSpec.describe Notify do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(user.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(user)
end
it 'has the correct subject and body' do
@@ -1725,9 +1695,7 @@ RSpec.describe Notify do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(user.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(user)
end
it 'has the correct subject' do
@@ -1748,9 +1716,7 @@ RSpec.describe Notify do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(user.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(user)
end
it 'has the correct subject' do
@@ -1777,9 +1743,7 @@ RSpec.describe Notify do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(user.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(user)
end
it 'has the correct subject and body' do
@@ -1870,9 +1834,7 @@ RSpec.describe Notify do
it_behaves_like 'appearance header and footer not enabled'
it 'is sent as the author' do
- sender = subject.header[:from].addrs[0]
- expect(sender.display_name).to eq(user.name)
- expect(sender.address).to eq(gitlab_sender)
+ expect_sender(user)
end
it 'has the correct subject and body' do
@@ -1962,12 +1924,8 @@ RSpec.describe Notify do
it_behaves_like 'an unsubscribeable thread'
it 'is sent to the given recipient as the author' do
- sender = subject.header[:from].addrs[0]
-
aggregate_failures do
- expect(sender.display_name).to eq(review.author_name)
- expect(sender.address).to eq(gitlab_sender)
- expect(subject).to deliver_to(recipient.notification_email)
+ expect_sender(review.author)
end
end
@@ -2002,4 +1960,10 @@ RSpec.describe Notify do
end
end
end
+
+ def expect_sender(user)
+ sender = subject.header[:from].addrs[0]
+ expect(sender.display_name).to eq("#{user.name} (@#{user.username})")
+ expect(sender.address).to eq(gitlab_sender)
+ end
end
diff --git a/spec/migrations/20210226141517_dedup_issue_metrics_spec.rb b/spec/migrations/20210226141517_dedup_issue_metrics_spec.rb
new file mode 100644
index 00000000000..043884eb7b2
--- /dev/null
+++ b/spec/migrations/20210226141517_dedup_issue_metrics_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20210226141517_dedup_issue_metrics.rb')
+
+RSpec.describe DedupIssueMetrics, :migration, schema: 20210205104425 do
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:issues) { table(:issues) }
+ let(:metrics) { table(:issue_metrics) }
+ let(:issue_params) { { title: 'title', project_id: project.id } }
+
+ let!(:namespace) { namespaces.create!(name: 'foo', path: 'foo') }
+ let!(:project) { projects.create!(namespace_id: namespace.id) }
+ let!(:issue_1) { issues.create!(issue_params) }
+ let!(:issue_2) { issues.create!(issue_params) }
+ let!(:issue_3) { issues.create!(issue_params) }
+
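+ # Based on the expectations below, the migration appears to keep the most recently updated duplicate and to copy missing timestamps from the removed row.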
+ let!(:duplicated_metrics_1) { metrics.create!(issue_id: issue_1.id, first_mentioned_in_commit_at: 1.day.ago, first_added_to_board_at: 5.days.ago, updated_at: 2.months.ago) }
+ let!(:duplicated_metrics_2) { metrics.create!(issue_id: issue_1.id, first_mentioned_in_commit_at: Time.now, first_associated_with_milestone_at: Time.now, updated_at: 1.month.ago) }
+
+ let!(:duplicated_metrics_3) { metrics.create!(issue_id: issue_3.id, first_mentioned_in_commit_at: 1.day.ago, updated_at: 2.months.ago) }
+ let!(:duplicated_metrics_4) { metrics.create!(issue_id: issue_3.id, first_added_to_board_at: 1.day.ago, updated_at: 1.month.ago) }
+
+ let!(:non_duplicated_metrics) { metrics.create!(issue_id: issue_2.id, first_added_to_board_at: 2.days.ago) }
+
+ it 'deduplicates issue_metrics table' do
+ expect { migrate! }.to change { metrics.count }.from(5).to(3)
+ end
+
+ it 'merges `duplicated_metrics_1` with `duplicated_metrics_2`' do
+ migrate!
+
+ expect(metrics.where(id: duplicated_metrics_1.id)).not_to exist
+
+ merged_metrics = metrics.find_by(id: duplicated_metrics_2.id)
+
+ expect(merged_metrics).to be_present
+ expect(merged_metrics.first_mentioned_in_commit_at).to be_like_time(duplicated_metrics_2.first_mentioned_in_commit_at)
+ expect(merged_metrics.first_added_to_board_at).to be_like_time(duplicated_metrics_1.first_added_to_board_at)
+ end
+
+ it 'merges `duplicated_metrics_3` with `duplicated_metrics_4`' do
+ migrate!
+
+ expect(metrics.where(id: duplicated_metrics_3.id)).not_to exist
+
+ merged_metrics = metrics.find_by(id: duplicated_metrics_4.id)
+
+ expect(merged_metrics).to be_present
+ expect(merged_metrics.first_mentioned_in_commit_at).to be_like_time(duplicated_metrics_3.first_mentioned_in_commit_at)
+ expect(merged_metrics.first_added_to_board_at).to be_like_time(duplicated_metrics_4.first_added_to_board_at)
+ end
+
+ it 'does not change non-duplicated records' do
+ expect { migrate! }.not_to change { non_duplicated_metrics.reload.attributes }
+ end
+
+ it 'does nothing when there are no metrics' do
+ metrics.delete_all
+
+ migrate!
+
+ expect(metrics.count).to eq(0)
+ end
+end
diff --git a/spec/migrations/20210406144743_backfill_total_tuple_count_for_batched_migrations_spec.rb b/spec/migrations/20210406144743_backfill_total_tuple_count_for_batched_migrations_spec.rb
new file mode 100644
index 00000000000..e1727cb2a1c
--- /dev/null
+++ b/spec/migrations/20210406144743_backfill_total_tuple_count_for_batched_migrations_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20210406144743_backfill_total_tuple_count_for_batched_migrations.rb')
+
+RSpec.describe BackfillTotalTupleCountForBatchedMigrations, :migration, schema: 20210406140057 do
+ let_it_be(:table_name) { 'projects' }
+
+ let_it_be(:migrations) { table(:batched_background_migrations) }
+
+ let_it_be(:migration) do
+ migrations.create!(
+ created_at: Time.now,
+ updated_at: Time.now,
+ min_value: 1,
+ max_value: 10_000,
+ batch_size: 1_000,
+ sub_batch_size: 100,
+ interval: 120,
+ status: 0,
+ job_class_name: 'Foo',
+ table_name: table_name,
+ column_name: :id,
+ total_tuple_count: nil
+ )
+ end
+
+ describe '#up' do
+ before do
+ expect(Gitlab::Database::PgClass).to receive(:for_table).with(table_name).and_return(estimate)
+ end
+
+ let(:estimate) { double('estimate', cardinality_estimate: 42) }
+
+ it 'updates total_tuple_count attribute' do
+ migrate!
+
+ migrations.all.each do |migration|
+ expect(migration.total_tuple_count).to eq(estimate.cardinality_estimate)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/20210413132500_reschedule_artifact_expiry_backfill_again_spec.rb b/spec/migrations/20210413132500_reschedule_artifact_expiry_backfill_again_spec.rb
new file mode 100644
index 00000000000..4f36a95f9cf
--- /dev/null
+++ b/spec/migrations/20210413132500_reschedule_artifact_expiry_backfill_again_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require Rails.root.join('db', 'post_migrate', '20210413132500_reschedule_artifact_expiry_backfill_again.rb')
+
+RSpec.describe RescheduleArtifactExpiryBackfillAgain, :migration do
+ let(:migration_class) { Gitlab::BackgroundMigration::BackfillArtifactExpiryDate }
+ let(:migration_name) { migration_class.to_s.demodulize }
+
+ before do
+ table(:namespaces).create!(id: 123, name: 'test_namespace', path: 'test_namespace')
+ table(:projects).create!(id: 123, name: 'sample_project', path: 'sample_project', namespace_id: 123)
+ end
+
+ it 'correctly schedules background migrations' do
+ first_artifact = create_artifact(job_id: 0, expire_at: nil, created_at: Date.new(2020, 06, 21))
+ second_artifact = create_artifact(job_id: 1, expire_at: nil, created_at: Date.new(2020, 06, 21))
+ create_artifact(job_id: 2, expire_at: Date.yesterday, created_at: Date.new(2020, 06, 21))
+ create_artifact(job_id: 3, expire_at: nil, created_at: Date.new(2020, 06, 23))
+
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ migrate!
+
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ expect(migration_name).to be_scheduled_migration_with_multiple_args(first_artifact.id, second_artifact.id)
+ end
+ end
+ end
+
+ private
+
+ def create_artifact(params)
+ table(:ci_builds).create!(id: params[:job_id], project_id: 123)
+ table(:ci_job_artifacts).create!(project_id: 123, file_type: 1, **params)
+ end
+end
diff --git a/spec/migrations/add_new_trail_plans_spec.rb b/spec/migrations/add_new_trail_plans_spec.rb
new file mode 100644
index 00000000000..8ba6da11ad1
--- /dev/null
+++ b/spec/migrations/add_new_trail_plans_spec.rb
@@ -0,0 +1,95 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe AddNewTrailPlans, :migration do
+ describe '#up' do
+ before do
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return true
+ end
+
+ it 'creates 2 entries within the plans table' do
+ expect { migrate! }.to change { AddNewTrailPlans::Plan.count }.by 2
+ expect(AddNewTrailPlans::Plan.last(2).pluck(:name)).to match_array(%w(ultimate_trial premium_trial))
+ end
+
+ it 'creates 2 entries for plan limits' do
+ expect { migrate! }.to change { AddNewTrailPlans::PlanLimits.count }.by 2
+ end
+
+ context 'when the plan limits for gold and silver exist' do
+ before do
+ table(:plans).create!(id: 1, name: 'gold', title: 'Gold')
+ table(:plan_limits).create!(id: 1, plan_id: 1, storage_size_limit: 2000)
+ table(:plans).create!(id: 2, name: 'silver', title: 'Silver')
+ table(:plan_limits).create!(id: 2, plan_id: 2, storage_size_limit: 1000)
+ end
+
+ it 'duplicates the gold and silver plan limits entries' do
+ migrate!
+
+ ultimate_plan_limits = AddNewTrailPlans::Plan.find_by(name: 'ultimate_trial').limits
+ expect(ultimate_plan_limits.storage_size_limit).to be 2000
+
+ premium_plan_limits = AddNewTrailPlans::Plan.find_by(name: 'premium_trial').limits
+ expect(premium_plan_limits.storage_size_limit).to be 1000
+ end
+ end
+
+ context 'when the instance is not SaaS' do
+ before do
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return false
+ end
+
+ it 'does not create plans and plan limits and returns' do
+ expect { migrate! }.not_to change { AddNewTrailPlans::Plan.count }
+ expect { migrate! }.not_to change { AddNewTrailPlans::PlanLimits.count }
+ end
+ end
+ end
+
+ describe '#down' do
+ before do
+ table(:plans).create!(id: 3, name: 'other')
+ table(:plan_limits).create!(plan_id: 3)
+ end
+
+ context 'when the instance is SaaS' do
+ before do
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return true
+ end
+
+ it 'removes the newly added ultimate and premium trial entries' do
+ migrate!
+
+ expect { described_class.new.down }.to change { AddNewTrailPlans::Plan.count }.by(-2)
+ expect(AddNewTrailPlans::Plan.find_by(name: 'premium_trial')).to be_nil
+ expect(AddNewTrailPlans::Plan.find_by(name: 'ultimate_trial')).to be_nil
+
+ other_plan = AddNewTrailPlans::Plan.find_by(name: 'other')
+ expect(other_plan).to be_persisted
+ expect(AddNewTrailPlans::PlanLimits.count).to eq(1)
+ expect(AddNewTrailPlans::PlanLimits.first.plan_id).to eq(other_plan.id)
+ end
+ end
+
+ context 'when the instance is not SaaS' do
+ before do
+ allow(Gitlab).to receive(:dev_env_or_com?).and_return false
+ table(:plans).create!(id: 1, name: 'ultimate_trial', title: 'Ultimate Trial')
+ table(:plans).create!(id: 2, name: 'premium_trial', title: 'Premium Trial')
+ table(:plan_limits).create!(id: 1, plan_id: 1)
+ table(:plan_limits).create!(id: 2, plan_id: 2)
+ end
+
+ it 'does not delete plans and plan limits and returns' do
+ migrate!
+
+ expect { described_class.new.down }.not_to change { AddNewTrailPlans::Plan.count }
+ expect(AddNewTrailPlans::PlanLimits.count).to eq(3)
+ end
+ end
+ end
+end
diff --git a/spec/migrations/backfill_operations_feature_flags_active_spec.rb b/spec/migrations/backfill_operations_feature_flags_active_spec.rb
index 4ec2a4a2a92..e49b317d850 100644
--- a/spec/migrations/backfill_operations_feature_flags_active_spec.rb
+++ b/spec/migrations/backfill_operations_feature_flags_active_spec.rb
@@ -10,9 +10,7 @@ RSpec.describe BackfillOperationsFeatureFlagsActive do
def setup
namespace = namespaces.create!(name: 'foo', path: 'foo')
- project = projects.create!(namespace_id: namespace.id)
-
- project
+ projects.create!(namespace_id: namespace.id)
end
it 'executes successfully when there are no flags in the table' do
diff --git a/spec/migrations/backfill_operations_feature_flags_iid_spec.rb b/spec/migrations/backfill_operations_feature_flags_iid_spec.rb
index bafe5830652..1ade08f657e 100644
--- a/spec/migrations/backfill_operations_feature_flags_iid_spec.rb
+++ b/spec/migrations/backfill_operations_feature_flags_iid_spec.rb
@@ -10,9 +10,7 @@ RSpec.describe BackfillOperationsFeatureFlagsIid do
def setup
namespace = namespaces.create!(name: 'foo', path: 'foo')
- project = projects.create!(namespace_id: namespace.id)
-
- project
+ projects.create!(namespace_id: namespace.id)
end
it 'migrates successfully when there are no flags in the database' do
diff --git a/spec/migrations/clean_up_noteable_id_for_notes_on_commits_spec.rb b/spec/migrations/clean_up_noteable_id_for_notes_on_commits_spec.rb
index 531c1dbb76a..268fadee0af 100644
--- a/spec/migrations/clean_up_noteable_id_for_notes_on_commits_spec.rb
+++ b/spec/migrations/clean_up_noteable_id_for_notes_on_commits_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe CleanUpNoteableIdForNotesOnCommits do
end
def dirty_notes_on_commits
- notes.where(noteable_type: 'Commit').where('noteable_id IS NOT NULL')
+ notes.where(noteable_type: 'Commit').where.not(noteable_id: nil)
end
def other_notes
diff --git a/spec/migrations/confirm_support_bot_user_spec.rb b/spec/migrations/confirm_support_bot_user_spec.rb
new file mode 100644
index 00000000000..f6bcab4aa7d
--- /dev/null
+++ b/spec/migrations/confirm_support_bot_user_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ConfirmSupportBotUser, :migration do
+ let(:users) { table(:users) }
+
+ context 'when support bot user is currently unconfirmed' do
+ let!(:support_bot) do
+ create_user!(
+ created_at: 2.days.ago,
+ user_type: User::USER_TYPES['support_bot']
+ )
+ end
+
+ it 'updates the `confirmed_at` attribute' do
+ expect { migrate! }.to change { support_bot.reload.confirmed_at }
+ end
+
+ it 'sets `confirmed_at` to be the same as their `created_at` attribute' do
+ migrate!
+
+ expect(support_bot.reload.confirmed_at).to eq(support_bot.created_at)
+ end
+ end
+
+ context 'when support bot user is already confirmed' do
+ let!(:confirmed_support_bot) do
+ create_user!(
+ user_type: User::USER_TYPES['support_bot'],
+ confirmed_at: 1.day.ago
+ )
+ end
+
+ it 'does not change their `confirmed_at` attribute' do
+ expect { migrate! }.not_to change { confirmed_support_bot.reload.confirmed_at }
+ end
+ end
+
+ context 'when support bot user created_at is null' do
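+ # record_timestamps: false below keeps created_at nil, simulating a bot row without a creation timestamp.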
+ let!(:support_bot) do
+ create_user!(
+ user_type: User::USER_TYPES['support_bot'],
+ confirmed_at: nil,
+ record_timestamps: false
+ )
+ end
+
+ it 'updates the `confirmed_at` attribute' do
+ expect { migrate! }.to change { support_bot.reload.confirmed_at }.from(nil)
+ end
+
+ it 'does not change the `created_at` attribute' do
+ expect { migrate! }.not_to change { support_bot.reload.created_at }.from(nil)
+ end
+ end
+
+ context 'with human users that are currently unconfirmed' do
+ let!(:unconfirmed_human) do
+ create_user!(
+ name: 'human',
+ email: 'human@example.com',
+ user_type: nil
+ )
+ end
+
+ it 'does not update their `confirmed_at` attribute' do
+ expect { migrate! }.not_to change { unconfirmed_human.reload.confirmed_at }
+ end
+ end
+
+ private
+
+ def create_user!(name: 'GitLab Support Bot', email: 'support@example.com', user_type:, created_at: Time.now, confirmed_at: nil, record_timestamps: true)
+ users.create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 0,
+ user_type: user_type,
+ confirmed_at: confirmed_at,
+ record_timestamps: record_timestamps
+ )
+ end
+end
diff --git a/spec/migrations/delete_internal_ids_where_feature_flags_usage_spec.rb b/spec/migrations/delete_internal_ids_where_feature_flags_usage_spec.rb
index a58121fb708..50b5897220a 100644
--- a/spec/migrations/delete_internal_ids_where_feature_flags_usage_spec.rb
+++ b/spec/migrations/delete_internal_ids_where_feature_flags_usage_spec.rb
@@ -10,9 +10,7 @@ RSpec.describe DeleteInternalIdsWhereFeatureFlagsUsage do
def setup
namespace = namespaces.create!(name: 'foo', path: 'foo')
- project = projects.create!(namespace_id: namespace.id)
-
- project
+ projects.create!(namespace_id: namespace.id)
end
it 'deletes feature flag rows from the internal_ids table' do
diff --git a/spec/migrations/delete_security_findings_without_uuid_spec.rb b/spec/migrations/delete_security_findings_without_uuid_spec.rb
new file mode 100644
index 00000000000..b32ea89f8aa
--- /dev/null
+++ b/spec/migrations/delete_security_findings_without_uuid_spec.rb
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe DeleteSecurityFindingsWithoutUuid do
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:ci_pipelines) { table(:ci_pipelines) }
+ let(:ci_builds) { table(:ci_builds) }
+ let(:ci_artifacts) { table(:ci_job_artifacts) }
+ let(:scanners) { table(:vulnerability_scanners) }
+ let(:security_scans) { table(:security_scans) }
+ let(:security_findings) { table(:security_findings) }
+ let(:sast_file_type) { 5 }
+ let(:sast_scan_type) { 1 }
+
+ let(:user) { users.create!(email: 'test@gitlab.com', projects_limit: 5) }
+ let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
+ let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
+ let(:ci_pipeline) { ci_pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a', status: 'success') }
+ let(:ci_build) { ci_builds.create!(commit_id: ci_pipeline.id, retried: false, type: 'Ci::Build') }
+ let(:ci_artifact) { ci_artifacts.create!(project_id: project.id, job_id: ci_build.id, file_type: sast_file_type, file_format: 1) }
+ let(:scanner) { scanners.create!(project_id: project.id, external_id: 'bandit', name: 'Bandit') }
+ let(:security_scan) { security_scans.create!(build_id: ci_build.id, scan_type: sast_scan_type) }
+
+ let!(:finding_1) { security_findings.create!(scan_id: security_scan.id, scanner_id: scanner.id, severity: 0, confidence: 0, project_fingerprint: Digest::SHA1.hexdigest(SecureRandom.uuid)) }
+ let!(:finding_2) { security_findings.create!(scan_id: security_scan.id, scanner_id: scanner.id, severity: 0, confidence: 0, project_fingerprint: Digest::SHA1.hexdigest(SecureRandom.uuid), uuid: SecureRandom.uuid) }
+
+ it 'successfully runs and does not schedule any job' do
+ expect { migrate! }.to change { described_class::SecurityFinding.count }.by(-1)
+ .and change { described_class::SecurityFinding.where(id: finding_1) }
+ end
+end
diff --git a/spec/migrations/migrate_bot_type_to_user_type_spec.rb b/spec/migrations/migrate_bot_type_to_user_type_spec.rb
index 2b85f2a7f69..fcd7f1ebcb8 100644
--- a/spec/migrations/migrate_bot_type_to_user_type_spec.rb
+++ b/spec/migrations/migrate_bot_type_to_user_type_spec.rb
@@ -15,6 +15,6 @@ RSpec.describe MigrateBotTypeToUserType, :migration do
migrate!
- expect(users.where('user_type IS NOT NULL').map(&:user_type)).to match_array([1, 2, 3])
+ expect(users.where.not(user_type: nil).map(&:user_type)).to match_array([1, 2, 3])
end
end
diff --git a/spec/migrations/migrate_elastic_index_settings_spec.rb b/spec/migrations/migrate_elastic_index_settings_spec.rb
new file mode 100644
index 00000000000..41483773903
--- /dev/null
+++ b/spec/migrations/migrate_elastic_index_settings_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require Rails.root.join('db', 'migrate', '20210324131727_migrate_elastic_index_settings.rb')
+
+RSpec.describe MigrateElasticIndexSettings do
+ let(:elastic_index_settings) { table(:elastic_index_settings) }
+ let(:application_settings) { table(:application_settings) }
+
+ context 'with application_settings present' do
+ before do
+ application_settings.create!(elasticsearch_replicas: 2, elasticsearch_shards: 15)
+ end
+
+ it 'migrates settings' do
+ migrate!
+
+ settings = elastic_index_settings.all
+
+ expect(settings.size).to eq 1
+
+ setting = settings.first
+
+ expect(setting.number_of_replicas).to eq(2)
+ expect(setting.number_of_shards).to eq(15)
+ end
+ end
+
+ context 'without application_settings present' do
+ it 'migrates settings' do
+ migrate!
+
+ settings = elastic_index_settings.all
+
+ expect(settings.size).to eq 1
+
+ setting = elastic_index_settings.first
+
+ expect(setting.number_of_replicas).to eq(1)
+ expect(setting.number_of_shards).to eq(5)
+ end
+ end
+end
diff --git a/spec/migrations/migrate_ops_feature_flags_scopes_target_user_ids_spec.rb b/spec/migrations/migrate_ops_feature_flags_scopes_target_user_ids_spec.rb
index b2c36db2e1d..f066b9c90cd 100644
--- a/spec/migrations/migrate_ops_feature_flags_scopes_target_user_ids_spec.rb
+++ b/spec/migrations/migrate_ops_feature_flags_scopes_target_user_ids_spec.rb
@@ -12,9 +12,7 @@ RSpec.describe MigrateOpsFeatureFlagsScopesTargetUserIds do
def setup
namespace = namespaces.create!(name: 'foo', path: 'foo')
project = projects.create!(namespace_id: namespace.id)
- flag = flags.create!(project_id: project.id, active: true, name: 'test_flag')
-
- flag
+ flags.create!(project_id: project.id, active: true, name: 'test_flag')
end
it 'migrates successfully when there are no scopes in the database' do
diff --git a/spec/migrations/move_container_registry_enabled_to_project_features_spec.rb b/spec/migrations/move_container_registry_enabled_to_project_features2_spec.rb
index c7b07f3ef37..11d43a36bc9 100644
--- a/spec/migrations/move_container_registry_enabled_to_project_features_spec.rb
+++ b/spec/migrations/move_container_registry_enabled_to_project_features2_spec.rb
@@ -1,9 +1,9 @@
# frozen_string_literal: true
require 'spec_helper'
-require Rails.root.join('db', 'post_migrate', '20210226120851_move_container_registry_enabled_to_project_features.rb')
+require Rails.root.join('db', 'post_migrate', '20210401131948_move_container_registry_enabled_to_project_features2.rb')
-RSpec.describe MoveContainerRegistryEnabledToProjectFeatures, :migration do
+RSpec.describe MoveContainerRegistryEnabledToProjectFeatures2, :migration do
let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab-org') }
let!(:projects) do
@@ -30,6 +30,10 @@ RSpec.describe MoveContainerRegistryEnabledToProjectFeatures, :migration do
it 'schedules jobs for ranges of projects' do
migrate!
+ # Since track_jobs is true, each job should have an entry in the background_migration_jobs
+ # table.
+ expect(table(:background_migration_jobs).count).to eq(2)
+
expect(described_class::MIGRATION)
.to be_scheduled_delayed_migration(2.minutes, projects[0].id, projects[2].id)
diff --git a/spec/migrations/populate_dismissal_information_for_vulnerabilities_spec.rb b/spec/migrations/populate_dismissal_information_for_vulnerabilities_spec.rb
new file mode 100644
index 00000000000..1db52781956
--- /dev/null
+++ b/spec/migrations/populate_dismissal_information_for_vulnerabilities_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe PopulateDismissalInformationForVulnerabilities do
+ let(:users) { table(:users) }
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+
+ let(:existing_dismissed_at) { Time.now }
+ let(:states) { { detected: 1, dismissed: 2, resolved: 3, confirmed: 4 } }
+ let!(:namespace) { namespaces.create!(name: "foo", path: "bar") }
+ let!(:user_1) { users.create!(name: 'John Doe', email: 'john_doe+1@example.com', projects_limit: 5) }
+ let!(:user_2) { users.create!(name: 'John Doe', email: 'john_doe+2@example.com', projects_limit: 5) }
+ let!(:project) { projects.create!(namespace_id: namespace.id) }
+ let!(:vulnerability_params) do
+ {
+ project_id: project.id,
+ author_id: user_1.id,
+ title: 'Vulnerability',
+ severity: 5,
+ confidence: 5,
+ report_type: 5
+ }
+ end
+
+ let!(:detected_vulnerability) { vulnerabilities.create!(**vulnerability_params, state: states[:detected]) }
+ let!(:resolved_vulnerability) { vulnerabilities.create!(**vulnerability_params, state: states[:resolved]) }
+ let!(:confirmed_vulnerability) { vulnerabilities.create!(**vulnerability_params, state: states[:confirmed]) }
+
+ let!(:dismissed_vulnerability_1) { vulnerabilities.create!(**vulnerability_params, state: states[:dismissed], updated_by_id: user_2.id) }
+ let!(:dismissed_vulnerability_2) { vulnerabilities.create!(**vulnerability_params, state: states[:dismissed], last_edited_by_id: user_2.id) }
+ let!(:dismissed_vulnerability_3) { vulnerabilities.create!(**vulnerability_params, state: states[:dismissed], dismissed_at: existing_dismissed_at, author_id: user_2.id) }
+ let!(:dismissed_vulnerability_4) { vulnerabilities.create!(**vulnerability_params, state: states[:dismissed], dismissed_by_id: user_1.id, author_id: user_2.id) }
+ let!(:dismissed_vulnerability_5) { vulnerabilities.create!(**vulnerability_params, state: states[:dismissed], dismissed_at: existing_dismissed_at, dismissed_by_id: user_1.id, updated_by_id: user_2.id) }
+
+ around do |example|
+ freeze_time { example.run }
+ end
+
+ it 'updates the dismissal information for vulnerabilities' do
+ expect { migrate! }.to change { dismissed_vulnerability_1.reload.dismissed_at }.from(nil).to(dismissed_vulnerability_1.updated_at)
+ .and change { dismissed_vulnerability_1.reload.dismissed_by_id }.from(nil).to(user_2.id)
+ .and change { dismissed_vulnerability_2.reload.dismissed_at }.from(nil).to(dismissed_vulnerability_2.updated_at)
+ .and change { dismissed_vulnerability_2.reload.dismissed_by_id }.from(nil).to(user_2.id)
+ .and change { dismissed_vulnerability_3.reload.dismissed_by_id }.from(nil).to(user_2.id)
+ .and change { dismissed_vulnerability_4.reload.dismissed_at }.from(nil).to(dismissed_vulnerability_4.updated_at)
+ .and not_change { dismissed_vulnerability_3.reload.dismissed_at }.from(existing_dismissed_at)
+ .and not_change { dismissed_vulnerability_4.reload.dismissed_by_id }.from(user_1.id)
+ .and not_change { dismissed_vulnerability_5.reload.dismissed_at }.from(existing_dismissed_at)
+ .and not_change { dismissed_vulnerability_5.reload.dismissed_by_id }.from(user_1.id)
+ .and not_change { detected_vulnerability.reload.dismissed_at }.from(nil)
+ .and not_change { detected_vulnerability.reload.dismissed_by_id }.from(nil)
+ .and not_change { resolved_vulnerability.reload.dismissed_at }.from(nil)
+ .and not_change { resolved_vulnerability.reload.dismissed_by_id }.from(nil)
+ .and not_change { confirmed_vulnerability.reload.dismissed_at }.from(nil)
+ .and not_change { confirmed_vulnerability.reload.dismissed_by_id }.from(nil)
+ end
+end
diff --git a/spec/migrations/remove_records_without_group_from_webhooks_table_spec.rb b/spec/migrations/remove_records_without_group_from_webhooks_table_spec.rb
new file mode 100644
index 00000000000..a28ca12a10d
--- /dev/null
+++ b/spec/migrations/remove_records_without_group_from_webhooks_table_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+require Rails.root.join('db', 'migrate', '20210325092215_add_not_valid_foreign_key_to_group_hooks.rb')
+
+RSpec.describe RemoveRecordsWithoutGroupFromWebhooksTable, schema: 20210330091751 do
+ let(:web_hooks) { table(:web_hooks) }
+ let(:groups) { table(:namespaces) }
+
+ before do
+ group = groups.create!(name: 'gitlab', path: 'gitlab-org')
+ web_hooks.create!(group_id: group.id, type: 'GroupHook')
+ web_hooks.create!(group_id: nil)
+
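+ # Temporarily drop the NOT VALID foreign key so a hook that references a missing group can be inserted.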
+ AddNotValidForeignKeyToGroupHooks.new.down
+ web_hooks.create!(group_id: non_existing_record_id, type: 'GroupHook')
+ AddNotValidForeignKeyToGroupHooks.new.up
+ end
+
+ it 'removes group hooks where the referenced group does not exist', :aggregate_failures do
+ expect { RemoveRecordsWithoutGroupFromWebhooksTable.new.up }.to change { web_hooks.count }.by(-1)
+ expect(web_hooks.where.not(group_id: groups.select(:id)).count).to eq(0)
+ expect(web_hooks.where.not(group_id: nil).count).to eq(1)
+ end
+end
diff --git a/spec/migrations/schedule_migrate_pages_to_zip_storage_spec.rb b/spec/migrations/schedule_migrate_pages_to_zip_storage_spec.rb
new file mode 100644
index 00000000000..1d35da528e4
--- /dev/null
+++ b/spec/migrations/schedule_migrate_pages_to_zip_storage_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require Rails.root.join('db', 'post_migrate', '20210302150310_schedule_migrate_pages_to_zip_storage.rb')
+
+RSpec.describe ScheduleMigratePagesToZipStorage, :sidekiq_might_not_need_inline, schema: 20201231133921 do
+ let(:migration_class) { described_class::MIGRATION }
+ let(:migration_name) { migration_class.to_s.demodulize }
+
+ let(:namespaces_table) { table(:namespaces) }
+ let(:projects_table) { table(:projects) }
+ let(:metadata_table) { table(:project_pages_metadata) }
+ let(:deployments_table) { table(:pages_deployments) }
+
+ let(:namespace) { namespaces_table.create!(path: "group", name: "group") }
+
+ def create_project_metadata(path, deployed, with_deployment)
+ project = projects_table.create!(path: path, namespace_id: namespace.id)
+
+ deployment_id = nil
+
+ if with_deployment
+ deployment_id = deployments_table.create!(project_id: project.id, file_store: 1, file: '1', file_count: 1, file_sha256: '123', size: 1).id
+ end
+
+ metadata_table.create!(project_id: project.id, deployed: deployed, pages_deployment_id: deployment_id)
+ end
+
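+ # Presumably only deployed projects without an existing zip deployment fall inside the scheduled id range.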
+ it 'correctly schedules background migrations' do
+ Sidekiq::Testing.fake! do
+ freeze_time do
+ create_project_metadata("not-deployed-project", false, false)
+
+ first_id = create_project_metadata("project1", true, false).id
+ last_id = create_project_metadata("project2", true, false).id
+
+ create_project_metadata("project-with-deployment", true, true)
+
+ migrate!
+
+ expect(migration_name).to be_scheduled_delayed_migration(5.minutes, first_id, last_id)
+ expect(BackgroundMigrationWorker.jobs.size).to eq(1)
+ end
+ end
+ end
+end
diff --git a/spec/models/application_record_spec.rb b/spec/models/application_record_spec.rb
index 6a0f2290b4c..7e6ac351e68 100644
--- a/spec/models/application_record_spec.rb
+++ b/spec/models/application_record_spec.rb
@@ -93,13 +93,6 @@ RSpec.describe ApplicationRecord do
end
end
- describe '.at_most' do
- it 'limits the number of records returned' do
- create_list(:user, 3)
- expect(User.at_most(2).count).to eq(2)
- end
- end
-
describe '.where_exists' do
it 'produces a WHERE EXISTS query' do
user = create(:user)
@@ -107,4 +100,33 @@ RSpec.describe ApplicationRecord do
expect(User.where_exists(User.limit(1))).to eq([user])
end
end
+
+ describe '.with_fast_read_statement_timeout' do
+ context 'when the query runs faster than configured timeout' do
+ it 'executes the query without error' do
+ result = nil
+
+ expect do
+ described_class.with_fast_read_statement_timeout(100) do
+ result = described_class.connection.exec_query('SELECT 1')
+ end
+ end.not_to raise_error
+
+ expect(result).not_to be_nil
+ end
+ end
+
+ # This query hangs for 10ms and then gets cancelled. As there is no
+ # other way to test the timeout for sure, 10ms of waiting seems to be
+ # reasonable!
+ context 'when the query runs longer than configured timeout' do
+ it 'cancels the query and raises an exception' do
+ expect do
+ described_class.with_fast_read_statement_timeout(10) do
+ described_class.connection.exec_query('SELECT pg_sleep(0.1)')
+ end
+ end.to raise_error(ActiveRecord::QueryCanceled)
+ end
+ end
+ end
end
diff --git a/spec/models/audit_event_archived_spec.rb b/spec/models/audit_event_archived_spec.rb
deleted file mode 100644
index 43a2e8434b0..00000000000
--- a/spec/models/audit_event_archived_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe AuditEventArchived do
- let(:source_table) { AuditEvent }
- let(:destination_table) { described_class }
-
- it 'has the same columns as the source table' do
- column_names_from_source_table = column_names(source_table)
- column_names_from_destination_table = column_names(destination_table)
-
- expect(column_names_from_destination_table).to match_array(column_names_from_source_table)
- end
-
- it 'has the same null constraints as the source table' do
- constraints_from_source_table = null_constraints(source_table)
- constraints_from_destination_table = null_constraints(destination_table)
-
- expect(constraints_from_destination_table.to_a).to match_array(constraints_from_source_table.to_a)
- end
-
- it 'inserts the same record as the one in the source table', :aggregate_failures do
- expect { create(:audit_event) }.to change { destination_table.count }.by(1)
-
- event_from_source_table = source_table.connection.select_one(
- "SELECT * FROM #{source_table.table_name} ORDER BY created_at desc LIMIT 1"
- )
- event_from_destination_table = destination_table.connection.select_one(
- "SELECT * FROM #{destination_table.table_name} ORDER BY created_at desc LIMIT 1"
- )
-
- expect(event_from_destination_table).to eq(event_from_source_table)
- end
-
- def column_names(table)
- table.connection.select_all(<<~SQL)
- SELECT c.column_name
- FROM information_schema.columns c
- WHERE c.table_name = '#{table.table_name}'
- SQL
- end
-
- def null_constraints(table)
- table.connection.select_all(<<~SQL)
- SELECT c.column_name, c.is_nullable
- FROM information_schema.columns c
- WHERE c.table_name = '#{table.table_name}'
- AND c.column_name != 'created_at'
- SQL
- end
-end
diff --git a/spec/models/blob_viewer/gitlab_ci_yml_spec.rb b/spec/models/blob_viewer/gitlab_ci_yml_spec.rb
index cd885d312dc..803614d90a5 100644
--- a/spec/models/blob_viewer/gitlab_ci_yml_spec.rb
+++ b/spec/models/blob_viewer/gitlab_ci_yml_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe BlobViewer::GitlabCiYml do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:data) { File.read(Rails.root.join('spec/support/gitlab_stubs/gitlab_ci.yml')) }
let(:blob) { fake_blob(path: '.gitlab-ci.yml', data: data) }
let(:sha) { sample_commit.id }
diff --git a/spec/models/blob_viewer/metrics_dashboard_yml_spec.rb b/spec/models/blob_viewer/metrics_dashboard_yml_spec.rb
index 84dfc5186a8..8d5c7ce84f6 100644
--- a/spec/models/blob_viewer/metrics_dashboard_yml_spec.rb
+++ b/spec/models/blob_viewer/metrics_dashboard_yml_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe BlobViewer::MetricsDashboardYml do
include RepoHelpers
let_it_be(:project) { create(:project, :repository) }
+
let(:blob) { fake_blob(path: '.gitlab/dashboards/custom-dashboard.yml', data: data) }
let(:sha) { sample_commit.id }
let(:data) { fixture_file('lib/gitlab/metrics/dashboard/sample_dashboard.yml') }
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index 17ab4d5954c..652ea431696 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -125,68 +125,4 @@ RSpec.describe BulkImports::Entity, type: :model do
end
end
end
-
- describe "#update_tracker_for" do
- let(:entity) { create(:bulk_import_entity) }
-
- it "inserts new tracker when it does not exist" do
- expect do
- entity.update_tracker_for(relation: :relation, has_next_page: false)
- end.to change(BulkImports::Tracker, :count).by(1)
-
- tracker = entity.trackers.last
-
- expect(tracker.relation).to eq('relation')
- expect(tracker.has_next_page).to eq(false)
- expect(tracker.next_page).to eq(nil)
- end
-
- it "updates the tracker if it already exist" do
- create(
- :bulk_import_tracker,
- relation: :relation,
- has_next_page: false,
- entity: entity
- )
-
- expect do
- entity.update_tracker_for(relation: :relation, has_next_page: true, next_page: 'nextPage')
- end.not_to change(BulkImports::Tracker, :count)
-
- tracker = entity.trackers.last
-
- expect(tracker.relation).to eq('relation')
- expect(tracker.has_next_page).to eq(true)
- expect(tracker.next_page).to eq('nextPage')
- end
- end
-
- describe "#has_next_page?" do
- it "queries for the given relation if it has more pages to be fetched" do
- entity = create(:bulk_import_entity)
- create(
- :bulk_import_tracker,
- relation: :relation,
- has_next_page: false,
- entity: entity
- )
-
- expect(entity.has_next_page?(:relation)).to eq(false)
- end
- end
-
- describe "#next_page_for" do
- it "queries for the next page of the given relation" do
- entity = create(:bulk_import_entity)
- create(
- :bulk_import_tracker,
- relation: :relation,
- has_next_page: false,
- next_page: 'nextPage',
- entity: entity
- )
-
- expect(entity.next_page_for(:relation)).to eq('nextPage')
- end
- end
end
diff --git a/spec/models/bulk_imports/stage_spec.rb b/spec/models/bulk_imports/stage_spec.rb
new file mode 100644
index 00000000000..7765fd4c5c4
--- /dev/null
+++ b/spec/models/bulk_imports/stage_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::Stage do
+ let(:pipelines) do
+ if Gitlab.ee?
+ [
+ [0, BulkImports::Groups::Pipelines::GroupPipeline],
+ [1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
+ [1, BulkImports::Groups::Pipelines::MembersPipeline],
+ [1, BulkImports::Groups::Pipelines::LabelsPipeline],
+ [1, BulkImports::Groups::Pipelines::MilestonesPipeline],
+ [1, BulkImports::Groups::Pipelines::BadgesPipeline],
+ [1, 'BulkImports::Groups::Pipelines::IterationsPipeline'.constantize],
+ [2, 'BulkImports::Groups::Pipelines::EpicsPipeline'.constantize],
+ [3, 'BulkImports::Groups::Pipelines::EpicAwardEmojiPipeline'.constantize],
+ [3, 'BulkImports::Groups::Pipelines::EpicEventsPipeline'.constantize],
+ [4, BulkImports::Groups::Pipelines::EntityFinisher]
+ ]
+ else
+ [
+ [0, BulkImports::Groups::Pipelines::GroupPipeline],
+ [1, BulkImports::Groups::Pipelines::SubgroupEntitiesPipeline],
+ [1, BulkImports::Groups::Pipelines::MembersPipeline],
+ [1, BulkImports::Groups::Pipelines::LabelsPipeline],
+ [1, BulkImports::Groups::Pipelines::MilestonesPipeline],
+ [1, BulkImports::Groups::Pipelines::BadgesPipeline],
+ [2, BulkImports::Groups::Pipelines::EntityFinisher]
+ ]
+ end
+ end
+
+ describe '.pipelines' do
+ it 'lists all the pipelines with their stage number, ordered by stage' do
+ expect(described_class.pipelines).to match_array(pipelines)
+ end
+ end
+
+ describe '.pipeline_exists?' do
+ it 'returns true when the given pipeline name exists in the pipelines list' do
+ expect(described_class.pipeline_exists?(BulkImports::Groups::Pipelines::GroupPipeline)).to eq(true)
+ expect(described_class.pipeline_exists?('BulkImports::Groups::Pipelines::GroupPipeline')).to eq(true)
+ end
+
+ it 'returns false when the given pipeline name does not exist in the pipelines list' do
+ expect(described_class.pipeline_exists?('BulkImports::Groups::Pipelines::InexistentPipeline')).to eq(false)
+ end
+ end
+end
diff --git a/spec/models/bulk_imports/tracker_spec.rb b/spec/models/bulk_imports/tracker_spec.rb
index 77896105959..0f00aeb9c1d 100644
--- a/spec/models/bulk_imports/tracker_spec.rb
+++ b/spec/models/bulk_imports/tracker_spec.rb
@@ -26,4 +26,60 @@ RSpec.describe BulkImports::Tracker, type: :model do
end
end
end
+
+ describe '.stage_running?' do
+ it 'returns true if there is any unfinished pipeline in the given stage' do
+ tracker = create(:bulk_import_tracker)
+
+ expect(described_class.stage_running?(tracker.entity.id, 0))
+ .to eq(true)
+ end
+
+ it 'returns false if there are no unfinished pipelines in the given stage' do
+ tracker = create(:bulk_import_tracker, :finished)
+
+ expect(described_class.stage_running?(tracker.entity.id, 0))
+ .to eq(false)
+ end
+ end
+
+ describe '.next_pipeline_trackers_for' do
+ let_it_be(:entity) { create(:bulk_import_entity) }
+ let_it_be(:stage_0_tracker) { create(:bulk_import_tracker, :finished, entity: entity) }
+
+ it 'returns empty when all the stages pipelines are finished' do
+ expect(described_class.next_pipeline_trackers_for(entity.id))
+ .to eq([])
+ end
+
+ it 'returns the not started pipeline trackers from the minimum stage number' do
+ stage_1_tracker = create(:bulk_import_tracker, entity: entity, stage: 1)
+ stage_2_tracker = create(:bulk_import_tracker, entity: entity, stage: 2)
+
+ expect(described_class.next_pipeline_trackers_for(entity.id))
+ .to include(stage_1_tracker)
+
+ expect(described_class.next_pipeline_trackers_for(entity.id))
+ .not_to include(stage_2_tracker)
+ end
+ end
+
+ describe '#pipeline_class' do
+ it 'returns the pipeline class' do
+ pipeline_class = BulkImports::Stage.pipelines.first[1]
+ tracker = create(:bulk_import_tracker, pipeline_name: pipeline_class)
+
+ expect(tracker.pipeline_class).to eq(pipeline_class)
+ end
+
+ it 'raises an error when the pipeline is not valid' do
+ tracker = create(:bulk_import_tracker, pipeline_name: 'InexistingPipeline')
+
+ expect { tracker.pipeline_class }
+ .to raise_error(
+ NameError,
+ "'InexistingPipeline' is not a valid BulkImport Pipeline"
+ )
+ end
+ end
end
diff --git a/spec/models/ci/artifact_blob_spec.rb b/spec/models/ci/artifact_blob_spec.rb
index 44f895cc1c5..c00f46683b9 100644
--- a/spec/models/ci/artifact_blob_spec.rb
+++ b/spec/models/ci/artifact_blob_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Ci::ArtifactBlob do
let_it_be(:project) { create(:project, :public) }
let_it_be(:build) { create(:ci_build, :artifacts, project: project) }
+
let(:entry) { build.artifacts_metadata_entry('other_artifacts_0.1.2/another-subdirectory/banana_sample.gif') }
subject { described_class.new(entry) }
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index f3029598b02..db956b26b6b 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe Ci::Bridge do
CI_PROJECT_PATH_SLUG CI_PROJECT_NAMESPACE CI_PROJECT_ROOT_NAMESPACE
CI_PIPELINE_IID CI_CONFIG_PATH CI_PIPELINE_SOURCE CI_COMMIT_MESSAGE
CI_COMMIT_TITLE CI_COMMIT_DESCRIPTION CI_COMMIT_REF_PROTECTED
- CI_COMMIT_TIMESTAMP
+ CI_COMMIT_TIMESTAMP CI_COMMIT_AUTHOR
]
expect(bridge.scoped_variables.map { |v| v[:key] }).to include(*variables)
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index 5b07bd8923f..339dffa507f 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -585,6 +585,68 @@ RSpec.describe Ci::Build do
is_expected.to be_falsey
end
end
+
+ context 'with runners_cached_states feature flag enabled' do
+ before do
+ stub_feature_flags(runners_cached_states: true)
+ end
+
+ it 'caches the result in Redis' do
+ expect(Rails.cache).to receive(:fetch).with(['has-online-runners', build.id], expires_in: 1.minute)
+
+ build.any_runners_online?
+ end
+ end
+
+ context 'with runners_cached_states feature flag disabled' do
+ before do
+ stub_feature_flags(runners_cached_states: false)
+ end
+
+ it 'does not cache' do
+ expect(Rails.cache).not_to receive(:fetch).with(['has-online-runners', build.id], expires_in: 1.minute)
+
+ build.any_runners_online?
+ end
+ end
+ end
+
+ describe '#any_runners_available?' do
+ subject { build.any_runners_available? }
+
+ context 'when no runners' do
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when there are runners' do
+ let!(:runner) { create(:ci_runner, :project, projects: [build.project]) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'with runners_cached_states feature flag enabled' do
+ before do
+ stub_feature_flags(runners_cached_states: true)
+ end
+
+ it 'caches the result in Redis' do
+ expect(Rails.cache).to receive(:fetch).with(['has-available-runners', build.project.id], expires_in: 1.minute)
+
+ build.any_runners_available?
+ end
+ end
+
+ context 'with runners_cached_states feature flag disabled' do
+ before do
+ stub_feature_flags(runners_cached_states: false)
+ end
+
+ it 'does not cache' do
+ expect(Rails.cache).not_to receive(:fetch).with(['has-available-runners', build.project.id], expires_in: 1.minute)
+
+ build.any_runners_available?
+ end
+ end
end
describe '#artifacts?' do
@@ -821,45 +883,6 @@ RSpec.describe Ci::Build do
{ cache: [{ key: "key", paths: ["public"], policy: "pull-push" }] }
end
- context 'with multiple_cache_per_job FF disabled' do
- before do
- stub_feature_flags(multiple_cache_per_job: false)
- end
- let(:options) { { cache: { key: "key", paths: ["public"], policy: "pull-push" } } }
-
- subject { build.cache }
-
- context 'when build has cache' do
- before do
- allow(build).to receive(:options).and_return(options)
- end
-
- context 'when project has jobs_cache_index' do
- before do
- allow_any_instance_of(Project).to receive(:jobs_cache_index).and_return(1)
- end
-
- it { is_expected.to be_an(Array).and all(include(key: "key-1")) }
- end
-
- context 'when project does not have jobs_cache_index' do
- before do
- allow_any_instance_of(Project).to receive(:jobs_cache_index).and_return(nil)
- end
-
- it { is_expected.to eq([options[:cache]]) }
- end
- end
-
- context 'when build does not have cache' do
- before do
- allow(build).to receive(:options).and_return({})
- end
-
- it { is_expected.to eq([]) }
- end
- end
-
subject { build.cache }
context 'when build has cache' do
@@ -1174,6 +1197,8 @@ RSpec.describe Ci::Build do
end
describe 'state transition as a deployable' do
+ subject { build.send(event) }
+
let!(:build) { create(:ci_build, :with_deployment, :start_review_app, project: project, pipeline: pipeline) }
let(:deployment) { build.deployment }
let(:environment) { deployment.environment }
@@ -1188,54 +1213,78 @@ RSpec.describe Ci::Build do
expect(environment.name).to eq('review/master')
end
- context 'when transits to running' do
- before do
- build.run!
+ shared_examples_for 'avoid deadlock' do
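+ # The ci_builds row should be updated before the deployments row so the lock order stays consistent and deadlocks are avoided.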
+ it 'executes UPDATE in the right order' do
+ recorded = ActiveRecord::QueryRecorder.new { subject }
+
+ index_for_build = recorded.log.index { |l| l.include?("UPDATE \"ci_builds\"") }
+ index_for_deployment = recorded.log.index { |l| l.include?("UPDATE \"deployments\"") }
+
+ expect(index_for_build).to be < index_for_deployment
end
+ end
+
+ context 'when transits to running' do
+ let(:event) { :run! }
+
+ it_behaves_like 'avoid deadlock'
it 'transits deployment status to running' do
+ subject
+
expect(deployment).to be_running
end
end
context 'when transits to success' do
+ let(:event) { :success! }
+
before do
allow(Deployments::UpdateEnvironmentWorker).to receive(:perform_async)
allow(Deployments::ExecuteHooksWorker).to receive(:perform_async)
- build.success!
end
+ it_behaves_like 'avoid deadlock'
+
it 'transits deployment status to success' do
+ subject
+
expect(deployment).to be_success
end
end
context 'when transits to failed' do
- before do
- build.drop!
- end
+ let(:event) { :drop! }
+
+ it_behaves_like 'avoid deadlock'
it 'transits deployment status to failed' do
+ subject
+
expect(deployment).to be_failed
end
end
context 'when transits to skipped' do
- before do
- build.skip!
- end
+ let(:event) { :skip! }
+
+ it_behaves_like 'avoid deadlock'
it 'transits deployment status to skipped' do
+ subject
+
expect(deployment).to be_skipped
end
end
context 'when transits to canceled' do
- before do
- build.cancel!
- end
+ let(:event) { :cancel! }
+
+ it_behaves_like 'avoid deadlock'
it 'transits deployment status to canceled' do
+ subject
+
expect(deployment).to be_canceled
end
end
@@ -2500,6 +2549,7 @@ RSpec.describe Ci::Build do
{ key: 'CI_COMMIT_DESCRIPTION', value: pipeline.git_commit_description, public: true, masked: false },
{ key: 'CI_COMMIT_REF_PROTECTED', value: (!!pipeline.protected_ref?).to_s, public: true, masked: false },
{ key: 'CI_COMMIT_TIMESTAMP', value: pipeline.git_commit_timestamp, public: true, masked: false },
+ { key: 'CI_COMMIT_AUTHOR', value: pipeline.git_author_full_text, public: true, masked: false },
{ key: 'CI_BUILD_REF', value: build.sha, public: true, masked: false },
{ key: 'CI_BUILD_BEFORE_SHA', value: build.before_sha, public: true, masked: false },
{ key: 'CI_BUILD_REF_NAME', value: build.ref, public: true, masked: false },
@@ -3620,10 +3670,10 @@ RSpec.describe Ci::Build do
end
describe 'state transition when build fails' do
- let(:service) { MergeRequests::AddTodoWhenBuildFailsService.new(project, user) }
+ let(:service) { ::MergeRequests::AddTodoWhenBuildFailsService.new(project, user) }
before do
- allow(MergeRequests::AddTodoWhenBuildFailsService).to receive(:new).and_return(service)
+ allow(::MergeRequests::AddTodoWhenBuildFailsService).to receive(:new).and_return(service)
allow(service).to receive(:close)
end
@@ -3708,7 +3758,7 @@ RSpec.describe Ci::Build do
subject.drop!
end
- it 'creates a todo' do
+ it 'creates a todo async', :sidekiq_inline do
project.add_developer(user)
expect_next_instance_of(TodoService) do |todo_service|
@@ -3741,6 +3791,7 @@ RSpec.describe Ci::Build do
describe '.matches_tag_ids' do
let_it_be(:build, reload: true) { create(:ci_build, project: project, user: user) }
+
let(:tag_ids) { ::ActsAsTaggableOn::Tag.named_any(tag_list).ids }
subject { described_class.where(id: build).matches_tag_ids(tag_ids) }
@@ -4192,6 +4243,7 @@ RSpec.describe Ci::Build do
describe '#artifacts_metadata_entry' do
let_it_be(:build) { create(:ci_build, project: project) }
+
let(:path) { 'other_artifacts_0.1.2/another-subdirectory/banana_sample.gif' }
around do |example|
diff --git a/spec/models/ci/build_trace_chunk_spec.rb b/spec/models/ci/build_trace_chunk_spec.rb
index 3d728b9335e..12bc5d9aa3c 100644
--- a/spec/models/ci/build_trace_chunk_spec.rb
+++ b/spec/models/ci/build_trace_chunk_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Ci::BuildTraceChunk, :clean_gitlab_redis_shared_state do
include ExclusiveLeaseHelpers
let_it_be(:build) { create(:ci_build, :running) }
+
let(:chunk_index) { 0 }
let(:data_store) { :redis }
let(:raw_data) { nil }
diff --git a/spec/models/ci/daily_build_group_report_result_spec.rb b/spec/models/ci/daily_build_group_report_result_spec.rb
index 4e96ec7cecb..acc87c61036 100644
--- a/spec/models/ci/daily_build_group_report_result_spec.rb
+++ b/spec/models/ci/daily_build_group_report_result_spec.rb
@@ -86,6 +86,7 @@ RSpec.describe Ci::DailyBuildGroupReportResult do
describe 'scopes' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
+
let(:recent_build_group_report_result) { create(:ci_daily_build_group_report_result, project: project, group: group) }
let(:old_build_group_report_result) do
create(:ci_daily_build_group_report_result, date: 1.week.ago, project: project)
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 796947be4c8..cdb123573f1 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -195,6 +195,22 @@ RSpec.describe Ci::JobArtifact do
end
end
+ describe '#archived_trace_exists?' do
+ subject { artifact.archived_trace_exists? }
+
+ context 'when the file exists' do
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when the file does not exist' do
+ before do
+ artifact.file.remove!
+ end
+
+ it { is_expected.to be_falsy }
+ end
+ end
+
describe '.for_sha' do
let(:first_pipeline) { create(:ci_pipeline) }
let(:second_pipeline) { create(:ci_pipeline, project: first_pipeline.project, sha: Digest::SHA1.hexdigest(SecureRandom.hex)) }
diff --git a/spec/models/ci/pipeline_schedule_spec.rb b/spec/models/ci/pipeline_schedule_spec.rb
index cec3b544e50..3e5fbbfe823 100644
--- a/spec/models/ci/pipeline_schedule_spec.rb
+++ b/spec/models/ci/pipeline_schedule_spec.rb
@@ -90,6 +90,18 @@ RSpec.describe Ci::PipelineSchedule do
end
end
+ describe '.owned_by' do
+ let(:user) { create(:user) }
+ let!(:owned_pipeline_schedule) { create(:ci_pipeline_schedule, owner: user) }
+ let!(:other_pipeline_schedule) { create(:ci_pipeline_schedule) }
+
+ subject { described_class.owned_by(user) }
+
+ it 'returns owned pipeline schedules' do
+ is_expected.to eq([owned_pipeline_schedule])
+ end
+ end
+
describe '#set_next_run_at' do
let(:pipeline_schedule) { create(:ci_pipeline_schedule, :nightly) }
let(:ideal_next_run_at) { pipeline_schedule.send(:ideal_next_run_from, Time.zone.now) }
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index d57a39d133f..b7f5811e945 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -40,6 +40,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
it { is_expected.to respond_to :git_author_name }
it { is_expected.to respond_to :git_author_email }
+ it { is_expected.to respond_to :git_author_full_text }
it { is_expected.to respond_to :short_sha }
it { is_expected.to delegate_method(:full_path).to(:project).with_prefix }
@@ -426,6 +427,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
subject { pipeline.legacy_detached_merge_request_pipeline? }
let_it_be(:merge_request) { create(:merge_request) }
+
let(:ref) { 'feature' }
let(:target_sha) { nil }
@@ -819,6 +821,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
CI_COMMIT_DESCRIPTION
CI_COMMIT_REF_PROTECTED
CI_COMMIT_TIMESTAMP
+ CI_COMMIT_AUTHOR
CI_BUILD_REF
CI_BUILD_BEFORE_SHA
CI_BUILD_REF_NAME
@@ -830,6 +833,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
let_it_be(:assignees) { create_list(:user, 2) }
let_it_be(:milestone) { create(:milestone, project: project) }
let_it_be(:labels) { create_list(:label, 2) }
+
let(:merge_request) do
create(:merge_request, :simple,
source_project: project,
@@ -1274,6 +1278,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
describe 'state machine' do
let_it_be_with_reload(:pipeline) { create(:ci_empty_pipeline, :created) }
+
let(:current) { Time.current.change(usec: 0) }
let(:build) { create_build('build1', queued_at: 0) }
let(:build_b) { create_build('build2', queued_at: 0) }
@@ -2277,6 +2282,35 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
)
end
end
+
+ context 'when method is scoped' do
+ let!(:commit_123_ref_master_parent_pipeline) do
+ create(
+ :ci_pipeline,
+ sha: '123',
+ ref: 'master',
+ project: project
+ )
+ end
+
+ let!(:commit_123_ref_master_child_pipeline) do
+ create(
+ :ci_pipeline,
+ sha: '123',
+ ref: 'master',
+ project: project,
+ child_of: commit_123_ref_master_parent_pipeline
+ )
+ end
+
+ it 'returns the latest pipeline after applying the scope' do
+ result = described_class.ci_sources.latest_pipeline_per_commit(%w[123], 'master')
+
+ expect(result).to match(
+ '123' => commit_123_ref_master_parent_pipeline
+ )
+ end
+ end
end
describe '.latest_successful_ids_per_project' do
@@ -2325,6 +2359,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
subject { pipeline.reload.status }
let_it_be(:pipeline) { create(:ci_empty_pipeline, :created) }
+
let(:build) { create(:ci_build, :created, pipeline: pipeline, name: 'test') }
context 'on waiting for resource' do
@@ -2633,6 +2668,37 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
expect(latest_status).to eq %w(canceled canceled)
end
end
+
+ context 'preloading relations' do
+ let(:pipeline1) { create(:ci_empty_pipeline, :created) }
+ let(:pipeline2) { create(:ci_empty_pipeline, :created) }
+
+ before do
+ create(:ci_build, :pending, pipeline: pipeline1)
+ create(:generic_commit_status, :pending, pipeline: pipeline1)
+
+ create(:ci_build, :pending, pipeline: pipeline2)
+ create(:ci_build, :pending, pipeline: pipeline2)
+ create(:generic_commit_status, :pending, pipeline: pipeline2)
+ create(:generic_commit_status, :pending, pipeline: pipeline2)
+ create(:generic_commit_status, :pending, pipeline: pipeline2)
+ end
+
+ it 'preloads relations for each build to avoid N+1 queries' do
+ control1 = ActiveRecord::QueryRecorder.new do
+ pipeline1.cancel_running
+ end
+
+ control2 = ActiveRecord::QueryRecorder.new do
+ pipeline2.cancel_running
+ end
+
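+ # pipeline2 carries one extra build and two extra generic commit statuses
+ # compared to pipeline1, so with preloading the only additional queries
+ # expected are the per-record transitions and validations counted below.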
+ extra_update_queries = 3 # transition ... => :canceled
+ extra_generic_commit_status_validation_queries = 2 # name_uniqueness_across_types
+
+ expect(control2.count).to eq(control1.count + extra_update_queries + extra_generic_commit_status_validation_queries)
+ end
+ end
end
describe '#retry_failed' do
@@ -2688,6 +2754,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
describe '#execute_hooks' do
let_it_be(:pipeline) { create(:ci_empty_pipeline, :created) }
+
let!(:build_a) { create_build('a', 0) }
let!(:build_b) { create_build('b', 0) }
@@ -3353,6 +3420,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
describe '#build_with_artifacts_in_self_and_descendants' do
let_it_be(:pipeline) { create(:ci_pipeline) }
+
let!(:build) { create(:ci_build, name: 'test', pipeline: pipeline) }
let(:child_pipeline) { create(:ci_pipeline, child_of: pipeline) }
let!(:child_build) { create(:ci_build, :artifacts, name: 'test', pipeline: child_pipeline) }
@@ -3780,6 +3848,26 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ describe '#uses_needs?' do
+ let_it_be(:pipeline) { create(:ci_pipeline) }
+
+ context 'when the scheduling type is `dag`' do
+ it 'returns true' do
+ create(:ci_build, pipeline: pipeline, scheduling_type: :dag)
+
+ expect(pipeline.uses_needs?).to eq(true)
+ end
+ end
+
+ context 'when the scheduling type is nil or stage' do
+ it 'returns false' do
+ create(:ci_build, pipeline: pipeline, scheduling_type: :stage)
+
+ expect(pipeline.uses_needs?).to eq(false)
+ end
+ end
+ end
+
describe '#total_size' do
let(:pipeline) { create(:ci_pipeline) }
let!(:build_job1) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
@@ -3814,6 +3902,16 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
pipeline.drop
end
end
+
+ context 'with failure_reason' do
+ let(:pipeline) { create(:ci_pipeline, :running) }
+ let(:failure_reason) { 'config_error' }
+ let(:counter) { Gitlab::Metrics.counter(:gitlab_ci_pipeline_failure_reasons, 'desc') }
+
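+ # The counter is labelled by failure reason, so dropping the pipeline is
+ # expected to bump only the 'config_error' label.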
+ it 'increments the counter with the failure_reason' do
+ expect { pipeline.drop!(failure_reason) }.to change { counter.get(reason: failure_reason) }.by(1)
+ end
+ end
end
end
@@ -3843,6 +3941,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
describe '#find_stage_by_name' do
let_it_be(:pipeline) { create(:ci_pipeline) }
+
let(:stage_name) { 'test' }
let(:stage) do
@@ -4128,6 +4227,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
subject { pipeline.base_and_ancestors(same_project: same_project) }
let_it_be(:pipeline) { create(:ci_pipeline, :created) }
+
let(:same_project) { false }
context 'when pipeline is not child nor parent' do
@@ -4164,6 +4264,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
context 'when pipeline is a child of a child pipeline' do
let_it_be(:pipeline) { create(:ci_pipeline, :created) }
+
let(:ancestor) { create(:ci_pipeline) }
let(:parent) { create(:ci_pipeline) }
@@ -4179,6 +4280,7 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
context 'when pipeline is a triggered pipeline' do
let_it_be(:pipeline) { create(:ci_pipeline, :created) }
+
let(:upstream) { create(:ci_pipeline, project: create(:project)) }
before do
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index ff3551d2a18..ffe0b0d0b19 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -353,6 +353,7 @@ RSpec.describe Ci::Runner do
using RSpec::Parameterized::TableSyntax
let_it_be(:pipeline) { create(:ci_pipeline) }
+
let(:build) { create(:ci_build, pipeline: pipeline) }
let(:runner_project) { build.project }
let(:runner) { create(:ci_runner, :project, projects: [runner_project], tag_list: tag_list, run_untagged: run_untagged) }
diff --git a/spec/models/ci/stage_spec.rb b/spec/models/ci/stage_spec.rb
index 0afc491dc73..e46d9189c86 100644
--- a/spec/models/ci/stage_spec.rb
+++ b/spec/models/ci/stage_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::Stage, :models do
let_it_be(:pipeline) { create(:ci_empty_pipeline) }
+
let(:stage) { create(:ci_stage_entity, pipeline: pipeline, project: pipeline.project) }
it_behaves_like 'having unique enum values'
@@ -27,6 +28,18 @@ RSpec.describe Ci::Stage, :models do
end
end
+ describe '.by_name' do
+ it 'finds stages by name' do
+ a = create(:ci_stage_entity, name: 'a')
+ b = create(:ci_stage_entity, name: 'b')
+ c = create(:ci_stage_entity, name: 'c')
+
+ expect(described_class.by_name('a')).to contain_exactly(a)
+ expect(described_class.by_name('b')).to contain_exactly(b)
+ expect(described_class.by_name(%w[a c])).to contain_exactly(a, c)
+ end
+ end
+
describe '#status' do
context 'when stage is pending' do
let(:stage) { create(:ci_stage_entity, status: 'pending') }
diff --git a/spec/models/ci/test_case_failure_spec.rb b/spec/models/ci/test_case_failure_spec.rb
deleted file mode 100644
index 34f89b663ed..00000000000
--- a/spec/models/ci/test_case_failure_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Ci::TestCaseFailure do
- describe 'relationships' do
- it { is_expected.to belong_to(:build) }
- it { is_expected.to belong_to(:test_case) }
- end
-
- describe 'validations' do
- subject { build(:ci_test_case_failure) }
-
- it { is_expected.to validate_presence_of(:test_case) }
- it { is_expected.to validate_presence_of(:build) }
- it { is_expected.to validate_presence_of(:failed_at) }
- end
-
- describe '.recent_failures_count' do
- let_it_be(:project) { create(:project) }
-
- subject(:recent_failures) do
- described_class.recent_failures_count(
- project: project,
- test_case_keys: test_case_keys
- )
- end
-
- context 'when test case failures are within the date range and are for the test case keys' do
- let(:tc_1) { create(:ci_test_case, project: project) }
- let(:tc_2) { create(:ci_test_case, project: project) }
- let(:test_case_keys) { [tc_1.key_hash, tc_2.key_hash] }
-
- before do
- create_list(:ci_test_case_failure, 3, test_case: tc_1, failed_at: 1.day.ago)
- create_list(:ci_test_case_failure, 2, test_case: tc_2, failed_at: 3.days.ago)
- end
-
- it 'returns the number of failures for each test case key hash for the past 14 days by default' do
- expect(recent_failures).to eq(
- tc_1.key_hash => 3,
- tc_2.key_hash => 2
- )
- end
- end
-
- context 'when test case failures are within the date range but are not for the test case keys' do
- let(:tc) { create(:ci_test_case, project: project) }
- let(:test_case_keys) { ['some-other-key-hash'] }
-
- before do
- create(:ci_test_case_failure, test_case: tc, failed_at: 1.day.ago)
- end
-
- it 'excludes them from the count' do
- expect(recent_failures[tc.key_hash]).to be_nil
- end
- end
-
- context 'when test case failures are not within the date range but are for the test case keys' do
- let(:tc) { create(:ci_test_case, project: project) }
- let(:test_case_keys) { [tc.key_hash] }
-
- before do
- create(:ci_test_case_failure, test_case: tc, failed_at: 15.days.ago)
- end
-
- it 'excludes them from the count' do
- expect(recent_failures[tc.key_hash]).to be_nil
- end
- end
- end
-end
diff --git a/spec/models/ci/test_case_spec.rb b/spec/models/ci/test_case_spec.rb
deleted file mode 100644
index 45311e285a6..00000000000
--- a/spec/models/ci/test_case_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Ci::TestCase do
- describe 'relationships' do
- it { is_expected.to belong_to(:project) }
- it { is_expected.to have_many(:test_case_failures) }
- end
-
- describe 'validations' do
- subject { build(:ci_test_case) }
-
- it { is_expected.to validate_presence_of(:project) }
- it { is_expected.to validate_presence_of(:key_hash) }
- end
-
- describe '.find_or_create_by_batch' do
- it 'finds or creates records for the given test case keys', :aggregate_failures do
- project = create(:project)
- existing_tc = create(:ci_test_case, project: project)
- new_key = Digest::SHA256.hexdigest(SecureRandom.hex)
- keys = [existing_tc.key_hash, new_key]
-
- result = described_class.find_or_create_by_batch(project, keys)
-
- expect(result.map(&:key_hash)).to match_array([existing_tc.key_hash, new_key])
- expect(result).to all(be_persisted)
- end
- end
-end
diff --git a/spec/models/ci/unit_test_failure_spec.rb b/spec/models/ci/unit_test_failure_spec.rb
new file mode 100644
index 00000000000..f9b8c66b603
--- /dev/null
+++ b/spec/models/ci/unit_test_failure_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::UnitTestFailure do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:build) }
+ it { is_expected.to belong_to(:unit_test) }
+ end
+
+ describe 'validations' do
+ subject { build(:ci_unit_test_failure) }
+
+ it { is_expected.to validate_presence_of(:unit_test) }
+ it { is_expected.to validate_presence_of(:build) }
+ it { is_expected.to validate_presence_of(:failed_at) }
+ end
+
+ describe '.recent_failures_count' do
+ let_it_be(:project) { create(:project) }
+
+ subject(:recent_failures) do
+ described_class.recent_failures_count(
+ project: project,
+ unit_test_keys: unit_test_keys
+ )
+ end
+
+ context 'when unit test failures are within the date range and are for the unit test keys' do
+ let(:test_1) { create(:ci_unit_test, project: project) }
+ let(:test_2) { create(:ci_unit_test, project: project) }
+ let(:unit_test_keys) { [test_1.key_hash, test_2.key_hash] }
+
+ before do
+ create_list(:ci_unit_test_failure, 3, unit_test: test_1, failed_at: 1.day.ago)
+ create_list(:ci_unit_test_failure, 2, unit_test: test_2, failed_at: 3.days.ago)
+ end
+
+ it 'returns the number of failures for each unit test key hash for the past 14 days by default' do
+ expect(recent_failures).to eq(
+ test_1.key_hash => 3,
+ test_2.key_hash => 2
+ )
+ end
+ end
+
+ context 'when unit test failures are within the date range but are not for the unit test keys' do
+ let(:test) { create(:ci_unit_test, project: project) }
+ let(:unit_test_keys) { ['some-other-key-hash'] }
+
+ before do
+ create(:ci_unit_test_failure, unit_test: test, failed_at: 1.day.ago)
+ end
+
+ it 'excludes them from the count' do
+ expect(recent_failures[test.key_hash]).to be_nil
+ end
+ end
+
+ context 'when unit test failures are not within the date range but are for the unit test keys' do
+ let(:test) { create(:ci_unit_test, project: project) }
+ let(:unit_test_keys) { [test.key_hash] }
+
+ before do
+ create(:ci_unit_test_failure, unit_test: test, failed_at: 15.days.ago)
+ end
+
+ it 'excludes them from the count' do
+ expect(recent_failures[test.key_hash]).to be_nil
+ end
+ end
+ end
+end
diff --git a/spec/models/ci/unit_test_spec.rb b/spec/models/ci/unit_test_spec.rb
new file mode 100644
index 00000000000..2207a362be3
--- /dev/null
+++ b/spec/models/ci/unit_test_spec.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::UnitTest do
+ describe 'relationships' do
+ it { is_expected.to belong_to(:project) }
+ it { is_expected.to have_many(:unit_test_failures) }
+ end
+
+ describe 'validations' do
+ subject { build(:ci_unit_test) }
+
+ it { is_expected.to validate_presence_of(:project) }
+ it { is_expected.to validate_presence_of(:key_hash) }
+ it { is_expected.to validate_presence_of(:name) }
+ it { is_expected.to validate_presence_of(:suite_name) }
+ end
+
+ describe '.find_or_create_by_batch' do
+ let(:project) { create(:project) }
+
+ it 'finds or creates records for the given unit test keys', :aggregate_failures do
+ existing_test = create(:ci_unit_test, project: project, suite_name: 'rspec', name: 'Math#sum adds numbers')
+ new_key = Digest::SHA256.hexdigest(SecureRandom.hex)
+ attrs = [
+ {
+ key_hash: existing_test.key_hash,
+ name: 'This new name will not apply',
+ suite_name: 'This new suite name will not apply'
+ },
+ {
+ key_hash: new_key,
+ name: 'Component works',
+ suite_name: 'jest'
+ }
+ ]
+
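+ # The existing record is matched by key_hash and keeps its stored
+ # name and suite_name; only the unseen key_hash creates a new row.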
+ result = described_class.find_or_create_by_batch(project, attrs)
+
+ expect(result).to match_array([
+ have_attributes(
+ key_hash: existing_test.key_hash,
+ suite_name: 'rspec',
+ name: 'Math#sum adds numbers'
+ ),
+ have_attributes(
+ key_hash: new_key,
+ suite_name: 'jest',
+ name: 'Component works'
+ )
+ ])
+
+ expect(result).to all(be_persisted)
+ end
+
+ context 'when a given name or suite_name exceeds the string size limit' do
+ before do
+ stub_const("#{described_class}::MAX_NAME_SIZE", 6)
+ stub_const("#{described_class}::MAX_SUITE_NAME_SIZE", 6)
+ end
+
+ it 'truncates the values before storing the information' do
+ new_key = Digest::SHA256.hexdigest(SecureRandom.hex)
+ attrs = [
+ {
+ key_hash: new_key,
+ name: 'abcdefg',
+ suite_name: 'abcdefg'
+ }
+ ]
+
+ result = described_class.find_or_create_by_batch(project, attrs)
+
+ expect(result).to match_array([
+ have_attributes(
+ key_hash: new_key,
+ suite_name: 'abc...',
+ name: 'abc...'
+ )
+ ])
+
+ expect(result).to all(be_persisted)
+ end
+ end
+ end
+end
diff --git a/spec/models/clusters/agent_token_spec.rb b/spec/models/clusters/agent_token_spec.rb
index a1b45df1970..680b351d24a 100644
--- a/spec/models/clusters/agent_token_spec.rb
+++ b/spec/models/clusters/agent_token_spec.rb
@@ -24,4 +24,53 @@ RSpec.describe Clusters::AgentToken do
expect(agent_token.token.length).to be >= 50
end
end
+
+ describe '#track_usage', :clean_gitlab_redis_cache do
+ let(:agent_token) { create(:cluster_agent_token) }
+
+ subject { agent_token.track_usage }
+
+ context 'when last_used_at was updated recently' do
+ before do
+ agent_token.update!(last_used_at: 10.minutes.ago)
+ end
+
+ it 'updates cache but not database' do
+ expect { subject }.not_to change { agent_token.reload.read_attribute(:last_used_at) }
+
+ expect_redis_update
+ end
+ end
+
+ context 'when last_used_at was not updated recently' do
+ it 'updates cache and database' do
+ does_db_update
+ expect_redis_update
+ end
+
+ context 'with invalid token' do
+ before do
+ agent_token.description = SecureRandom.hex(2000)
+ end
+
+ it 'still updates caches and database' do
+ expect(agent_token).to be_invalid
+
+ does_db_update
+ expect_redis_update
+ end
+ end
+ end
+
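+ # Shared helpers: the cached attributes land under the
+ # "cache:<class>:<id>:attributes" Redis key on every call, while the
+ # last_used_at column is only written when the stored value is stale.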
+ def expect_redis_update
+ Gitlab::Redis::Cache.with do |redis|
+ redis_key = "cache:#{described_class.name}:#{agent_token.id}:attributes"
+ expect(redis.get(redis_key)).to be_present
+ end
+ end
+
+ def does_db_update
+ expect { subject }.to change { agent_token.reload.read_attribute(:last_used_at) }
+ end
+ end
end
diff --git a/spec/models/clusters/applications/prometheus_spec.rb b/spec/models/clusters/applications/prometheus_spec.rb
index 032de6aa7c2..5a0ccabd467 100644
--- a/spec/models/clusters/applications/prometheus_spec.rb
+++ b/spec/models/clusters/applications/prometheus_spec.rb
@@ -39,6 +39,19 @@ RSpec.describe Clusters::Applications::Prometheus do
end
end
+ describe 'transition to externally_installed' do
+ let(:project) { create(:project) }
+ let(:cluster) { create(:cluster, :with_installed_helm) }
+ let(:application) { create(:clusters_applications_prometheus, :installing, cluster: cluster) }
+
+ it 'schedules post installation job' do
+ expect(Clusters::Applications::ActivateServiceWorker)
+ .to receive(:perform_async).with(cluster.id, 'prometheus')
+
+ application.make_externally_installed!
+ end
+ end
+
describe 'transition to updating' do
let(:project) { create(:project) }
let(:cluster) { create(:cluster, projects: [project]) }
@@ -61,85 +74,8 @@ RSpec.describe Clusters::Applications::Prometheus do
end
describe '#prometheus_client' do
- shared_examples 'exception caught for prometheus client' do
- before do
- allow(kube_client).to receive(:proxy_url).and_raise(exception)
- end
-
- it 'returns nil' do
- expect(subject.prometheus_client).to be_nil
- end
- end
-
- context 'cluster is nil' do
- it 'returns nil' do
- expect(subject.cluster).to be_nil
- expect(subject.prometheus_client).to be_nil
- end
- end
-
- context "cluster doesn't have kubeclient" do
- let(:cluster) { create(:cluster) }
-
- subject { create(:clusters_applications_prometheus, cluster: cluster) }
-
- it 'returns nil' do
- expect(subject.prometheus_client).to be_nil
- end
- end
-
- context 'cluster has kubeclient' do
- let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
- let(:kubernetes_url) { subject.cluster.platform_kubernetes.api_url }
- let(:kube_client) { subject.cluster.kubeclient.core_client }
-
- subject { create(:clusters_applications_prometheus, cluster: cluster) }
-
- before do
- subject.cluster.platform_kubernetes.namespace = 'a-namespace'
- stub_kubeclient_discover(cluster.platform_kubernetes.api_url)
-
- create(:cluster_kubernetes_namespace,
- cluster: cluster,
- cluster_project: cluster.cluster_project,
- project: cluster.cluster_project.project)
- end
-
- it 'creates proxy prometheus_client' do
- expect(subject.prometheus_client).to be_instance_of(Gitlab::PrometheusClient)
- end
-
- it 'merges proxy_url, options and headers from kube client with prometheus_client options' do
- expect(Gitlab::PrometheusClient)
- .to(receive(:new))
- .with(a_valid_url, kube_client.rest_client.options.merge({
- headers: kube_client.headers,
- timeout: PrometheusAdapter::DEFAULT_PROMETHEUS_REQUEST_TIMEOUT_SEC
- }))
- subject.prometheus_client
- end
-
- context 'when cluster is not reachable' do
- it_behaves_like 'exception caught for prometheus client' do
- let(:exception) { Kubeclient::HttpError.new(401, 'Unauthorized', nil) }
- end
- end
-
- context 'when there is a socket error while contacting cluster' do
- it_behaves_like 'exception caught for prometheus client' do
- let(:exception) { Errno::ECONNREFUSED }
- end
-
- it_behaves_like 'exception caught for prometheus client' do
- let(:exception) { Errno::ECONNRESET }
- end
- end
-
- context 'when the network is unreachable' do
- it_behaves_like 'exception caught for prometheus client' do
- let(:exception) { Errno::ENETUNREACH }
- end
- end
+ include_examples '#prometheus_client shared' do
+ let(:factory) { :clusters_applications_prometheus }
end
end
diff --git a/spec/models/clusters/cluster_spec.rb b/spec/models/clusters/cluster_spec.rb
index a8f81cba285..b2ed64fd9b0 100644
--- a/spec/models/clusters/cluster_spec.rb
+++ b/spec/models/clusters/cluster_spec.rb
@@ -21,6 +21,7 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
it { is_expected.to have_one(:provider_gcp) }
it { is_expected.to have_one(:provider_aws) }
it { is_expected.to have_one(:platform_kubernetes) }
+ it { is_expected.to have_one(:integration_prometheus) }
it { is_expected.to have_one(:application_helm) }
it { is_expected.to have_one(:application_ingress) }
it { is_expected.to have_one(:application_prometheus) }
@@ -40,7 +41,6 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
it { is_expected.to delegate_method(:rbac?).to(:platform_kubernetes).with_prefix }
it { is_expected.to delegate_method(:available?).to(:application_helm).with_prefix }
it { is_expected.to delegate_method(:available?).to(:application_ingress).with_prefix }
- it { is_expected.to delegate_method(:available?).to(:application_prometheus).with_prefix }
it { is_expected.to delegate_method(:available?).to(:application_knative).with_prefix }
it { is_expected.to delegate_method(:available?).to(:application_elastic_stack).with_prefix }
it { is_expected.to delegate_method(:external_ip).to(:application_ingress).with_prefix }
@@ -1349,6 +1349,80 @@ RSpec.describe Clusters::Cluster, :use_clean_rails_memory_store_caching do
end
end
+ describe '#application_prometheus_available?' do
+ let_it_be_with_reload(:cluster) { create(:cluster, :project) }
+
+ subject { cluster.application_prometheus_available? }
+
+ it { is_expected.to be_falsey }
+
+ context 'has an integration_prometheus' do
+ let_it_be(:integration) { create(:clusters_integrations_prometheus, cluster: cluster) }
+
+ it { is_expected.to be_truthy }
+
+ context 'disabled' do
+ before do
+ cluster.integration_prometheus.enabled = false
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'has an application_prometheus' do
+ let_it_be(:application) { create(:clusters_applications_prometheus, :installed, :no_helm_installed, cluster: cluster) }
+
+ it { is_expected.to be_truthy }
+
+ context 'errored' do
+ before do
+ cluster.application_prometheus.status = Clusters::Applications::Prometheus.state_machines[:status].states[:errored]
+ end
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'also has an integration_prometheus' do
+ let_it_be(:integration) { create(:clusters_integrations_prometheus, cluster: cluster) }
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+
+ describe '#prometheus_adapter' do
+ let_it_be_with_reload(:cluster) { create(:cluster, :project) }
+
+ it 'returns nothing' do
+ expect(cluster.prometheus_adapter).to be_nil
+ end
+
+ context 'has an integration_prometheus' do
+ let_it_be(:integration) { create(:clusters_integrations_prometheus, cluster: cluster) }
+
+ it 'returns the integration' do
+ expect(cluster.prometheus_adapter).to eq(integration)
+ end
+ end
+
+ context 'has an application_prometheus' do
+ let_it_be(:application) { create(:clusters_applications_prometheus, :no_helm_installed, cluster: cluster) }
+
+ it 'returns the application' do
+ expect(cluster.prometheus_adapter).to eq(application)
+ end
+
+ context 'also has an integration_prometheus' do
+ let_it_be(:integration) { create(:clusters_integrations_prometheus, cluster: cluster) }
+
+ it 'returns the integration' do
+ expect(cluster.prometheus_adapter).to eq(integration)
+ end
+ end
+ end
+ end
+
describe '#delete_cached_resources!' do
let!(:cluster) { create(:cluster, :project) }
let!(:staging_namespace) { create(:cluster_kubernetes_namespace, cluster: cluster, namespace: 'staging') }
diff --git a/spec/models/clusters/integrations/prometheus_spec.rb b/spec/models/clusters/integrations/prometheus_spec.rb
new file mode 100644
index 00000000000..a7be1673ce2
--- /dev/null
+++ b/spec/models/clusters/integrations/prometheus_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::Integrations::Prometheus do
+ include KubernetesHelpers
+ include StubRequests
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:cluster).class_name('Clusters::Cluster') }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:cluster) }
+ it { is_expected.not_to allow_value(nil).for(:enabled) }
+ end
+
+ describe '#prometheus_client' do
+ include_examples '#prometheus_client shared' do
+ let(:factory) { :clusters_integrations_prometheus }
+ end
+ end
+
+ describe '#configured?' do
+ let(:prometheus) { create(:clusters_integrations_prometheus, cluster: cluster) }
+
+ subject { prometheus.configured? }
+
+ context 'when a kubernetes client is present' do
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+
+ it { is_expected.to be_truthy }
+
+ context 'when it is disabled' do
+ let(:prometheus) { create(:clusters_integrations_prometheus, :disabled, cluster: cluster) }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when the kubernetes URL is blocked' do
+ before do
+ blocked_ip = '127.0.0.1' # localhost addresses are blocked by default
+
+ stub_all_dns(cluster.platform.api_url, ip_address: blocked_ip)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+
+ context 'when a kubernetes client is not present' do
+ let(:cluster) { create(:cluster) }
+
+ it { is_expected.to be_falsey }
+ end
+ end
+end
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index a5f02b61132..7c00f367844 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -526,7 +526,7 @@ eos
context 'that is found' do
before do
# Artificially mark as completed.
- merge_request.update(merge_commit_sha: merge_commit.id)
+ merge_request.update!(merge_commit_sha: merge_commit.id)
end
it do
diff --git a/spec/models/commit_status_spec.rb b/spec/models/commit_status_spec.rb
index 01da379e001..e64dee2d26f 100644
--- a/spec/models/commit_status_spec.rb
+++ b/spec/models/commit_status_spec.rb
@@ -213,12 +213,12 @@ RSpec.describe CommitStatus do
context 'when it is canceled' do
before do
- commit_status.update(status: 'canceled')
+ commit_status.update!(status: 'canceled')
end
context 'when there is auto_canceled_by' do
before do
- commit_status.update(auto_canceled_by: create(:ci_empty_pipeline))
+ commit_status.update!(auto_canceled_by: create(:ci_empty_pipeline))
end
it 'is auto canceled' do
@@ -510,10 +510,6 @@ RSpec.describe CommitStatus do
end
describe '#group_name' do
- before do
- stub_feature_flags(simplified_commit_status_group_name: false)
- end
-
using RSpec::Parameterized::TableSyntax
let(:commit_status) do
@@ -528,18 +524,24 @@ RSpec.describe CommitStatus do
'rspec1 0/2' | 'rspec1'
'rspec:windows' | 'rspec:windows'
'rspec:windows 0' | 'rspec:windows 0'
+ 'rspec:windows 0 2/2' | 'rspec:windows 0'
'rspec:windows 0 test' | 'rspec:windows 0 test'
- 'rspec:windows 0 1' | 'rspec:windows'
- 'rspec:windows 0 1 name' | 'rspec:windows name'
+ 'rspec:windows 0 test 2/2' | 'rspec:windows 0 test'
+ 'rspec:windows 0 1 2/2' | 'rspec:windows'
+ 'rspec:windows 0 1 [aws] 2/2' | 'rspec:windows'
+ 'rspec:windows 0 1 name [aws] 2/2' | 'rspec:windows 0 1 name'
+ 'rspec:windows 0 1 name' | 'rspec:windows 0 1 name'
+ 'rspec:windows 0 1 name 1/2' | 'rspec:windows 0 1 name'
'rspec:windows 0/1' | 'rspec:windows'
- 'rspec:windows 0/1 name' | 'rspec:windows name'
+ 'rspec:windows 0/1 name' | 'rspec:windows 0/1 name'
+ 'rspec:windows 0/1 name 1/2' | 'rspec:windows 0/1 name'
'rspec:windows 0:1' | 'rspec:windows'
- 'rspec:windows 0:1 name' | 'rspec:windows name'
+ 'rspec:windows 0:1 name' | 'rspec:windows 0:1 name'
'rspec:windows 10000 20000' | 'rspec:windows'
'rspec:windows 0 : / 1' | 'rspec:windows'
- 'rspec:windows 0 : / 1 name' | 'rspec:windows name'
- '0 1 name ruby' | 'name ruby'
- '0 :/ 1 name ruby' | 'name ruby'
+ 'rspec:windows 0 : / 1 name' | 'rspec:windows 0 : / 1 name'
+ '0 1 name ruby' | '0 1 name ruby'
+ '0 :/ 1 name ruby' | '0 :/ 1 name ruby'
'rspec: [aws]' | 'rspec'
'rspec: [aws] 0/1' | 'rspec'
'rspec: [aws, max memory]' | 'rspec'
@@ -561,58 +563,6 @@ RSpec.describe CommitStatus do
is_expected.to eq(group_name)
end
end
-
- context 'with simplified_commit_status_group_name' do
- before do
- stub_feature_flags(simplified_commit_status_group_name: true)
- end
-
- where(:name, :group_name) do
- 'rspec1' | 'rspec1'
- 'rspec1 0 1' | 'rspec1'
- 'rspec1 0/2' | 'rspec1'
- 'rspec:windows' | 'rspec:windows'
- 'rspec:windows 0' | 'rspec:windows 0'
- 'rspec:windows 0 2/2' | 'rspec:windows 0'
- 'rspec:windows 0 test' | 'rspec:windows 0 test'
- 'rspec:windows 0 test 2/2' | 'rspec:windows 0 test'
- 'rspec:windows 0 1 2/2' | 'rspec:windows'
- 'rspec:windows 0 1 [aws] 2/2' | 'rspec:windows'
- 'rspec:windows 0 1 name [aws] 2/2' | 'rspec:windows 0 1 name'
- 'rspec:windows 0 1 name' | 'rspec:windows 0 1 name'
- 'rspec:windows 0 1 name 1/2' | 'rspec:windows 0 1 name'
- 'rspec:windows 0/1' | 'rspec:windows'
- 'rspec:windows 0/1 name' | 'rspec:windows 0/1 name'
- 'rspec:windows 0/1 name 1/2' | 'rspec:windows 0/1 name'
- 'rspec:windows 0:1' | 'rspec:windows'
- 'rspec:windows 0:1 name' | 'rspec:windows 0:1 name'
- 'rspec:windows 10000 20000' | 'rspec:windows'
- 'rspec:windows 0 : / 1' | 'rspec:windows'
- 'rspec:windows 0 : / 1 name' | 'rspec:windows 0 : / 1 name'
- '0 1 name ruby' | '0 1 name ruby'
- '0 :/ 1 name ruby' | '0 :/ 1 name ruby'
- 'rspec: [aws]' | 'rspec'
- 'rspec: [aws] 0/1' | 'rspec'
- 'rspec: [aws, max memory]' | 'rspec'
- 'rspec:linux: [aws, max memory, data]' | 'rspec:linux'
- 'rspec: [inception: [something, other thing], value]' | 'rspec'
- 'rspec:windows 0/1: [name, other]' | 'rspec:windows'
- 'rspec:windows: [name, other] 0/1' | 'rspec:windows'
- 'rspec:windows: [name, 0/1] 0/1' | 'rspec:windows'
- 'rspec:windows: [0/1, name]' | 'rspec:windows'
- 'rspec:windows: [, ]' | 'rspec:windows'
- 'rspec:windows: [name]' | 'rspec:windows'
- 'rspec:windows: [name,other]' | 'rspec:windows'
- end
-
- with_them do
- it "#{params[:name]} puts in #{params[:group_name]}" do
- commit_status.name = name
-
- is_expected.to eq(group_name)
- end
- end
- end
end
describe '#detailed_status' do
@@ -660,7 +610,7 @@ RSpec.describe CommitStatus do
end
it "raise exception when trying to update" do
- expect { commit_status.save }.to raise_error(ActiveRecord::StaleObjectError)
+ expect { commit_status.save! }.to raise_error(ActiveRecord::StaleObjectError)
end
end
@@ -679,30 +629,45 @@ RSpec.describe CommitStatus do
end
end
- describe 'set failure_reason when drop' do
+ describe '#drop' do
let(:commit_status) { create(:commit_status, :created) }
+ let(:counter) { Gitlab::Metrics.counter(:gitlab_ci_job_failure_reasons, 'desc') }
+ let(:failure_reason) { reason.to_s }
subject do
commit_status.drop!(reason)
commit_status
end
+ shared_examples 'incrementing failure reason counter' do
+ it 'increments the counter with the failure_reason' do
+ expect { subject }.to change { counter.get(reason: failure_reason) }.by(1)
+ end
+ end
+
context 'when failure_reason is nil' do
let(:reason) { }
+ let(:failure_reason) { 'unknown_failure' }
it { is_expected.to be_unknown_failure }
+
+ it_behaves_like 'incrementing failure reason counter'
end
context 'when failure_reason is script_failure' do
let(:reason) { :script_failure }
it { is_expected.to be_script_failure }
+
+ it_behaves_like 'incrementing failure reason counter'
end
context 'when failure_reason is unmet_prerequisites' do
let(:reason) { :unmet_prerequisites }
it { is_expected.to be_unmet_prerequisites }
+
+ it_behaves_like 'incrementing failure reason counter'
end
end
@@ -870,4 +835,23 @@ RSpec.describe CommitStatus do
it { is_expected.to eq(false) }
end
end
+
+ describe '#update_older_statuses_retried!' do
+ let!(:build_old) { create_status(name: 'build') }
+ let!(:build_new) { create_status(name: 'build') }
+ let!(:test) { create_status(name: 'test') }
+ let!(:build_from_other_pipeline) do
+ new_pipeline = create(:ci_pipeline, project: project, sha: project.commit.id)
+ create_status(name: 'build', pipeline: new_pipeline)
+ end
+
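+ # Only older statuses with the same name in the same pipeline are flagged;
+ # the newest status and statuses from other pipelines stay untouched.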
+ it "updates 'retried' and 'status' columns of the latest status with the same name in the same pipeline" do
+ build_new.update_older_statuses_retried!
+
+ expect(build_new.reload).to have_attributes(retried: false, processed: false)
+ expect(build_old.reload).to have_attributes(retried: true, processed: true)
+ expect(test.reload).to have_attributes(retried: false, processed: false)
+ expect(build_from_other_pipeline.reload).to have_attributes(retried: false, processed: false)
+ end
+ end
end
diff --git a/spec/models/concerns/batch_destroy_dependent_associations_spec.rb b/spec/models/concerns/batch_destroy_dependent_associations_spec.rb
index a8fcb714c64..993afd47a57 100644
--- a/spec/models/concerns/batch_destroy_dependent_associations_spec.rb
+++ b/spec/models/concerns/batch_destroy_dependent_associations_spec.rb
@@ -26,6 +26,7 @@ RSpec.describe BatchDestroyDependentAssociations do
let_it_be(:project) { create(:project) }
let_it_be(:build) { create(:ci_build, project: project) }
let_it_be(:notification_setting) { create(:notification_setting, project: project) }
+
let!(:todos) { create(:todo, project: project) }
it 'destroys multiple builds' do
diff --git a/spec/models/concerns/cache_markdown_field_spec.rb b/spec/models/concerns/cache_markdown_field_spec.rb
index 6e62d4ef31b..33a4c8eac41 100644
--- a/spec/models/concerns/cache_markdown_field_spec.rb
+++ b/spec/models/concerns/cache_markdown_field_spec.rb
@@ -17,9 +17,13 @@ RSpec.describe CacheMarkdownField, :clean_gitlab_redis_cache do
include CacheMarkdownField
def initialize(args = {})
- @title, @description, @cached_markdown_version = args[:title], args[:description], args[:cached_markdown_version]
- @title_html, @description_html = args[:title_html], args[:description_html]
- @author, @project = args[:author], args[:project]
+ @title = args[:title]
+ @description = args[:description]
+ @cached_markdown_version = args[:cached_markdown_version]
+ @title_html = args[:title_html]
+ @description_html = args[:description_html]
+ @author = args[:author]
+ @project = args[:project]
@parent_user = args[:parent_user]
end
diff --git a/spec/models/concerns/cacheable_attributes_spec.rb b/spec/models/concerns/cacheable_attributes_spec.rb
index f2877bed9cf..dc80e30216a 100644
--- a/spec/models/concerns/cacheable_attributes_spec.rb
+++ b/spec/models/concerns/cacheable_attributes_spec.rb
@@ -205,7 +205,7 @@ RSpec.describe CacheableAttributes do
end
end
- it 'uses RequestStore in addition to process memory cache', :request_store do
+ it 'uses RequestStore in addition to process memory cache', :request_store, :do_not_mock_admin_mode_setting do
# Warm up the cache
create(:application_setting).cache!
diff --git a/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb b/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb
new file mode 100644
index 00000000000..ddff9ce32b4
--- /dev/null
+++ b/spec/models/concerns/cascading_namespace_setting_attribute_spec.rb
@@ -0,0 +1,320 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe NamespaceSetting, 'CascadingNamespaceSettingAttribute' do
+ let(:group) { create(:group) }
+ let(:subgroup) { create(:group, parent: group) }
+
+ def group_settings
+ group.namespace_settings
+ end
+
+ def subgroup_settings
+ subgroup.namespace_settings
+ end
+
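+ # Resolution order exercised below: a non-nil local value wins, then the
+ # closest ancestor with a value, then the application setting. A lock_*
+ # flag on an ancestor or on the application settings freezes the value
+ # for all descendants.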
+ describe '#delayed_project_removal' do
+ subject(:delayed_project_removal) { subgroup_settings.delayed_project_removal }
+
+ context 'when the feature is disabled' do
+ before do
+ stub_feature_flags(cascading_namespace_settings: false)
+
+ group_settings.update!(delayed_project_removal: true)
+ end
+
+ it 'does not cascade' do
+ expect(delayed_project_removal).to eq(nil)
+ end
+ end
+
+ context 'when there is no parent' do
+ context 'and the value is not nil' do
+ before do
+ group_settings.update!(delayed_project_removal: true)
+ end
+
+ it 'returns the local value' do
+ expect(group_settings.delayed_project_removal).to eq(true)
+ end
+ end
+
+ context 'and the value is nil' do
+ before do
+ group_settings.update!(delayed_project_removal: nil)
+ stub_application_setting(delayed_project_removal: false)
+ end
+
+ it 'returns the application settings value' do
+ expect(group_settings.delayed_project_removal).to eq(false)
+ end
+ end
+ end
+
+ context 'when parent does not lock the attribute' do
+ context 'and value is not nil' do
+ before do
+ group_settings.update!(delayed_project_removal: false)
+ end
+
+ it 'returns local setting when present' do
+ subgroup_settings.update!(delayed_project_removal: true)
+
+ expect(delayed_project_removal).to eq(true)
+ end
+
+ it 'returns the parent value when local value is nil' do
+ subgroup_settings.update!(delayed_project_removal: nil)
+
+ expect(delayed_project_removal).to eq(false)
+ end
+
+ it 'returns the correct dirty value' do
+ subgroup_settings.delayed_project_removal = true
+
+ expect(delayed_project_removal).to eq(true)
+ end
+
+ it 'does not return the application setting value when parent value is false' do
+ stub_application_setting(delayed_project_removal: true)
+
+ expect(delayed_project_removal).to eq(false)
+ end
+ end
+
+ context 'and the value is nil' do
+ before do
+ group_settings.update!(delayed_project_removal: nil, lock_delayed_project_removal: false)
+ subgroup_settings.update!(delayed_project_removal: nil)
+
+ subgroup_settings.clear_memoization(:delayed_project_removal)
+ end
+
+ it 'cascades to the application settings value' do
+ expect(delayed_project_removal).to eq(false)
+ end
+ end
+
+ context 'when multiple ancestors set a value' do
+ let(:third_level_subgroup) { create(:group, parent: subgroup) }
+
+ before do
+ group_settings.update!(delayed_project_removal: true)
+ subgroup_settings.update!(delayed_project_removal: false)
+ end
+
+ it 'returns the closest ancestor value' do
+ expect(third_level_subgroup.namespace_settings.delayed_project_removal).to eq(false)
+ end
+ end
+ end
+
+ context 'when parent locks the attribute' do
+ before do
+ subgroup_settings.update!(delayed_project_removal: true)
+ group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
+
+ subgroup_settings.clear_memoization(:delayed_project_removal)
+ subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
+ end
+
+ it 'returns the parent value' do
+ expect(delayed_project_removal).to eq(false)
+ end
+
+ it 'does not allow the local value to be saved' do
+ subgroup_settings.delayed_project_removal = nil
+
+ expect { subgroup_settings.save! }
+ .to raise_error(ActiveRecord::RecordInvalid, /Delayed project removal cannot be changed because it is locked by an ancestor/)
+ end
+ end
+
+ context 'when the application settings locks the attribute' do
+ before do
+ subgroup_settings.update!(delayed_project_removal: true)
+ stub_application_setting(lock_delayed_project_removal: true, delayed_project_removal: true)
+ end
+
+ it 'returns the application setting value' do
+ expect(delayed_project_removal).to eq(true)
+ end
+
+ it 'does not allow the local value to be saved' do
+ subgroup_settings.delayed_project_removal = nil
+
+ expect { subgroup_settings.save! }
+ .to raise_error(ActiveRecord::RecordInvalid, /Delayed project removal cannot be changed because it is locked by an ancestor/)
+ end
+ end
+ end
+
+ describe '#delayed_project_removal?' do
+ before do
+ subgroup_settings.update!(delayed_project_removal: true)
+ group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
+
+ subgroup_settings.clear_memoization(:delayed_project_removal)
+ subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
+ end
+
+ it 'aliases the method when the attribute is a boolean' do
+ expect(subgroup_settings.delayed_project_removal?).to eq(subgroup_settings.delayed_project_removal)
+ end
+ end
+
+ describe '#delayed_project_removal_locked?' do
+ shared_examples 'not locked' do
+ it 'is not locked by an ancestor' do
+ expect(subgroup_settings.delayed_project_removal_locked_by_ancestor?).to eq(false)
+ end
+
+ it 'is not locked by application setting' do
+ expect(subgroup_settings.delayed_project_removal_locked_by_application_setting?).to eq(false)
+ end
+
+ it 'does not return a locked namespace' do
+ expect(subgroup_settings.delayed_project_removal_locked_ancestor).to be_nil
+ end
+ end
+
+ context 'when the feature is disabled' do
+ before do
+ stub_feature_flags(cascading_namespace_settings: false)
+
+ group_settings.update!(delayed_project_removal: true)
+ end
+
+ it_behaves_like 'not locked'
+ end
+
+ context 'when parent does not lock the attribute' do
+ it_behaves_like 'not locked'
+ end
+
+ context 'when parent locks the attribute' do
+ before do
+ group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
+
+ subgroup_settings.clear_memoization(:delayed_project_removal)
+ subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
+ end
+
+ it 'is locked by an ancestor' do
+ expect(subgroup_settings.delayed_project_removal_locked_by_ancestor?).to eq(true)
+ end
+
+ it 'is not locked by application setting' do
+ expect(subgroup_settings.delayed_project_removal_locked_by_application_setting?).to eq(false)
+ end
+
+ it 'returns a locked namespace settings object' do
+ expect(subgroup_settings.delayed_project_removal_locked_ancestor.namespace_id).to eq(group_settings.namespace_id)
+ end
+ end
+
+ context 'when not locked by application settings' do
+ before do
+ stub_application_setting(lock_delayed_project_removal: false)
+ end
+
+ it_behaves_like 'not locked'
+ end
+
+ context 'when locked by application settings' do
+ before do
+ stub_application_setting(lock_delayed_project_removal: true)
+ end
+
+ it 'is not locked by an ancestor' do
+ expect(subgroup_settings.delayed_project_removal_locked_by_ancestor?).to eq(false)
+ end
+
+ it 'is locked by application setting' do
+ expect(subgroup_settings.delayed_project_removal_locked_by_application_setting?).to eq(true)
+ end
+
+ it 'does not return a locked namespace' do
+ expect(subgroup_settings.delayed_project_removal_locked_ancestor).to be_nil
+ end
+ end
+ end
+
+ describe '#lock_delayed_project_removal=' do
+ context 'when parent locks the attribute' do
+ before do
+ group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
+
+ subgroup_settings.clear_memoization(:delayed_project_removal)
+ subgroup_settings.clear_memoization(:delayed_project_removal_locked_ancestor)
+ end
+
+ it 'does not allow the attribute to be saved' do
+ subgroup_settings.lock_delayed_project_removal = true
+
+ expect { subgroup_settings.save! }
+ .to raise_error(ActiveRecord::RecordInvalid, /Lock delayed project removal cannot be changed because it is locked by an ancestor/)
+ end
+ end
+
+ context 'when parent does not lock the attribute' do
+ before do
+ group_settings.update!(lock_delayed_project_removal: false)
+
+ subgroup_settings.lock_delayed_project_removal = true
+ end
+
+ it 'allows the lock to be set when the attribute is not nil' do
+ subgroup_settings.delayed_project_removal = true
+
+ expect(subgroup_settings.save).to eq(true)
+ end
+
+ it 'does not allow the lock to be saved when the attribute is nil' do
+ subgroup_settings.delayed_project_removal = nil
+
+ expect { subgroup_settings.save! }
+ .to raise_error(ActiveRecord::RecordInvalid, /Delayed project removal cannot be nil when locking the attribute/)
+ end
+ end
+
+ context 'when application settings locks the attribute' do
+ before do
+ stub_application_setting(lock_delayed_project_removal: true)
+ end
+
+ it 'does not allow the attribute to be saved' do
+ subgroup_settings.lock_delayed_project_removal = true
+
+ expect { subgroup_settings.save! }
+ .to raise_error(ActiveRecord::RecordInvalid, /Lock delayed project removal cannot be changed because it is locked by an ancestor/)
+ end
+ end
+
+ context 'when application_settings does not lock the attribute' do
+ before do
+ stub_application_setting(lock_delayed_project_removal: false)
+ end
+
+ it 'allows the attribute to be saved' do
+ subgroup_settings.delayed_project_removal = true
+ subgroup_settings.lock_delayed_project_removal = true
+
+ expect(subgroup_settings.save).to eq(true)
+ end
+ end
+ end
+
+ describe 'after update callback' do
+ before do
+ subgroup_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: false)
+ end
+
+ it 'clears descendant locks' do
+ group_settings.update!(lock_delayed_project_removal: true, delayed_project_removal: true)
+
+ expect(subgroup_settings.reload.lock_delayed_project_removal).to eq(false)
+ end
+ end
+end
diff --git a/spec/models/concerns/ci/artifactable_spec.rb b/spec/models/concerns/ci/artifactable_spec.rb
index ebc838e86a6..62fc689a9ca 100644
--- a/spec/models/concerns/ci/artifactable_spec.rb
+++ b/spec/models/concerns/ci/artifactable_spec.rb
@@ -72,5 +72,33 @@ RSpec.describe Ci::Artifactable do
expect(Ci::JobArtifact.expired(1).order_id_asc).to eq([recently_expired_artifact])
end
end
+
+ describe '.with_files_stored_locally' do
+ it 'returns artifacts stored locally' do
+ expect(Ci::JobArtifact.with_files_stored_locally).to contain_exactly(recently_expired_artifact, later_expired_artifact, not_expired_artifact)
+ end
+ end
+
+ describe '.with_files_stored_remotely' do
+ let(:remote_artifact) { create(:ci_job_artifact, :remote_store) }
+
+ before do
+ stub_artifacts_object_storage
+ end
+
+ it 'returns artifacts stored remotely' do
+ expect(Ci::JobArtifact.with_files_stored_remotely).to contain_exactly(remote_artifact)
+ end
+ end
+
+ describe '.project_id_in' do
+ context 'when artifacts belong to projects' do
+ let(:project_ids) { [recently_expired_artifact.project.id, not_expired_artifact.project.id, non_existing_record_id] }
+
+ it 'returns artifacts belonging to projects' do
+ expect(Ci::JobArtifact.project_id_in(project_ids)).to contain_exactly(recently_expired_artifact, not_expired_artifact)
+ end
+ end
+ end
end
end
diff --git a/spec/models/concerns/featurable_spec.rb b/spec/models/concerns/featurable_spec.rb
index b550d22f686..295f3523dd5 100644
--- a/spec/models/concerns/featurable_spec.rb
+++ b/spec/models/concerns/featurable_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Featurable do
let_it_be(:user) { create(:user) }
+
let(:project) { create(:project) }
let(:feature_class) { subject.class }
let(:features) { feature_class::FEATURES }
@@ -163,7 +164,7 @@ RSpec.describe Featurable do
end
def update_all_project_features(project, features, value)
- project_feature_attributes = features.map { |f| ["#{f}_access_level", value] }.to_h
+ project_feature_attributes = features.to_h { |f| ["#{f}_access_level", value] }
project.project_feature.update!(project_feature_attributes)
end
end
diff --git a/spec/models/concerns/has_timelogs_report_spec.rb b/spec/models/concerns/has_timelogs_report_spec.rb
new file mode 100644
index 00000000000..f694fc350ee
--- /dev/null
+++ b/spec/models/concerns/has_timelogs_report_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe HasTimelogsReport do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:issue) { create(:issue, project: create(:project, :public, group: group)) }
+
+ describe '#timelogs' do
+ let!(:timelog1) { create_timelog(15.days.ago) }
+ let!(:timelog2) { create_timelog(10.days.ago) }
+ let!(:timelog3) { create_timelog(5.days.ago) }
+ let(:start_time) { 20.days.ago }
+ let(:end_time) { 8.days.ago }
+
+ before do
+ group.add_developer(user)
+ end
+
+ it 'returns collection of timelogs between given times' do
+ expect(group.timelogs(start_time, end_time).to_a).to match_array([timelog1, timelog2])
+ end
+
+ it 'returns empty collection if times are not present' do
+ expect(group.timelogs(nil, nil)).to be_empty
+ end
+
+ it 'returns empty collection if time range is invalid' do
+ expect(group.timelogs(end_time, start_time)).to be_empty
+ end
+ end
+
+ describe '#user_can_access_group_timelogs?' do
+ it 'returns true if user can access group timelogs' do
+ group.add_developer(user)
+
+ expect(group).to be_user_can_access_group_timelogs(user)
+ end
+
+ it 'returns false if user has insufficient permissions' do
+ group.add_guest(user)
+
+ expect(group).not_to be_user_can_access_group_timelogs(user)
+ end
+ end
+
+ def create_timelog(time)
+ create(:timelog, issue: issue, user: user, spent_at: time)
+ end
+end
diff --git a/spec/models/concerns/issuable_spec.rb b/spec/models/concerns/issuable_spec.rb
index 3545c8e9686..14db9b530db 100644
--- a/spec/models/concerns/issuable_spec.rb
+++ b/spec/models/concerns/issuable_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Issuable do
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:author) }
it { is_expected.to have_many(:notes).dependent(:destroy) }
- it { is_expected.to have_many(:todos).dependent(:destroy) }
+ it { is_expected.to have_many(:todos) }
it { is_expected.to have_many(:labels) }
it { is_expected.to have_many(:note_authors).through(:notes) }
@@ -65,6 +65,23 @@ RSpec.describe Issuable do
it { expect(issuable_class).to respond_to(:opened) }
it { expect(issuable_class).to respond_to(:closed) }
it { expect(issuable_class).to respond_to(:assigned) }
+
+ describe '.includes_for_bulk_update' do
+ before do
+ stub_const('Example', Class.new(ActiveRecord::Base))
+
+ Example.class_eval do
+ include Issuable # adds :labels and :metrics, among others
+
+ belongs_to :author
+ has_many :assignees
+ end
+ end
+
+ it 'includes available associations' do
+ expect(Example.includes_for_bulk_update.includes_values).to eq([:author, :assignees, :labels, :metrics])
+ end
+ end
end
describe 'author_name' do
@@ -380,7 +397,7 @@ RSpec.describe Issuable do
context 'user is a participant in the issue' do
before do
- allow(issue).to receive(:participants).with(user).and_return([user])
+ allow(issue).to receive(:participant?).with(user).and_return(true)
end
it 'returns false when no subscription exists' do
diff --git a/spec/models/concerns/milestoneable_spec.rb b/spec/models/concerns/milestoneable_spec.rb
index 5fb3b39f734..961eac4710d 100644
--- a/spec/models/concerns/milestoneable_spec.rb
+++ b/spec/models/concerns/milestoneable_spec.rb
@@ -50,13 +50,13 @@ RSpec.describe Milestoneable do
it 'returns true with a milestone from the issue project' do
milestone = create(:milestone, project: project)
- expect(build_milestoneable(milestone.id).milestone_available?).to be_truthy
+ expect(build_milestoneable(milestone.id).milestone_available?).to be(true)
end
it 'returns true with a milestone from the issue project group' do
milestone = create(:milestone, group: group)
- expect(build_milestoneable(milestone.id).milestone_available?).to be_truthy
+ expect(build_milestoneable(milestone.id).milestone_available?).to be(true)
end
it 'returns true with a milestone from the parent of the issue project group' do
@@ -64,19 +64,23 @@ RSpec.describe Milestoneable do
group.update!(parent: parent)
milestone = create(:milestone, group: parent)
- expect(build_milestoneable(milestone.id).milestone_available?).to be_truthy
+ expect(build_milestoneable(milestone.id).milestone_available?).to be(true)
+ end
+
+ it 'returns true with a blank milestone' do
+ expect(build_milestoneable('').milestone_available?).to be(true)
end
it 'returns false with a milestone from another project' do
milestone = create(:milestone)
- expect(build_milestoneable(milestone.id).milestone_available?).to be_falsey
+ expect(build_milestoneable(milestone.id).milestone_available?).to be(false)
end
it 'returns false with a milestone from another group' do
milestone = create(:milestone, group: create(:group))
- expect(build_milestoneable(milestone.id).milestone_available?).to be_falsey
+ expect(build_milestoneable(milestone.id).milestone_available?).to be(false)
end
end
end
diff --git a/spec/models/concerns/milestoneish_spec.rb b/spec/models/concerns/milestoneish_spec.rb
index 3b8fc465421..46a876f34e9 100644
--- a/spec/models/concerns/milestoneish_spec.rb
+++ b/spec/models/concerns/milestoneish_spec.rb
@@ -2,30 +2,28 @@
require 'spec_helper'
-RSpec.describe Milestone, 'Milestoneish' do
- let(:author) { create(:user) }
- let(:assignee) { create(:user) }
- let(:non_member) { create(:user) }
- let(:member) { create(:user) }
- let(:guest) { create(:user) }
- let(:admin) { create(:admin) }
- let(:project) { create(:project, :public) }
- let(:milestone) { create(:milestone, project: project) }
- let(:label1) { create(:label, project: project) }
- let(:label2) { create(:label, project: project) }
- let!(:issue) { create(:issue, project: project, milestone: milestone, assignees: [member], labels: [label1]) }
- let!(:security_issue_1) { create(:issue, :confidential, project: project, author: author, milestone: milestone, labels: [label2]) }
- let!(:security_issue_2) { create(:issue, :confidential, project: project, assignees: [assignee], milestone: milestone) }
- let!(:closed_issue_1) { create(:issue, :closed, project: project, milestone: milestone) }
- let!(:closed_issue_2) { create(:issue, :closed, project: project, milestone: milestone) }
- let!(:closed_security_issue_1) { create(:issue, :confidential, :closed, project: project, author: author, milestone: milestone) }
- let!(:closed_security_issue_2) { create(:issue, :confidential, :closed, project: project, assignees: [assignee], milestone: milestone) }
- let!(:closed_security_issue_3) { create(:issue, :confidential, :closed, project: project, author: author, milestone: milestone) }
- let!(:closed_security_issue_4) { create(:issue, :confidential, :closed, project: project, assignees: [assignee], milestone: milestone) }
- let!(:merge_request) { create(:merge_request, source_project: project, target_project: project, milestone: milestone) }
- let(:label_1) { create(:label, title: 'label_1', project: project, priority: 1) }
- let(:label_2) { create(:label, title: 'label_2', project: project, priority: 2) }
- let(:label_3) { create(:label, title: 'label_3', project: project) }
+RSpec.describe Milestone, 'Milestoneish', factory_default: :keep do
+ let_it_be(:author) { create(:user) }
+ let_it_be(:assignee) { create(:user) }
+ let_it_be(:non_member) { create(:user) }
+ let_it_be(:member) { create(:user) }
+ let_it_be(:guest) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:project, reload: true) { create_default(:project, :public, :empty_repo).freeze }
+ let_it_be(:milestone, refind: true) { create_default(:milestone, project: project) }
+ let_it_be(:label1) { create(:label) }
+ let_it_be(:label2) { create(:label) }
+ let_it_be(:issue, reload: true) { create(:issue, milestone: milestone, assignees: [member], labels: [label1]) }
+ let_it_be(:security_issue_1, reload: true) { create(:issue, :confidential, author: author, milestone: milestone, labels: [label2]) }
+ let_it_be(:security_issue_2, reload: true) { create(:issue, :confidential, assignees: [assignee], milestone: milestone) }
+ let_it_be(:closed_issue_1, reload: true) { create(:issue, :closed, milestone: milestone) }
+ let_it_be(:closed_issue_2, reload: true) { create(:issue, :closed, milestone: milestone) }
+ let_it_be(:closed_security_issue_1, reload: true) { create(:issue, :confidential, :closed, author: author, milestone: milestone) }
+ let_it_be(:closed_security_issue_2, reload: true) { create(:issue, :confidential, :closed, assignees: [assignee], milestone: milestone) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project, target_project: project, milestone: milestone) }
+ let_it_be(:label_1) { create(:label, title: 'label_1', priority: 1) }
+ let_it_be(:label_2) { create(:label, title: 'label_2', priority: 2) }
+ let_it_be(:label_3) { create(:label, title: 'label_3') }
before do
project.add_developer(member)
@@ -63,7 +61,7 @@ RSpec.describe Milestone, 'Milestoneish' do
end
end
- context 'attributes visibility' do
+ context 'with attributes visibility' do
using RSpec::Parameterized::TableSyntax
let(:users) do
@@ -167,8 +165,6 @@ RSpec.describe Milestone, 'Milestoneish' do
end
describe '#merge_requests_visible_to_user' do
- let(:merge_request) { create(:merge_request, source_project: project, milestone: milestone) }
-
context 'when project is private' do
before do
project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
@@ -211,10 +207,11 @@ RSpec.describe Milestone, 'Milestoneish' do
end
context 'when milestone is at parent level group' do
- let(:parent_group) { create(:group) }
- let(:group) { create(:group, parent: parent_group) }
- let(:project) { create(:project, namespace: group) }
- let(:milestone) { create(:milestone, group: parent_group) }
+ let_it_be(:parent_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent_group) }
+ let_it_be(:project) { create(:project, :empty_repo, namespace: group) }
+ let_it_be(:milestone) { create(:milestone, group: parent_group) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project, milestone: milestone) }
it 'does not return any merge request for a non member' do
merge_requests = milestone.merge_requests_visible_to_user(non_member)
@@ -243,7 +240,7 @@ RSpec.describe Milestone, 'Milestoneish' do
end
describe '#percent_complete', :use_clean_rails_memory_store_caching do
- context 'division by zero' do
+ context 'with division by zero' do
let(:new_milestone) { build_stubbed(:milestone) }
it { expect(new_milestone.percent_complete).to eq(0) }
@@ -252,13 +249,19 @@ RSpec.describe Milestone, 'Milestoneish' do
describe '#closed_issues_count' do
it 'counts all closed issues including confidential' do
- expect(milestone.closed_issues_count).to eq 6
+ expect(milestone.closed_issues_count).to eq 4
end
end
describe '#total_issues_count' do
it 'counts all issues including confidential' do
- expect(milestone.total_issues_count).to eq 9
+ expect(milestone.total_issues_count).to eq 7
+ end
+ end
+
+ describe '#total_merge_requests_count' do
+ it 'counts merge requests' do
+ expect(milestone.total_merge_requests_count).to eq 1
end
end
diff --git a/spec/models/concerns/participable_spec.rb b/spec/models/concerns/participable_spec.rb
index 3376e337dc9..903c7ae16b6 100644
--- a/spec/models/concerns/participable_spec.rb
+++ b/spec/models/concerns/participable_spec.rb
@@ -39,11 +39,12 @@ RSpec.describe Participable do
expect(participants).to include(user3)
end
- it 'caches the raw list of participants' do
+ it 'caches the list of filtered participants' do
instance = model.new
user1 = build(:user)
- expect(instance).to receive(:raw_participants).once
+ expect(instance).to receive(:all_participants_hash).once.and_return({})
+ expect(instance).to receive(:filter_by_ability).once
instance.participants(user1)
instance.participants(user1)
@@ -91,5 +92,71 @@ RSpec.describe Participable do
expect(ext_arg).to be_an_instance_of(Gitlab::ReferenceExtractor)
end
end
+
+ context 'when participable is a personal snippet' do
+ let(:model) { PersonalSnippet }
+ let(:instance) { model.new(author: user1) }
+
+ let(:user1) { build(:user) }
+ let(:user2) { build(:user) }
+ let(:user3) { build(:user) }
+
+ before do
+ allow(model).to receive(:participant_attrs).and_return([:foo, :bar])
+ end
+
+ it 'returns the list of participants' do
+ expect(instance).to receive(:foo).and_return(user1)
+ expect(instance).to receive(:bar).and_return(user2)
+
+ participants = instance.participants(user1)
+ expect(participants).to contain_exactly(user1)
+ end
+ end
+ end
+
+ describe '#participant?' do
+ let(:instance) { model.new }
+
+ let(:user1) { build(:user) }
+ let(:user2) { build(:user) }
+ let(:user3) { build(:user) }
+ let(:project) { build(:project, :public) }
+
+ before do
+ allow(model).to receive(:participant_attrs).and_return([:foo, :bar])
+ end
+
+ it 'returns whether the user is a participant' do
+ allow(instance).to receive(:foo).and_return(user2)
+ allow(instance).to receive(:bar).and_return(user3)
+ allow(instance).to receive(:project).and_return(project)
+
+ expect(instance.participant?(user1)).to be false
+ expect(instance.participant?(user2)).to be true
+ expect(instance.participant?(user3)).to be true
+ end
+
+ it 'caches the list of raw participants' do
+ expect(instance).to receive(:raw_participants).once.and_return([])
+ expect(instance).to receive(:project).twice.and_return(project)
+
+ instance.participant?(user1)
+ instance.participant?(user1)
+ end
+
+ context 'when participable is a personal snippet' do
+ let(:model) { PersonalSnippet }
+ let(:instance) { model.new(author: user1) }
+
+ it 'returns whether the user is a participant' do
+ allow(instance).to receive(:foo).and_return(user1)
+ allow(instance).to receive(:bar).and_return(user2)
+
+ expect(instance.participant?(user1)).to be true
+ expect(instance.participant?(user2)).to be false
+ expect(instance.participant?(user3)).to be false
+ end
+ end
end
end
diff --git a/spec/models/concerns/safe_url_spec.rb b/spec/models/concerns/safe_url_spec.rb
index 3d38c05bf11..c298e56b1b1 100644
--- a/spec/models/concerns/safe_url_spec.rb
+++ b/spec/models/concerns/safe_url_spec.rb
@@ -26,14 +26,16 @@ RSpec.describe SafeUrl do
context 'when URL contains credentials' do
let(:url) { 'http://foo:bar@example.com' }
- it { is_expected.to eq('http://*****:*****@example.com')}
+ it 'masks username and password' do
+ is_expected.to eq('http://*****:*****@example.com')
+ end
- context 'when username is whitelisted' do
- subject { test_class.safe_url(usernames_whitelist: usernames_whitelist) }
+ context 'when username is allowed' do
+ subject { test_class.safe_url(allowed_usernames: usernames) }
- let(:usernames_whitelist) { %w[foo] }
+ let(:usernames) { %w[foo] }
- it 'does expect the whitelisted username not to be masked' do
+ it 'masks the password, but not the username' do
is_expected.to eq('http://foo:*****@example.com')
end
end
diff --git a/spec/models/concerns/sidebars/container_with_html_options_spec.rb b/spec/models/concerns/sidebars/container_with_html_options_spec.rb
new file mode 100644
index 00000000000..cc83fc84113
--- /dev/null
+++ b/spec/models/concerns/sidebars/container_with_html_options_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::ContainerWithHtmlOptions do
+ subject do
+ Class.new do
+ include Sidebars::ContainerWithHtmlOptions
+
+ def title
+ 'Foo'
+ end
+ end.new
+ end
+
+ describe '#container_html_options' do
+ it 'includes the aria-label attribute by default' do
+ expect(subject.container_html_options).to eq(aria: { label: 'Foo' })
+ end
+ end
+end
diff --git a/spec/models/concerns/sidebars/positionable_list_spec.rb b/spec/models/concerns/sidebars/positionable_list_spec.rb
new file mode 100644
index 00000000000..231aa5295dd
--- /dev/null
+++ b/spec/models/concerns/sidebars/positionable_list_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::PositionableList do
+ subject do
+ Class.new do
+ include Sidebars::PositionableList
+ end.new
+ end
+
+ describe '#add_element' do
+ it 'adds the element to the last position of the list' do
+ list = [1, 2]
+
+ subject.add_element(list, 3)
+
+ expect(list).to eq([1, 2, 3])
+ end
+ end
+
+ describe '#insert_element_before' do
+ let(:user) { build(:user) }
+ let(:list) { [1, user] }
+
+ it 'adds element before the specified element class' do
+ subject.insert_element_before(list, User, 2)
+
+ expect(list).to eq [1, 2, user]
+ end
+
+ context 'when reference element does not exist' do
+ it 'adds the element to the top of the list' do
+ subject.insert_element_before(list, Project, 2)
+
+ expect(list).to eq [2, 1, user]
+ end
+ end
+ end
+
+ describe '#insert_element_after' do
+ let(:user) { build(:user) }
+ let(:list) { [1, user] }
+
+ it 'adds element after the specified element class' do
+ subject.insert_element_after(list, Integer, 2)
+
+ expect(list).to eq [1, 2, user]
+ end
+
+ context 'when reference element does not exist' do
+ it 'adds the element to the end of the list' do
+ subject.insert_element_after(list, Project, 2)
+
+ expect(list).to eq [1, user, 2]
+ end
+ end
+ end
+end
diff --git a/spec/models/concerns/sortable_spec.rb b/spec/models/concerns/sortable_spec.rb
index bbfdaeec64c..cfa00bab025 100644
--- a/spec/models/concerns/sortable_spec.rb
+++ b/spec/models/concerns/sortable_spec.rb
@@ -3,6 +3,31 @@
require 'spec_helper'
RSpec.describe Sortable do
+ describe 'scopes' do
+ describe 'secondary ordering by id' do
+ let(:sorted_relation) { Group.all.order_created_asc }
+
+ def arel_orders(relation)
+ relation.arel.orders
+ end
+
+ it 'allows secondary ordering by id ascending' do
+ orders = arel_orders(sorted_relation.with_order_id_asc)
+
+ expect(orders.map { |arel| arel.expr.name }).to eq(%w(created_at id))
+ expect(orders).to all(be_kind_of(Arel::Nodes::Ascending))
+ end
+
+ it 'allows secondary ordering by id descending' do
+ orders = arel_orders(sorted_relation.with_order_id_desc)
+
+ expect(orders.map { |arel| arel.expr.name }).to eq(%w(created_at id))
+ expect(orders.first).to be_kind_of(Arel::Nodes::Ascending)
+ expect(orders.last).to be_kind_of(Arel::Nodes::Descending)
+ end
+ end
+ end
+
describe '.order_by' do
let(:arel_table) { Group.arel_table }
let(:relation) { Group.all }
diff --git a/spec/models/concerns/subscribable_spec.rb b/spec/models/concerns/subscribable_spec.rb
index 3e52ca5cf63..a60a0a5e26d 100644
--- a/spec/models/concerns/subscribable_spec.rb
+++ b/spec/models/concerns/subscribable_spec.rb
@@ -7,50 +7,54 @@ RSpec.describe Subscribable, 'Subscribable' do
let(:resource) { create(:issue, project: project) }
let(:user_1) { create(:user) }
- describe '#subscribed?' do
+ shared_examples 'returns expected values' do |method|
context 'without user' do
it 'returns false' do
- expect(resource.subscribed?(nil, project)).to be_falsey
+ expect(resource.public_send(method, nil, project)).to be_falsey
end
end
context 'without project' do
it 'returns false when no subscription exists' do
- expect(resource.subscribed?(user_1)).to be_falsey
+ expect(resource.public_send(method, user_1)).to be_falsey
end
- it 'returns true when a subcription exists and subscribed is true' do
+ it 'returns true when a subscription exists and subscribed is true' do
resource.subscriptions.create!(user: user_1, subscribed: true)
- expect(resource.subscribed?(user_1)).to be_truthy
+ expect(resource.public_send(method, user_1)).to be_truthy
end
- it 'returns false when a subcription exists and subscribed is false' do
+ it 'returns false when a subscription exists and subscribed is false' do
resource.subscriptions.create!(user: user_1, subscribed: false)
- expect(resource.subscribed?(user_1)).to be_falsey
+ expect(resource.public_send(method, user_1)).to be_falsey
end
end
context 'with project' do
it 'returns false when no subscription exists' do
- expect(resource.subscribed?(user_1, project)).to be_falsey
+ expect(resource.public_send(method, user_1, project)).to be_falsey
end
- it 'returns true when a subcription exists and subscribed is true' do
+ it 'returns true when a subscription exists and subscribed is true' do
resource.subscriptions.create!(user: user_1, project: project, subscribed: true)
- expect(resource.subscribed?(user_1, project)).to be_truthy
+ expect(resource.public_send(method, user_1, project)).to be_truthy
end
- it 'returns false when a subcription exists and subscribed is false' do
+ it 'returns false when a subscription exists and subscribed is false' do
resource.subscriptions.create!(user: user_1, project: project, subscribed: false)
- expect(resource.subscribed?(user_1, project)).to be_falsey
+ expect(resource.public_send(method, user_1, project)).to be_falsey
end
end
end
+ describe '#subscribed?' do
+ it_behaves_like 'returns expected values', :subscribed?
+ end
+
describe '#subscribers' do
it 'returns [] when no subcribers exists' do
expect(resource.subscribers(project)).to be_empty
@@ -189,4 +193,27 @@ RSpec.describe Subscribable, 'Subscribable' do
it_behaves_like 'setting subscriptions'
end
end
+
+ describe '#lazy_subscription' do
+ let(:labels) { create_list(:group_label, 5) }
+
+ before do
+ labels.each do |label|
+ create(:subscription, :group_label, user: user_1, subscribable: label)
+ end
+ end
+
+ it 'executes only one SQL query' do
+ lazy_queries = ActiveRecord::QueryRecorder.new do
+ labels.each { |label| label.lazy_subscription(user_1) }
+ end
+
+ preloaded_queries = ActiveRecord::QueryRecorder.new do
+ labels.each { |label| label.lazy_subscription(user_1)&.subscribed? }
+ end
+
+ expect(lazy_queries.count).to eq(0)
+ expect(preloaded_queries.count).to eq(1)
+ end
+ end
end
diff --git a/spec/models/concerns/token_authenticatable_spec.rb b/spec/models/concerns/token_authenticatable_spec.rb
index 2df76684d71..4bdb3e0a32a 100644
--- a/spec/models/concerns/token_authenticatable_spec.rb
+++ b/spec/models/concerns/token_authenticatable_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe ApplicationSetting, 'TokenAuthenticatable' do
it 'persists new token as an encrypted string' do
expect(subject).to eq settings.reload.runners_registration_token
expect(settings.read_attribute('runners_registration_token_encrypted'))
- .to eq Gitlab::CryptoHelper.aes256_gcm_encrypt(subject, nonce: Gitlab::CryptoHelper::AES256_GCM_IV_STATIC)
+ .to eq TokenAuthenticatableStrategies::EncryptionHelper.encrypt_token(subject)
expect(settings).to be_persisted
end
@@ -243,7 +243,7 @@ RSpec.describe Ci::Build, 'TokenAuthenticatable' do
it 'persists new token as an encrypted string' do
build.ensure_token!
- encrypted = Gitlab::CryptoHelper.aes256_gcm_encrypt(build.token, nonce: Gitlab::CryptoHelper::AES256_GCM_IV_STATIC)
+ encrypted = TokenAuthenticatableStrategies::EncryptionHelper.encrypt_token(build.token)
expect(build.read_attribute('token_encrypted')).to eq encrypted
end
diff --git a/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb b/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
index 1e1cd97e410..b311e302a31 100644
--- a/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
+++ b/spec/models/concerns/token_authenticatable_strategies/encrypted_spec.rb
@@ -7,6 +7,10 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
let(:instance) { double(:instance) }
let(:encrypted) do
+ TokenAuthenticatableStrategies::EncryptionHelper.encrypt_token('my-value')
+ end
+
+ let(:encrypted_with_static_iv) do
Gitlab::CryptoHelper.aes256_gcm_encrypt('my-value')
end
@@ -15,12 +19,25 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
end
describe '#find_token_authenticatable' do
- context 'when using optional strategy' do
+ context 'when encryption is required' do
+ let(:options) { { encrypted: :required } }
+
+ it 'finds the encrypted resource by cleartext' do
+ allow(model).to receive(:find_by)
+ .with('some_field_encrypted' => [encrypted, encrypted_with_static_iv])
+ .and_return('encrypted resource')
+
+ expect(subject.find_token_authenticatable('my-value'))
+ .to eq 'encrypted resource'
+ end
+ end
+
+ context 'when encryption is optional' do
let(:options) { { encrypted: :optional } }
it 'finds the encrypted resource by cleartext' do
allow(model).to receive(:find_by)
- .with('some_field_encrypted' => encrypted)
+ .with('some_field_encrypted' => [encrypted, encrypted_with_static_iv])
.and_return('encrypted resource')
expect(subject.find_token_authenticatable('my-value'))
@@ -33,7 +50,7 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
.and_return('plaintext resource')
allow(model).to receive(:find_by)
- .with('some_field_encrypted' => encrypted)
+ .with('some_field_encrypted' => [encrypted, encrypted_with_static_iv])
.and_return(nil)
expect(subject.find_token_authenticatable('my-value'))
@@ -41,7 +58,7 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
end
end
- context 'when using migration strategy' do
+ context 'when encryption is migrating' do
let(:options) { { encrypted: :migrating } }
it 'finds the cleartext resource by cleartext' do
@@ -65,12 +82,28 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
end
describe '#get_token' do
- context 'when using optional strategy' do
- let(:options) { { encrypted: :optional } }
+ context 'when encryption is required' do
+ let(:options) { { encrypted: :required } }
+
+ it 'returns decrypted token when a token encrypted with a static iv is present' do
+ allow(instance).to receive(:read_attribute)
+ .with('some_field_encrypted')
+ .and_return(Gitlab::CryptoHelper.aes256_gcm_encrypt('my-test-value'))
+
+ expect(subject.get_token(instance)).to eq 'my-test-value'
+ end
+
+ it 'returns decrypted token when an encrypted token is present' do
+ allow(instance).to receive(:read_attribute)
+ .with('some_field_encrypted')
+ .and_return(encrypted)
- before do
- stub_feature_flags(dynamic_nonce_creation: false)
+ expect(subject.get_token(instance)).to eq 'my-value'
end
+ end
+
+ context 'when encryption is optional' do
+ let(:options) { { encrypted: :optional } }
it 'returns decrypted token when an encrypted token is present' do
allow(instance).to receive(:read_attribute)
@@ -80,6 +113,14 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
expect(subject.get_token(instance)).to eq 'my-value'
end
+ it 'returns decrypted token when a token encrypted with a static iv is present' do
+ allow(instance).to receive(:read_attribute)
+ .with('some_field_encrypted')
+ .and_return(Gitlab::CryptoHelper.aes256_gcm_encrypt('my-test-value'))
+
+ expect(subject.get_token(instance)).to eq 'my-test-value'
+ end
+
it 'returns the plaintext token when encrypted token is not present' do
allow(instance).to receive(:read_attribute)
.with('some_field_encrypted')
@@ -93,7 +134,7 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
end
end
- context 'when using migration strategy' do
+ context 'when encryption is migrating' do
let(:options) { { encrypted: :migrating } }
it 'returns cleartext token when an encrypted token is present' do
@@ -123,12 +164,22 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
end
describe '#set_token' do
- context 'when using optional strategy' do
+ context 'when encryption is required' do
+ let(:options) { { encrypted: :required } }
+
+ it 'writes encrypted token and returns it' do
+ expect(instance).to receive(:[]=)
+ .with('some_field_encrypted', encrypted)
+
+ expect(subject.set_token(instance, 'my-value')).to eq 'my-value'
+ end
+ end
+ context 'when encryption is optional' do
let(:options) { { encrypted: :optional } }
it 'writes encrypted token and removes plaintext token and returns it' do
expect(instance).to receive(:[]=)
- .with('some_field_encrypted', any_args)
+ .with('some_field_encrypted', encrypted)
expect(instance).to receive(:[]=)
.with('some_field', nil)
@@ -136,12 +187,12 @@ RSpec.describe TokenAuthenticatableStrategies::Encrypted do
end
end
- context 'when using migration strategy' do
+ context 'when encryption is migrating' do
let(:options) { { encrypted: :migrating } }
it 'writes encrypted token and writes plaintext token' do
expect(instance).to receive(:[]=)
- .with('some_field_encrypted', any_args)
+ .with('some_field_encrypted', encrypted)
expect(instance).to receive(:[]=)
.with('some_field', 'my-value')
diff --git a/spec/models/concerns/token_authenticatable_strategies/encryption_helper_spec.rb b/spec/models/concerns/token_authenticatable_strategies/encryption_helper_spec.rb
new file mode 100644
index 00000000000..6f322a32a3b
--- /dev/null
+++ b/spec/models/concerns/token_authenticatable_strategies/encryption_helper_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe TokenAuthenticatableStrategies::EncryptionHelper do
+ let(:encrypted_token) { described_class.encrypt_token('my-value') }
+
+ describe '.encrypt_token' do
+ it 'encrypts token' do
+ expect(encrypted_token).not_to eq('my-value')
+ end
+ end
+
+ describe '.decrypt_token' do
+ it 'decrypts token with static iv' do
+ expect(described_class.decrypt_token(encrypted_token)).to eq('my-value')
+ end
+
+ it 'decrypts token with dynamic iv' do
+ iv = ::Digest::SHA256.hexdigest('my-value').bytes.take(described_class::NONCE_SIZE).pack('c*')
+ token = Gitlab::CryptoHelper.aes256_gcm_encrypt('my-value', nonce: iv)
+ encrypted_token = "#{described_class::DYNAMIC_NONCE_IDENTIFIER}#{token}#{iv}"
+
+ expect(described_class.decrypt_token(encrypted_token)).to eq('my-value')
+ end
+ end
+end
diff --git a/spec/models/deploy_keys_project_spec.rb b/spec/models/deploy_keys_project_spec.rb
index ccc2c64e02c..60f4b9c55b1 100644
--- a/spec/models/deploy_keys_project_spec.rb
+++ b/spec/models/deploy_keys_project_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe DeployKeysProject do
end
it "doesn't destroy the deploy key" do
- subject.destroy
+ subject.destroy!
expect { deploy_key.reload }.not_to raise_error
end
@@ -34,7 +34,7 @@ RSpec.describe DeployKeysProject do
context "when the deploy key is private" do
it "destroys the deploy key" do
- subject.destroy
+ subject.destroy!
expect { deploy_key.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
@@ -49,7 +49,7 @@ RSpec.describe DeployKeysProject do
end
it "doesn't destroy the deploy key" do
- subject.destroy
+ subject.destroy!
expect { deploy_key.reload }.not_to raise_error
end
diff --git a/spec/models/deploy_token_spec.rb b/spec/models/deploy_token_spec.rb
index c7e1d5fc0d5..c8917a7dd65 100644
--- a/spec/models/deploy_token_spec.rb
+++ b/spec/models/deploy_token_spec.rb
@@ -47,7 +47,7 @@ RSpec.describe DeployToken do
describe '#ensure_token' do
it 'ensures a token' do
deploy_token.token = nil
- deploy_token.save
+ deploy_token.save!
expect(deploy_token.token).not_to be_empty
end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index 68d12f51d4b..c9544569ad6 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -161,9 +161,9 @@ RSpec.describe Deployment do
end
end
- it 'executes Deployments::LinkMergeRequestWorker asynchronously' do
+ it 'does not execute Deployments::LinkMergeRequestWorker' do
expect(Deployments::LinkMergeRequestWorker)
- .to receive(:perform_async).with(deployment.id)
+ .not_to receive(:perform_async).with(deployment.id)
deployment.drop!
end
@@ -188,9 +188,9 @@ RSpec.describe Deployment do
end
end
- it 'executes Deployments::LinkMergeRequestWorker asynchronously' do
+ it 'does not execute Deployments::LinkMergeRequestWorker' do
expect(Deployments::LinkMergeRequestWorker)
- .to receive(:perform_async).with(deployment.id)
+ .not_to receive(:perform_async).with(deployment.id)
deployment.cancel!
end
@@ -497,7 +497,7 @@ RSpec.describe Deployment do
context 'when the SHA for the deployment does not exist in the repo' do
it 'returns false' do
- deployment.update(sha: Gitlab::Git::BLANK_SHA)
+ deployment.update!(sha: Gitlab::Git::BLANK_SHA)
commit = project.commit
expect(deployment.includes_commit?(commit)).to be false
@@ -573,15 +573,39 @@ RSpec.describe Deployment do
end
describe '#previous_deployment' do
- it 'returns the previous deployment' do
- deploy1 = create(:deployment)
- deploy2 = create(
- :deployment,
- project: deploy1.project,
- environment: deploy1.environment
- )
+ using RSpec::Parameterized::TableSyntax
- expect(deploy2.previous_deployment).to eq(deploy1)
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:production) { create(:environment, :production, project: project) }
+ let_it_be(:staging) { create(:environment, :staging, project: project) }
+ let_it_be(:production_deployment_1) { create(:deployment, :success, project: project, environment: production) }
+ let_it_be(:production_deployment_2) { create(:deployment, :success, project: project, environment: production) }
+ let_it_be(:production_deployment_3) { create(:deployment, :failed, project: project, environment: production) }
+ let_it_be(:production_deployment_4) { create(:deployment, :canceled, project: project, environment: production) }
+ let_it_be(:staging_deployment_1) { create(:deployment, :failed, project: project, environment: staging) }
+ let_it_be(:staging_deployment_2) { create(:deployment, :success, project: project, environment: staging) }
+ let_it_be(:production_deployment_5) { create(:deployment, :success, project: project, environment: production) }
+ let_it_be(:staging_deployment_3) { create(:deployment, :success, project: project, environment: staging) }
+
+ where(:pointer, :expected_previous_deployment) do
+ 'production_deployment_1' | nil
+ 'production_deployment_2' | 'production_deployment_1'
+ 'production_deployment_3' | 'production_deployment_2'
+ 'production_deployment_4' | 'production_deployment_2'
+ 'staging_deployment_1' | nil
+ 'staging_deployment_2' | nil
+ 'production_deployment_5' | 'production_deployment_2'
+ 'staging_deployment_3' | 'staging_deployment_2'
+ end
+
+ with_them do
+ it 'returns the previous deployment' do
+ if expected_previous_deployment.nil?
+ expect(send(pointer).previous_deployment).to eq(expected_previous_deployment)
+ else
+ expect(send(pointer).previous_deployment).to eq(send(expected_previous_deployment))
+ end
+ end
end
end
@@ -631,45 +655,6 @@ RSpec.describe Deployment do
end
end
- describe '#previous_environment_deployment' do
- it 'returns the previous deployment of the same environment' do
- deploy1 = create(:deployment, :success)
- deploy2 = create(
- :deployment,
- :success,
- project: deploy1.project,
- environment: deploy1.environment
- )
-
- expect(deploy2.previous_environment_deployment).to eq(deploy1)
- end
-
- it 'ignores deployments that were not successful' do
- deploy1 = create(:deployment, :failed)
- deploy2 = create(
- :deployment,
- :success,
- project: deploy1.project,
- environment: deploy1.environment
- )
-
- expect(deploy2.previous_environment_deployment).to be_nil
- end
-
- it 'ignores deployments for different environments' do
- deploy1 = create(:deployment, :success)
- preprod = create(:environment, project: deploy1.project, name: 'preprod')
- deploy2 = create(
- :deployment,
- :success,
- project: deploy1.project,
- environment: preprod
- )
-
- expect(deploy2.previous_environment_deployment).to be_nil
- end
- end
-
describe '#create_ref' do
let(:deployment) { build(:deployment) }
@@ -796,4 +781,30 @@ RSpec.describe Deployment do
end
end
end
+
+ describe '#update_merge_request_metrics!' do
+ let_it_be(:project) { create(:project, :repository) }
+ let(:environment) { build(:environment, environment_tier, project: project) }
+ let!(:deployment) { create(:deployment, :success, project: project, environment: environment) }
+ let!(:merge_request) { create(:merge_request, :simple, :merged_last_month, project: project) }
+
+ context 'with production environment' do
+ let(:environment_tier) { :production }
+
+ it 'updates merge request metrics for production-grade environment' do
+ expect { deployment.update_merge_request_metrics! }
+ .to change { merge_request.reload.metrics.first_deployed_to_production_at }
+ .from(nil).to(deployment.reload.finished_at)
+ end
+ end
+
+ context 'with staging environment' do
+ let(:environment_tier) { :staging }
+
+ it 'does not update merge request metrics for a non-production-grade environment' do
+ expect { deployment.update_merge_request_metrics! }
+ .not_to change { merge_request.reload.metrics.first_deployed_to_production_at }
+ end
+ end
+ end
end
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index e021a6cf6d3..759bb080172 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -302,6 +302,8 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
'testing' | described_class.tiers[:testing]
'testing-prd' | described_class.tiers[:testing]
'acceptance-testing' | described_class.tiers[:testing]
+ 'production-test' | described_class.tiers[:testing]
+ 'test-production' | described_class.tiers[:testing]
'QC' | described_class.tiers[:testing]
'gstg' | described_class.tiers[:staging]
'staging' | described_class.tiers[:staging]
@@ -315,6 +317,12 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
'gprd-cny' | described_class.tiers[:production]
'production' | described_class.tiers[:production]
'Production' | described_class.tiers[:production]
+ 'PRODUCTION' | described_class.tiers[:production]
+ 'Production/eu' | described_class.tiers[:production]
+ 'production/eu' | described_class.tiers[:production]
+ 'PRODUCTION/EU' | described_class.tiers[:production]
+ 'productioneu' | described_class.tiers[:production]
+ 'production/www.gitlab.com' | described_class.tiers[:production]
'prod' | described_class.tiers[:production]
'PROD' | described_class.tiers[:production]
'Live' | described_class.tiers[:production]
@@ -444,31 +452,6 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
end
end
- describe '#update_merge_request_metrics?' do
- {
- 'gprd' => false,
- 'prod' => true,
- 'prod-test' => false,
- 'PROD' => true,
- 'production' => true,
- 'production-test' => false,
- 'PRODUCTION' => true,
- 'production/eu' => true,
- 'PRODUCTION/EU' => true,
- 'production/www.gitlab.com' => true,
- 'productioneu' => false,
- 'Production' => true,
- 'Production/eu' => true,
- 'test-production' => false
- }.each do |name, expected_value|
- it "returns #{expected_value} for #{name}" do
- env = create(:environment, name: name)
-
- expect(env.update_merge_request_metrics?).to eq(expected_value), "Expected the name '#{name}' to result in #{expected_value}, but it didn't."
- end
- end
- end
-
describe '#environment_type' do
subject { environment.environment_type }
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index 47148c4febc..949e8ec0a72 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -299,11 +299,11 @@ RSpec.describe Event do
end
def visible_to_none_except(*roles)
- visible_to_none.merge(roles.map { |role| [role, true] }.to_h)
+ visible_to_none.merge(roles.to_h { |role| [role, true] })
end
def visible_to_all_except(*roles)
- visible_to_all.merge(roles.map { |role| [role, false] }.to_h)
+ visible_to_all.merge(roles.to_h { |role| [role, false] })
end
shared_examples 'visibility examples' do
@@ -723,7 +723,7 @@ RSpec.describe Event do
note_on_design: true,
note_on_commit: true
}
- valid_target_factories.map do |kind, needs_project|
+ valid_target_factories.to_h do |kind, needs_project|
extra_data = if kind == :merge_request
{ source_project: project }
elsif needs_project
@@ -735,7 +735,7 @@ RSpec.describe Event do
target = kind == :project ? nil : build(kind, **extra_data)
[kind, build(:event, :created, author: project.owner, project: project, target: target)]
- end.to_h
+ end
end
it 'passes a sanity check', :aggregate_failures do
diff --git a/spec/models/experiment_spec.rb b/spec/models/experiment_spec.rb
index 09dd1766acc..1517f426fa3 100644
--- a/spec/models/experiment_spec.rb
+++ b/spec/models/experiment_spec.rb
@@ -244,18 +244,27 @@ RSpec.describe Experiment do
context 'when no existing experiment_subject record exists for the given group' do
it 'creates an experiment_subject record' do
- expect_next(ExperimentSubject).to receive(:update!).with(variant: variant).and_call_original
-
expect { record_group_and_variant! }.to change(ExperimentSubject, :count).by(1)
+ expect(ExperimentSubject.last.variant).to eq(variant.to_s)
end
end
context 'when an existing experiment_subject exists for the given group' do
- context 'but it belonged to a different variant' do
- let!(:experiment_subject) do
- create(:experiment_subject, experiment: experiment, group: group, user: nil, variant: :experimental)
+ let_it_be(:experiment_subject) do
+ create(:experiment_subject, experiment: experiment, group: group, user: nil, variant: :experimental)
+ end
+
+ context 'when it belongs to the same variant' do
+ let(:variant) { :experimental }
+
+ it 'does not initiate a transaction' do
+ expect(ActiveRecord::Base.connection).not_to receive(:transaction)
+
+ subject
end
+ end
+ context 'but it belonged to a different variant' do
it 'updates the variant value' do
expect { record_group_and_variant! }.to change { experiment_subject.reload.variant }.to('control')
end
@@ -299,6 +308,16 @@ RSpec.describe Experiment do
expect { subject }.not_to change(ExperimentUser, :count)
end
+ context 'when group type or context did not change' do
+ let(:context) { {} }
+
+ it 'does not initiate a transaction' do
+ expect(ActiveRecord::Base.connection).not_to receive(:transaction)
+
+ subject
+ end
+ end
+
context 'but the group_type and context has changed' do
let(:group) { :experimental }
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index 24d09d1c035..2f82d8a0bbe 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Group do
+ include ReloadHelpers
+
let!(:group) { create(:group) }
describe 'associations' do
@@ -281,7 +283,7 @@ RSpec.describe Group do
end
describe '#two_factor_authentication_allowed' do
- let_it_be(:group) { create(:group) }
+ let_it_be_with_reload(:group) { create(:group) }
context 'for a parent group' do
it 'is valid' do
@@ -311,6 +313,120 @@ RSpec.describe Group do
end
end
+ context 'traversal_ids on create' do
+ context 'default traversal_ids' do
+ let(:group) { build(:group) }
+
+ before do
+ group.save!
+ group.reload
+ end
+
+ it { expect(group.traversal_ids).to eq [group.id] }
+ end
+
+ context 'has a parent' do
+ let(:parent) { create(:group) }
+ let(:group) { build(:group, parent: parent) }
+
+ before do
+ group.save!
+ reload_models(parent, group)
+ end
+
+ it { expect(parent.traversal_ids).to eq [parent.id] }
+ it { expect(group.traversal_ids).to eq [parent.id, group.id] }
+ end
+
+ context 'has a parent update before save' do
+ let(:parent) { create(:group) }
+ let(:group) { build(:group, parent: parent) }
+ let!(:new_grandparent) { create(:group) }
+
+ before do
+ parent.update!(parent: new_grandparent)
+ group.save!
+ reload_models(parent, group)
+ end
+
+ it 'avoids traversal_ids race condition' do
+ expect(parent.traversal_ids).to eq [new_grandparent.id, parent.id]
+ expect(group.traversal_ids).to eq [new_grandparent.id, parent.id, group.id]
+ end
+ end
+ end
+
+ context 'traversal_ids on update' do
+ context 'parent is updated' do
+ let(:new_parent) { create(:group) }
+
+ subject { group.update!(parent: new_parent, name: 'new name') }
+
+ it_behaves_like 'update on column', :traversal_ids
+ end
+
+ context 'parent is not updated' do
+ subject { group.update!(name: 'new name') }
+
+ it_behaves_like 'no update on column', :traversal_ids
+ end
+ end
+
+ context 'traversal_ids on ancestral update' do
+ context 'update multiple ancestors before save' do
+ let(:parent) { create(:group) }
+ let(:group) { create(:group, parent: parent) }
+ let!(:new_grandparent) { create(:group) }
+ let!(:new_parent) { create(:group) }
+
+ before do
+ group.parent = new_parent
+ new_parent.update!(parent: new_grandparent)
+
+ group.save!
+ reload_models(parent, group, new_grandparent, new_parent)
+ end
+
+ it 'avoids traversal_ids race condition' do
+ expect(parent.traversal_ids).to eq [parent.id]
+ expect(group.traversal_ids).to eq [new_grandparent.id, new_parent.id, group.id]
+ expect(new_grandparent.traversal_ids).to eq [new_grandparent.id]
+ expect(new_parent.traversal_ids).to eq [new_grandparent.id, new_parent.id]
+ end
+ end
+
+ context 'assigning a new parent' do
+ let!(:old_parent) { create(:group) }
+ let!(:new_parent) { create(:group) }
+ let!(:group) { create(:group, parent: old_parent) }
+
+ before do
+ group.update(parent: new_parent)
+ reload_models(old_parent, new_parent, group)
+ end
+
+ it 'updates traversal_ids' do
+ expect(group.traversal_ids).to eq [new_parent.id, group.id]
+ end
+ end
+
+ context 'assigning a new grandparent' do
+ let!(:old_grandparent) { create(:group) }
+ let!(:new_grandparent) { create(:group) }
+ let!(:parent_group) { create(:group, parent: old_grandparent) }
+ let!(:group) { create(:group, parent: parent_group) }
+
+ before do
+ parent_group.update(parent: new_grandparent)
+ end
+
+ it 'updates traversal_ids for all descendants' do
+ expect(parent_group.reload.traversal_ids).to eq [new_grandparent.id, parent_group.id]
+ expect(group.reload.traversal_ids).to eq [new_grandparent.id, parent_group.id, group.id]
+ end
+ end
+ end
+
describe '.without_integration' do
let(:another_group) { create(:group) }
let(:instance_integration) { build(:jira_service, :instance) }
@@ -565,39 +681,178 @@ RSpec.describe Group do
end
end
- describe '#last_blocked_owner?' do
- let(:blocked_user) { create(:user, :blocked) }
+ describe '#member_last_blocked_owner?' do
+ let_it_be(:blocked_user) { create(:user, :blocked) }
+
+ let(:member) { blocked_user.group_members.last }
before do
group.add_user(blocked_user, GroupMember::OWNER)
end
- it { expect(group.last_blocked_owner?(blocked_user)).to be_truthy }
+ context 'when last_blocked_owner is set' do
+ before do
+ expect(group).not_to receive(:members_with_parents)
+ end
+
+ it 'returns true' do
+ member.last_blocked_owner = true
+
+ expect(group.member_last_blocked_owner?(member)).to be(true)
+ end
+
+ it 'returns false' do
+ member.last_blocked_owner = false
+
+ expect(group.member_last_blocked_owner?(member)).to be(false)
+ end
+ end
+
+ context 'when last_blocked_owner is not set' do
+ it { expect(group.member_last_blocked_owner?(member)).to be(true) }
+
+ context 'with another active owner' do
+ before do
+ group.add_user(create(:user), GroupMember::OWNER)
+ end
+
+ it { expect(group.member_last_blocked_owner?(member)).to be(false) }
+ end
+
+ context 'with 2 blocked owners' do
+ before do
+ group.add_user(create(:user, :blocked), GroupMember::OWNER)
+ end
+
+ it { expect(group.member_last_blocked_owner?(member)).to be(false) }
+ end
+
+ context 'with owners from a parent' do
+ before do
+ parent_group = create(:group)
+ create(:group_member, :owner, group: parent_group)
+ group.update(parent: parent_group)
+ end
+
+ it { expect(group.member_last_blocked_owner?(member)).to be(false) }
+ end
+ end
+ end
+
+ context 'when analyzing blocked owners' do
+ let_it_be(:blocked_user) { create(:user, :blocked) }
+
+ describe '#single_blocked_owner?' do
+ context 'when there is only one blocked owner' do
+ before do
+ group.add_user(blocked_user, GroupMember::OWNER)
+ end
+
+ it 'returns true' do
+ expect(group.single_blocked_owner?).to eq(true)
+ end
+ end
+
+ context 'when there are multiple blocked owners' do
+ let_it_be(:blocked_user_2) { create(:user, :blocked) }
+
+ before do
+ group.add_user(blocked_user, GroupMember::OWNER)
+ group.add_user(blocked_user_2, GroupMember::OWNER)
+ end
+
+ it 'returns false' do
+ expect(group.single_blocked_owner?).to eq(false)
+ end
+ end
+
+ context 'when there are no blocked owners' do
+ it 'returns false' do
+ expect(group.single_blocked_owner?).to eq(false)
+ end
+ end
+ end
+
+ describe '#blocked_owners' do
+ let_it_be(:user) { create(:user) }
- context 'with another active owner' do
before do
- group.add_user(create(:user), GroupMember::OWNER)
+ group.add_user(blocked_user, GroupMember::OWNER)
+ group.add_user(user, GroupMember::OWNER)
end
- it { expect(group.last_blocked_owner?(blocked_user)).to be_falsy }
+ it 'returns only blocked owners' do
+ expect(group.blocked_owners.map(&:user)).to match([blocked_user])
+ end
end
+ end
+
+ describe '#single_owner?' do
+ let_it_be(:user) { create(:user) }
- context 'with 2 blocked owners' do
+ context 'when there is only one owner' do
before do
- group.add_user(create(:user, :blocked), GroupMember::OWNER)
+ group.add_user(user, GroupMember::OWNER)
end
- it { expect(group.last_blocked_owner?(blocked_user)).to be_falsy }
+ it 'returns true' do
+ expect(group.single_owner?).to eq(true)
+ end
end
- context 'with owners from a parent' do
+ context 'when there are multiple owners' do
+ let_it_be(:user_2) { create(:user) }
+
before do
- parent_group = create(:group)
- create(:group_member, :owner, group: parent_group)
- group.update(parent: parent_group)
+ group.add_user(user, GroupMember::OWNER)
+ group.add_user(user_2, GroupMember::OWNER)
end
- it { expect(group.last_blocked_owner?(blocked_user)).to be_falsy }
+ it 'returns false' do
+ expect(group.single_owner?).to eq(false)
+ end
+ end
+
+ context 'when there are no owners' do
+ it 'returns false' do
+ expect(group.single_owner?).to eq(false)
+ end
+ end
+ end
+
+ describe '#member_last_owner?' do
+ let_it_be(:user) { create(:user) }
+
+ let(:member) { group.members.last }
+
+ before do
+ group.add_user(user, GroupMember::OWNER)
+ end
+
+ context 'when last_owner is set' do
+ before do
+ expect(group).not_to receive(:last_owner?)
+ end
+
+ it 'returns true' do
+ member.last_owner = true
+
+ expect(group.member_last_owner?(member)).to be(true)
+ end
+
+ it 'returns false' do
+ member.last_owner = false
+
+ expect(group.member_last_owner?(member)).to be(false)
+ end
+ end
+
+ context 'when last_owner is not set' do
+ it 'returns true' do
+ expect(group).to receive(:last_owner?).and_call_original
+
+ expect(group.member_last_owner?(member)).to be(true)
+ end
end
end
@@ -1500,30 +1755,6 @@ RSpec.describe Group do
perfectly_matched_variable])
end
end
-
- context 'when :scoped_group_variables feature flag is disabled' do
- before do
- stub_feature_flags(scoped_group_variables: false)
- end
-
- context 'when environment scope is exactly matched' do
- let(:environment_scope) { 'review/name' }
-
- it { is_expected.to contain_exactly(ci_variable) }
- end
-
- context 'when environment scope is partially matched' do
- let(:environment_scope) { 'review/*' }
-
- it { is_expected.to contain_exactly(ci_variable) }
- end
-
- context 'when environment scope does not match' do
- let(:environment_scope) { 'review/*/special' }
-
- it { is_expected.to contain_exactly(ci_variable) }
- end
- end
end
context 'when group has children' do
@@ -1838,24 +2069,28 @@ RSpec.describe Group do
end
end
- def subject_and_reload(*models)
- subject
- models.map(&:reload)
- end
-
describe '#update_shared_runners_setting!' do
context 'enabled' do
subject { group.update_shared_runners_setting!('enabled') }
context 'group that its ancestors have shared runners disabled' do
- let_it_be(:parent) { create(:group, :shared_runners_disabled) }
- let_it_be(:group) { create(:group, :shared_runners_disabled, parent: parent) }
- let_it_be(:project) { create(:project, shared_runners_enabled: false, group: group) }
+ let_it_be(:parent, reload: true) { create(:group, :shared_runners_disabled) }
+ let_it_be(:group, reload: true) { create(:group, :shared_runners_disabled, parent: parent) }
+ let_it_be(:project, reload: true) { create(:project, shared_runners_enabled: false, group: group) }
- it 'raises error and does not enable shared Runners' do
- expect { subject_and_reload(parent, group, project) }
+ it 'raises exception' do
+ expect { subject }
.to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Shared runners enabled cannot be enabled because parent group has shared Runners disabled')
- .and not_change { parent.shared_runners_enabled }
+ end
+
+ it 'does not enable shared runners' do
+ expect do
+ subject rescue nil
+
+ parent.reload
+ group.reload
+ project.reload
+ end.to not_change { parent.shared_runners_enabled }
.and not_change { group.shared_runners_enabled }
.and not_change { project.shared_runners_enabled }
end
@@ -1941,13 +2176,21 @@ RSpec.describe Group do
end
context 'when parent does not allow' do
- let_it_be(:parent) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false ) }
- let_it_be(:group) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
+ let_it_be(:parent, reload: true) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false ) }
+ let_it_be(:group, reload: true) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
- it 'raises error and does not allow descendants to override' do
- expect { subject_and_reload(parent, group) }
+ it 'raises exception' do
+ expect { subject }
.to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Allow descendants override disabled shared runners cannot be enabled because parent group does not allow it')
- .and not_change { parent.allow_descendants_override_disabled_shared_runners }
+ end
+
+ it 'does not allow descendants to override' do
+ expect do
+ subject rescue nil
+
+ parent.reload
+ group.reload
+ end.to not_change { parent.allow_descendants_override_disabled_shared_runners }
.and not_change { parent.shared_runners_enabled }
.and not_change { group.allow_descendants_override_disabled_shared_runners }
.and not_change { group.shared_runners_enabled }
@@ -2104,4 +2347,18 @@ RSpec.describe Group do
it_behaves_like 'model with Debian distributions'
end
+
+ describe '.ids_with_disabled_email' do
+ let!(:parent_1) { create(:group, emails_disabled: true) }
+ let!(:child_1) { create(:group, parent: parent_1) }
+
+ let!(:parent_2) { create(:group, emails_disabled: false) }
+ let!(:child_2) { create(:group, parent: parent_2) }
+
+ let!(:other_group) { create(:group, emails_disabled: false) }
+
+ subject(:group_ids_where_email_is_disabled) { described_class.ids_with_disabled_email([child_1, child_2, other_group]) }
+
+ it { is_expected.to eq(Set.new([child_1.id])) }
+ end
end
diff --git a/spec/models/hooks/system_hook_spec.rb b/spec/models/hooks/system_hook_spec.rb
index e56d08c1847..02e630cbf27 100644
--- a/spec/models/hooks/system_hook_spec.rb
+++ b/spec/models/hooks/system_hook_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe SystemHook do
end
it "user_destroy hook" do
- user.destroy
+ user.destroy!
expect(WebMock).to have_requested(:post, system_hook.url).with(
body: /user_destroy/,
@@ -102,7 +102,7 @@ RSpec.describe SystemHook do
end
it 'group destroy hook' do
- group.destroy
+ group.destroy!
expect(WebMock).to have_requested(:post, system_hook.url).with(
body: /group_destroy/,
diff --git a/spec/models/hooks/web_hook_spec.rb b/spec/models/hooks/web_hook_spec.rb
index 3fc1ad6eb0d..413e69fb071 100644
--- a/spec/models/hooks/web_hook_spec.rb
+++ b/spec/models/hooks/web_hook_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe WebHook do
it 'strips :url before saving it' do
hook.url = ' https://example.com '
- hook.save
+ hook.save!
expect(hook.url).to eq('https://example.com')
end
@@ -45,14 +45,14 @@ RSpec.describe WebHook do
it 'gets rid of whitespace' do
hook.push_events_branch_filter = ' branch '
- hook.save
+ hook.save!
expect(hook.push_events_branch_filter).to eq('branch')
end
it 'stores whitespace only as empty' do
hook.push_events_branch_filter = ' '
- hook.save
+ hook.save!
expect(hook.push_events_branch_filter).to eq('')
end
@@ -91,7 +91,7 @@ RSpec.describe WebHook do
web_hook = create(:project_hook)
create_list(:web_hook_log, 3, web_hook: web_hook)
- expect { web_hook.destroy }.to change(web_hook.web_hook_logs, :count).by(-3)
+ expect { web_hook.destroy! }.to change(web_hook.web_hook_logs, :count).by(-3)
end
end
end
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index d89b323f525..781e2aece56 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -6,23 +6,28 @@ RSpec.describe Integration do
let_it_be(:project_1) { create(:project) }
let_it_be(:project_2) { create(:project) }
let_it_be(:project_3) { create(:project) }
+ let_it_be(:project_4) { create(:project) }
let_it_be(:instance_integration) { create(:jira_service, :instance) }
before do
create(:jira_service, project: project_1, inherit_from_id: instance_integration.id)
create(:jira_service, project: project_2, inherit_from_id: nil)
- create(:slack_service, project: project_3, inherit_from_id: nil)
+ create(:jira_service, group: create(:group), project: nil, inherit_from_id: nil)
+ create(:jira_service, project: project_3, inherit_from_id: nil)
+ create(:slack_service, project: project_4, inherit_from_id: nil)
end
describe '.with_custom_integration_for' do
it 'returns projects with custom integrations' do
- expect(Project.with_custom_integration_for(instance_integration)).to contain_exactly(project_2)
+ # We use pagination to verify that the group is excluded from the query
+ expect(Project.with_custom_integration_for(instance_integration, 0, 2)).to contain_exactly(project_2, project_3)
+ expect(Project.with_custom_integration_for(instance_integration)).to contain_exactly(project_2, project_3)
end
end
describe '.without_integration' do
it 'returns projects without integration' do
- expect(Project.without_integration(instance_integration)).to contain_exactly(project_3)
+ expect(Project.without_integration(instance_integration)).to contain_exactly(project_4)
end
end
end
diff --git a/spec/models/internal_id_spec.rb b/spec/models/internal_id_spec.rb
index 07f62b9de55..981245627af 100644
--- a/spec/models/internal_id_spec.rb
+++ b/spec/models/internal_id_spec.rb
@@ -97,6 +97,25 @@ RSpec.describe InternalId do
expect(subject).to eq(1)
end
end
+
+ context 'when executed outside of transaction' do
+ it 'increments counter with in_transaction: "false"' do
+ expect(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
+ expect(InternalId::InternalIdGenerator.internal_id_transactions_total).to receive(:increment)
+ .with(operation: :generate, usage: 'issues', in_transaction: 'false').and_call_original
+
+ subject
+ end
+ end
+
+ context 'when executed within transaction' do
+ it 'increments counter with in_transaction: "true"' do
+ expect(InternalId::InternalIdGenerator.internal_id_transactions_total).to receive(:increment)
+ .with(operation: :generate, usage: 'issues', in_transaction: 'true').and_call_original
+
+ InternalId.transaction { subject }
+ end
+ end
end
describe '.reset' do
@@ -134,6 +153,29 @@ RSpec.describe InternalId do
described_class.generate_next(issue, scope, usage, init)
end
end
+
+ context 'when executed outside of transaction' do
+ let(:value) { 2 }
+
+ it 'increments counter with in_transaction: "false"' do
+ expect(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
+ expect(InternalId::InternalIdGenerator.internal_id_transactions_total).to receive(:increment)
+ .with(operation: :reset, usage: 'issues', in_transaction: 'false').and_call_original
+
+ subject
+ end
+ end
+
+ context 'when executed within transaction' do
+ let(:value) { 2 }
+
+ it 'increments counter with in_transaction: "true"' do
+ expect(InternalId::InternalIdGenerator.internal_id_transactions_total).to receive(:increment)
+ .with(operation: :reset, usage: 'issues', in_transaction: 'true').and_call_original
+
+ InternalId.transaction { subject }
+ end
+ end
end
describe '.track_greatest' do
@@ -183,6 +225,25 @@ RSpec.describe InternalId do
expect(subject).to eq(value)
end
end
+
+ context 'when executed outside of transaction' do
+ it 'increments counter with in_transaction: "false"' do
+ expect(ActiveRecord::Base.connection).to receive(:transaction_open?) { false }
+ expect(InternalId::InternalIdGenerator.internal_id_transactions_total).to receive(:increment)
+ .with(operation: :track_greatest, usage: 'issues', in_transaction: 'false').and_call_original
+
+ subject
+ end
+ end
+
+ context 'when executed within transaction' do
+ it 'increments counter with in_transaction: "true"' do
+ expect(InternalId::InternalIdGenerator.internal_id_transactions_total).to receive(:increment)
+ .with(operation: :track_greatest, usage: 'issues', in_transaction: 'true').and_call_original
+
+ InternalId.transaction { subject }
+ end
+ end
end
describe '#increment_and_save!' do
diff --git a/spec/models/issue/metrics_spec.rb b/spec/models/issue/metrics_spec.rb
index 1d3c09a48b7..18b0a46c928 100644
--- a/spec/models/issue/metrics_spec.rb
+++ b/spec/models/issue/metrics_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe Issue::Metrics do
context "milestones" do
it "records the first time an issue is associated with a milestone" do
time = Time.current
- travel_to(time) { subject.update(milestone: create(:milestone, project: project)) }
+ travel_to(time) { subject.update!(milestone: create(:milestone, project: project)) }
metrics = subject.metrics
expect(metrics).to be_present
@@ -47,9 +47,9 @@ RSpec.describe Issue::Metrics do
it "does not record the second time an issue is associated with a milestone" do
time = Time.current
- travel_to(time) { subject.update(milestone: create(:milestone, project: project)) }
- travel_to(time + 2.hours) { subject.update(milestone: nil) }
- travel_to(time + 6.hours) { subject.update(milestone: create(:milestone, project: project)) }
+ travel_to(time) { subject.update!(milestone: create(:milestone, project: project)) }
+ travel_to(time + 2.hours) { subject.update!(milestone: nil) }
+ travel_to(time + 6.hours) { subject.update!(milestone: create(:milestone, project: project)) }
metrics = subject.metrics
expect(metrics).to be_present
@@ -61,7 +61,7 @@ RSpec.describe Issue::Metrics do
it "records the first time an issue is associated with a list label" do
list_label = create(:list).label
time = Time.current
- travel_to(time) { subject.update(label_ids: [list_label.id]) }
+ travel_to(time) { subject.update!(label_ids: [list_label.id]) }
metrics = subject.metrics
expect(metrics).to be_present
@@ -71,9 +71,9 @@ RSpec.describe Issue::Metrics do
it "does not record the second time an issue is associated with a list label" do
time = Time.current
first_list_label = create(:list).label
- travel_to(time) { subject.update(label_ids: [first_list_label.id]) }
+ travel_to(time) { subject.update!(label_ids: [first_list_label.id]) }
second_list_label = create(:list).label
- travel_to(time + 5.hours) { subject.update(label_ids: [second_list_label.id]) }
+ travel_to(time + 5.hours) { subject.update!(label_ids: [second_list_label.id]) }
metrics = subject.metrics
expect(metrics).to be_present
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index a3e245f4def..23caf3647c3 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -85,18 +85,14 @@ RSpec.describe Issue do
describe 'callbacks' do
describe '#ensure_metrics' do
it 'creates metrics after saving' do
- issue = create(:issue, project: reusable_project)
-
- expect(issue.metrics).to be_persisted
+ expect(subject.metrics).to be_persisted
expect(Issue::Metrics.count).to eq(1)
end
it 'does not create duplicate metrics for an issue' do
- issue = create(:issue, project: reusable_project)
+ subject.close!
- issue.close!
-
- expect(issue.metrics).to be_persisted
+ expect(subject.metrics).to be_persisted
expect(Issue::Metrics.count).to eq(1)
end
@@ -105,6 +101,20 @@ RSpec.describe Issue do
create(:issue, project: reusable_project)
end
+
+ context 'when metrics record is missing' do
+ before do
+ subject.metrics.delete
+ subject.reload
+ subject.metrics # make sure metrics association is cached (currently nil)
+ end
+
+ it 'creates the metrics record' do
+ subject.update!(title: 'title')
+
+ expect(subject.metrics).to be_present
+ end
+ end
end
describe '#record_create_action' do
@@ -327,7 +337,7 @@ RSpec.describe Issue do
end
it 'returns true for a user that is the author of an issue' do
- issue.update(author: user)
+ issue.update!(author: user)
expect(issue.assignee_or_author?(user)).to be_truthy
end
@@ -665,7 +675,7 @@ RSpec.describe Issue do
expect(user2.assigned_open_issues_count).to eq(0)
issue.assignees = [user2]
- issue.save
+ issue.save!
expect(user1.assigned_open_issues_count).to eq(0)
expect(user2.assigned_open_issues_count).to eq(1)
@@ -897,7 +907,7 @@ RSpec.describe Issue do
let(:private_project) { build(:project, :private)}
before do
- issue.update(project: private_project) # move issue to private project
+ issue.update!(project: private_project) # move issue to private project
end
shared_examples 'issue visible if user has guest access' do
@@ -1034,7 +1044,7 @@ RSpec.describe Issue do
with_them do
it 'checks for spam on issues that can be seen anonymously' do
project = reusable_project
- project.update(visibility_level: visibility_level)
+ project.update!(visibility_level: visibility_level)
issue = create(:issue, project: project, confidential: confidential, description: 'original description')
issue.assign_attributes(new_attributes)
@@ -1048,7 +1058,7 @@ RSpec.describe Issue do
it 'refreshes the number of open issues of the project' do
project = subject.project
- expect { subject.destroy }
+ expect { subject.destroy! }
.to change { project.open_issues_count }.from(1).to(0)
end
end
@@ -1263,8 +1273,8 @@ RSpec.describe Issue do
let_it_be(:issue) { create(:issue) }
it 'returns a list of emails' do
- participant1 = issue.issue_email_participants.create(email: 'a@gitlab.com')
- participant2 = issue.issue_email_participants.create(email: 'b@gitlab.com')
+ participant1 = issue.issue_email_participants.create!(email: 'a@gitlab.com')
+ participant2 = issue.issue_email_participants.create!(email: 'b@gitlab.com')
expect(issue.email_participants_emails).to contain_exactly(participant1.email, participant2.email)
end
diff --git a/spec/models/key_spec.rb b/spec/models/key_spec.rb
index 3d33a39d353..0cb20efcb0a 100644
--- a/spec/models/key_spec.rb
+++ b/spec/models/key_spec.rb
@@ -75,6 +75,28 @@ RSpec.describe Key, :mailer do
.to eq([key_3, key_1, key_2])
end
end
+
+ context 'expiration scopes' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:expired_today_not_notified) { create(:key, expires_at: Time.current, user: user) }
+ let_it_be(:expired_today_already_notified) { create(:key, expires_at: Time.current, user: user, expiry_notification_delivered_at: Time.current) }
+ let_it_be(:expired_yesterday) { create(:key, expires_at: 1.day.ago, user: user) }
+ let_it_be(:expiring_soon_unnotified) { create(:key, expires_at: 3.days.from_now, user: user) }
+ let_it_be(:expiring_soon_notified) { create(:key, expires_at: 4.days.from_now, user: user, before_expiry_notification_delivered_at: Time.current) }
+ let_it_be(:future_expiry) { create(:key, expires_at: 1.month.from_now, user: user) }
+
+ describe '.expired_today_and_not_notified' do
+ it 'returns keys that expire today' do
+ expect(described_class.expired_today_and_not_notified).to contain_exactly(expired_today_not_notified)
+ end
+ end
+
+ describe '.expiring_soon_and_not_notified' do
+ it 'returns keys that will expire soon' do
+ expect(described_class.expiring_soon_and_not_notified).to contain_exactly(expiring_soon_unnotified)
+ end
+ end
+ end
end
context "validation of uniqueness (based on fingerprint uniqueness)" do
diff --git a/spec/models/label_spec.rb b/spec/models/label_spec.rb
index e1abfd9d8e5..14acaf11ca4 100644
--- a/spec/models/label_spec.rb
+++ b/spec/models/label_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Label do
+ let_it_be(:project) { create(:project) }
+
describe 'modules' do
it { is_expected.to include_module(Referable) }
it { is_expected.to include_module(Subscribable) }
@@ -44,6 +46,22 @@ RSpec.describe Label do
end
end
+ describe 'scopes' do
+ describe '.on_board' do
+ let(:board) { create(:board, project: project) }
+ let!(:list1) { create(:list, board: board, label: development) }
+ let!(:list2) { create(:list, board: board, label: testing) }
+
+ let!(:development) { create(:label, project: project, name: 'Development') }
+ let!(:testing) { create(:label, project: project, name: 'Testing') }
+ let!(:regression) { create(:label, project: project, name: 'Regression') }
+
+ it 'returns only the board labels' do
+ expect(described_class.on_board(board.id)).to match_array([development, testing])
+ end
+ end
+ end
+
describe '#color' do
it 'strips color' do
label = described_class.new(color: ' #abcdef ')
@@ -92,9 +110,7 @@ RSpec.describe Label do
end
describe 'priorization' do
- subject(:label) { create(:label) }
-
- let(:project) { label.project }
+ subject(:label) { create(:label, project: project) }
describe '#prioritize!' do
context 'when label is not prioritized' do
diff --git a/spec/models/list_spec.rb b/spec/models/list_spec.rb
index ad07ee1115b..67a76c305f6 100644
--- a/spec/models/list_spec.rb
+++ b/spec/models/list_spec.rb
@@ -17,17 +17,4 @@ RSpec.describe List do
it { is_expected.to validate_presence_of(:label) }
it { is_expected.to validate_presence_of(:list_type) }
end
-
- describe '.without_types' do
- it 'exclude lists of given types' do
- board = create(:list, list_type: :label).board
- # closed list is created by default
- backlog_list = create(:list, list_type: :backlog, board: board)
-
- exclude_type = [described_class.list_types[:label], described_class.list_types[:closed]]
-
- lists = described_class.without_types(exclude_type)
- expect(lists.where(board: board)).to match_array([backlog_list])
- end
- end
end
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index c41f466456f..5f3a67b52ba 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -413,6 +413,24 @@ RSpec.describe Member do
it { is_expected.not_to include @blocked_developer }
it { is_expected.not_to include @member_with_minimal_access }
end
+
+ describe '.distinct_on_user_with_max_access_level' do
+ let_it_be(:other_group) { create(:group) }
+ let_it_be(:member_with_lower_access_level) { create(:group_member, :developer, group: other_group, user: @owner_user) }
+
+ subject { described_class.default_scoped.distinct_on_user_with_max_access_level.to_a }
+
+ it { is_expected.not_to include member_with_lower_access_level }
+ it { is_expected.to include @owner }
+ it { is_expected.to include @maintainer }
+ it { is_expected.to include @invited_member }
+ it { is_expected.to include @accepted_invite_member }
+ it { is_expected.to include @requested_member }
+ it { is_expected.to include @accepted_request_member }
+ it { is_expected.to include @blocked_maintainer }
+ it { is_expected.to include @blocked_developer }
+ it { is_expected.to include @member_with_minimal_access }
+ end
end
describe "Delegate methods" do
@@ -420,6 +438,16 @@ RSpec.describe Member do
it { is_expected.to respond_to(:user_email) }
end
+ describe '.valid_email?' do
+ it 'returns false for an invalid email format' do
+ expect(described_class.valid_email?('foo')).to eq(false)
+ end
+
+ it 'returns true for a valid email format' do
+ expect(described_class.valid_email?('foo@example.com')).to eq(true)
+ end
+ end
+
describe '.add_user' do
%w[project group].each do |source_type|
context "when source is a #{source_type}" do
diff --git a/spec/models/members/group_member_spec.rb b/spec/models/members/group_member_spec.rb
index 3d3ed6fc54a..908bb9f91a3 100644
--- a/spec/models/members/group_member_spec.rb
+++ b/spec/models/members/group_member_spec.rb
@@ -66,6 +66,12 @@ RSpec.describe GroupMember do
it_behaves_like 'members notifications', :group
+ describe '#namespace_id' do
+ subject { build(:group_member, source_id: 1).namespace_id }
+
+ it { is_expected.to eq 1 }
+ end
+
describe '#real_source_type' do
subject { create(:group_member).real_source_type }
diff --git a/spec/models/members/last_group_owner_assigner_spec.rb b/spec/models/members/last_group_owner_assigner_spec.rb
new file mode 100644
index 00000000000..3c9a7a11555
--- /dev/null
+++ b/spec/models/members/last_group_owner_assigner_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Members::LastGroupOwnerAssigner do
+ describe "#execute" do
+ let_it_be(:user, reload: true) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ let(:group_member) { user.members.last }
+
+ subject(:assigner) { described_class.new(group, [group_member]) }
+
+ before do
+ group.add_owner(user)
+ end
+
+ it "avoids extra database queries utilizing memoization", :aggregate_failures do
+ control = ActiveRecord::QueryRecorder.new { assigner.execute }
+ count_queries = control.occurrences_by_line_method.first[1][:occurrences].find_all { |i| i.include?('SELECT COUNT') }
+
+ expect(control.count).to be <= 5
+ expect(count_queries.count).to eq(0)
+ end
+
+ context "when there are unblocked owners" do
+ context "with one unblocked owner" do
+ specify do
+ expect { assigner.execute }.to change(group_member, :last_owner)
+ .from(nil).to(true)
+ .and change(group_member, :last_blocked_owner)
+ .from(nil).to(false)
+ end
+ end
+
+ context "with multiple unblocked owners" do
+ let_it_be(:unblocked_owner_member) { create(:group_member, :owner, source: group) }
+
+ specify do
+ expect { assigner.execute }.to change(group_member, :last_owner)
+ .from(nil).to(false)
+ .and change(group_member, :last_blocked_owner)
+ .from(nil).to(false)
+ end
+
+ it "has many members passed" do
+ assigner = described_class.new(group, [unblocked_owner_member, group_member])
+
+ expect { assigner.execute }.to change(group_member, :last_owner)
+ .from(nil).to(false)
+ .and change(group_member, :last_blocked_owner)
+ .from(nil).to(false)
+ .and change(unblocked_owner_member, :last_owner)
+ .from(nil).to(false)
+ .and change(unblocked_owner_member, :last_blocked_owner)
+ .from(nil).to(false)
+ end
+ end
+ end
+
+ context "when there are blocked owners" do
+ before do
+ user.block!
+ end
+
+ context "with one blocked owner" do
+ specify do
+ expect { assigner.execute }.to change(group_member, :last_owner)
+ .from(nil).to(false)
+ .and change(group_member, :last_blocked_owner)
+ .from(nil).to(true)
+ end
+ end
+
+ context "with multiple unblocked owners" do
+ specify do
+ create_list(:group_member, 2, :owner, source: group)
+
+ expect { assigner.execute }.to change(group_member, :last_owner)
+ .from(nil).to(false)
+ .and change(group_member, :last_blocked_owner)
+ .from(nil).to(false)
+ end
+ end
+
+ context "with multiple blocked owners" do
+ specify do
+ create(:group_member, :owner, :blocked, source: group)
+
+ expect { assigner.execute }.to change(group_member, :last_owner)
+ .from(nil).to(false)
+ .and change(group_member, :last_blocked_owner)
+ .from(nil).to(false)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/members/project_member_spec.rb b/spec/models/members/project_member_spec.rb
index 388d04c8012..ce3e86f964d 100644
--- a/spec/models/members/project_member_spec.rb
+++ b/spec/models/members/project_member_spec.rb
@@ -13,6 +13,10 @@ RSpec.describe ProjectMember do
it { is_expected.to validate_inclusion_of(:access_level).in_array(Gitlab::Access.values) }
end
+ describe 'delegations' do
+ it { is_expected.to delegate_method(:namespace_id).to(:project) }
+ end
+
describe '.access_level_roles' do
it 'returns Gitlab::Access.options' do
expect(described_class.access_level_roles).to eq(Gitlab::Access.options)
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 8c7289adbcc..4b46c98117f 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -186,39 +186,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
let(:multiline_commits) { subject.commits.select(&is_multiline) }
let(:singleline_commits) { subject.commits.reject(&is_multiline) }
- context 'when the total number of commits is safe' do
- it 'returns the oldest multiline commit message' do
- expect(subject.default_squash_commit_message).to eq(multiline_commits.last.message)
- end
- end
-
- context 'when the total number of commits is big' do
- let(:safe_number) { 20 }
-
- before do
- stub_const('MergeRequestDiff::COMMITS_SAFE_SIZE', safe_number)
- end
-
- it 'returns the oldest multiline commit message from safe number of commits' do
- expect(subject.default_squash_commit_message).to eq(
- "remove emtpy file.(beacase git ignore empty file)\nadd whitespace test file.\n"
- )
- end
- end
-
- it 'returns the merge request title if there are no multiline commits' do
- expect(subject).to receive(:commits).and_return(
- CommitCollection.new(project, singleline_commits)
- )
-
- expect(subject.default_squash_commit_message).to eq(subject.title)
- end
-
- it 'does not return commit messages from multiline merge commits' do
- collection = CommitCollection.new(project, multiline_commits).enrich!
-
- expect(collection.commits).to all( receive(:merge_commit?).and_return(true) )
- expect(subject).to receive(:commits).and_return(collection)
+ it 'returns the merge request title' do
expect(subject.default_squash_commit_message).to eq(subject.title)
end
end
@@ -420,6 +388,19 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe '.by_merge_or_squash_commit_sha' do
+ subject { described_class.by_merge_or_squash_commit_sha([sha1, sha2]) }
+
+ let(:sha1) { '123abc' }
+ let(:sha2) { '456abc' }
+ let(:mr1) { create(:merge_request, :merged, squash_commit_sha: sha1) }
+ let(:mr2) { create(:merge_request, :merged, merge_commit_sha: sha2) }
+
+ it 'returns merge requests that match the given squash and merge commits' do
+ is_expected.to include(mr1, mr2)
+ end
+ end
+
describe '.by_related_commit_sha' do
subject { described_class.by_related_commit_sha(sha) }
@@ -462,16 +443,6 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
- describe '.by_cherry_pick_sha' do
- it 'returns merge requests that match the given merge commit' do
- note = create(:track_mr_picking_note, commit_id: '456abc')
-
- create(:track_mr_picking_note, project: create(:project), commit_id: '456def')
-
- expect(described_class.by_cherry_pick_sha('456abc')).to eq([note.noteable])
- end
- end
-
describe '.in_projects' do
it 'returns the merge requests for a set of projects' do
expect(described_class.in_projects(Project.all)).to eq([subject])
@@ -1353,6 +1324,24 @@ RSpec.describe MergeRequest, factory_default: :keep do
expect(subject.work_in_progress?).to eq false
end
+ it 'does not detect Draft: in the middle of the title' do
+ subject.title = 'Something with Draft: in the middle'
+
+ expect(subject.work_in_progress?).to eq false
+ end
+
+ it 'does not detect WIP at the end of the title' do
+ subject.title = 'Something ends with WIP'
+
+ expect(subject.work_in_progress?).to eq false
+ end
+
+ it 'does not detect Draft at the end of the title' do
+ subject.title = 'Something ends with Draft'
+
+ expect(subject.work_in_progress?).to eq false
+ end
+
it "doesn't detect WIP for words starting with WIP" do
subject.title = "Wipwap #{subject.title}"
expect(subject.work_in_progress?).to eq false
@@ -1363,6 +1352,11 @@ RSpec.describe MergeRequest, factory_default: :keep do
expect(subject.work_in_progress?).to eq false
end
+ it "doesn't detect draft for words containing with draft" do
+ subject.title = "Drafting #{subject.title}"
+ expect(subject.work_in_progress?).to eq false
+ end
+
it "doesn't detect WIP by default" do
expect(subject.work_in_progress?).to eq false
end
@@ -1393,6 +1387,42 @@ RSpec.describe MergeRequest, factory_default: :keep do
expect(subject.work_in_progress?).to eq false
end
end
+
+ it 'removes only WIP prefix from the MR title' do
+ subject.title = 'WIP: Implement feature called WIP'
+
+ expect(subject.wipless_title).to eq 'Implement feature called WIP'
+ end
+
+ it 'removes only draft prefix from the MR title' do
+ subject.title = 'Draft: Implement feature called draft'
+
+ expect(subject.wipless_title).to eq 'Implement feature called draft'
+ end
+
+ it 'does not remove WIP in the middle of the title' do
+ subject.title = 'Something with WIP in the middle'
+
+ expect(subject.wipless_title).to eq subject.title
+ end
+
+ it 'does not remove Draft in the middle of the title' do
+ subject.title = 'Something with Draft in the middle'
+
+ expect(subject.wipless_title).to eq subject.title
+ end
+
+ it 'does not remove WIP at the end of the title' do
+ subject.title = 'Something ends with WIP'
+
+ expect(subject.wipless_title).to eq subject.title
+ end
+
+ it 'does not remove Draft at the end of the title' do
+ subject.title = 'Something ends with Draft'
+
+ expect(subject.wipless_title).to eq subject.title
+ end
end
describe "#wip_title" do
@@ -2023,14 +2053,6 @@ RSpec.describe MergeRequest, factory_default: :keep do
let(:merge_request) { create(:merge_request, :with_codequality_reports, source_project: project) }
it { is_expected.to be_truthy }
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(codequality_backend_comparison: false)
- end
-
- it { is_expected.to be_falsey }
- end
end
context 'when head pipeline does not have a codequality report' do
@@ -3857,17 +3879,7 @@ RSpec.describe MergeRequest, factory_default: :keep do
context 'when service class is Ci::CompareCodequalityReportsService' do
let(:service_class) { 'Ci::CompareCodequalityReportsService' }
- context 'when feature flag is enabled' do
- it { is_expected.to be_truthy }
- end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(codequality_backend_comparison: false)
- end
-
- it { is_expected.to be_falsey }
- end
+ it { is_expected.to be_truthy }
end
context 'when service class is different' do
diff --git a/spec/models/namespace/admin_note_spec.rb b/spec/models/namespace/admin_note_spec.rb
new file mode 100644
index 00000000000..65ba1f61416
--- /dev/null
+++ b/spec/models/namespace/admin_note_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Namespace::AdminNote, type: :model do
+ let!(:namespace) { create(:namespace) }
+
+ describe 'associations' do
+ it { is_expected.to belong_to :namespace }
+ end
+
+ describe 'validations' do
+ it { is_expected.to validate_presence_of(:namespace) }
+ it { is_expected.to validate_length_of(:note).is_at_most(1000) }
+ end
+end
diff --git a/spec/models/namespace/traversal_hierarchy_spec.rb b/spec/models/namespace/traversal_hierarchy_spec.rb
index 83e6d704640..b166d541171 100644
--- a/spec/models/namespace/traversal_hierarchy_spec.rb
+++ b/spec/models/namespace/traversal_hierarchy_spec.rb
@@ -43,21 +43,63 @@ RSpec.describe Namespace::TraversalHierarchy, type: :model do
end
end
+ shared_examples 'locked update query' do
+ it 'locks query with FOR UPDATE' do
+ qr = ActiveRecord::QueryRecorder.new do
+ subject
+ end
+ expect(qr.count).to eq 1
+ expect(qr.log.first).to match /FOR UPDATE/
+ end
+ end
+
describe '#incorrect_traversal_ids' do
- subject { described_class.new(root).incorrect_traversal_ids }
+ let!(:hierarchy) { described_class.new(root) }
+
+ subject { hierarchy.incorrect_traversal_ids }
+
+ before do
+ Namespace.update_all(traversal_ids: [])
+ end
it { is_expected.to match_array Namespace.all }
+
+ context 'when lock is true' do
+ subject { hierarchy.incorrect_traversal_ids(lock: true).load }
+
+ it_behaves_like 'locked update query'
+ end
end
describe '#sync_traversal_ids!' do
- let(:hierarchy) { described_class.new(root) }
+ let!(:hierarchy) { described_class.new(root) }
- before do
- hierarchy.sync_traversal_ids!
- root.reload
- end
+ subject { hierarchy.sync_traversal_ids! }
- it_behaves_like 'hierarchy with traversal_ids'
it { expect(hierarchy.incorrect_traversal_ids).to be_empty }
+
+ it_behaves_like 'hierarchy with traversal_ids'
+ it_behaves_like 'locked update query'
+
+ context 'when deadlocked' do
+ before do
+ connection_double = double(:connection)
+
+ allow(Namespace).to receive(:connection).and_return(connection_double)
+ allow(connection_double).to receive(:exec_query) { raise ActiveRecord::Deadlocked.new }
+ end
+
+ it { expect { subject }.to raise_error(ActiveRecord::Deadlocked) }
+
+ it 'increments the db_deadlock counter' do
+ expect { subject rescue nil }.to change { db_deadlock_total('Namespace#sync_traversal_ids!') }.by(1)
+ end
+ end
+ end
+
+ def db_deadlock_total(source)
+ Gitlab::Metrics
+ .counter(:db_deadlock, 'Counts the times we have deadlocked in the database')
+ .get(source: source)
end
end
diff --git a/spec/models/namespace_setting_spec.rb b/spec/models/namespace_setting_spec.rb
index 59b7510051f..14d28be8d43 100644
--- a/spec/models/namespace_setting_spec.rb
+++ b/spec/models/namespace_setting_spec.rb
@@ -66,5 +66,36 @@ RSpec.describe NamespaceSetting, type: :model do
end
end
end
+
+ describe '#allow_resource_access_token_creation_for_group' do
+ let(:settings) { group.namespace_settings }
+
+ context 'group is a top-level group' do
+ let(:group) { create(:group) }
+
+ it 'is valid' do
+ settings.resource_access_token_creation_allowed = false
+
+ expect(settings).to be_valid
+ end
+ end
+
+ context 'group is a subgroup' do
+ let(:group) { create(:group, parent: create(:group)) }
+
+ it 'is invalid when resource access token creation is not enabled' do
+ settings.resource_access_token_creation_allowed = false
+
+ expect(settings).to be_invalid
+ expect(group.namespace_settings.errors.messages[:resource_access_token_creation_allowed]).to include("is not allowed since the group is not top-level group.")
+ end
+
+ it 'is valid when resource access tokens are enabled' do
+ settings.resource_access_token_creation_allowed = true
+
+ expect(settings).to be_valid
+ end
+ end
+ end
end
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 65d787d334b..96ecc9836d4 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -21,6 +21,7 @@ RSpec.describe Namespace do
it { is_expected.to have_many :custom_emoji }
it { is_expected.to have_one :package_setting_relation }
it { is_expected.to have_one :onboarding_progress }
+ it { is_expected.to have_one :admin_note }
end
describe 'validations' do
@@ -154,6 +155,33 @@ RSpec.describe Namespace do
end
end
+ describe 'scopes' do
+ let_it_be(:namespace1) { create(:group, name: 'Namespace 1', path: 'namespace-1') }
+ let_it_be(:namespace2) { create(:group, name: 'Namespace 2', path: 'namespace-2') }
+ let_it_be(:namespace1sub) { create(:group, name: 'Sub Namespace', path: 'sub-namespace', parent: namespace1) }
+ let_it_be(:namespace2sub) { create(:group, name: 'Sub Namespace', path: 'sub-namespace', parent: namespace2) }
+
+ describe '.by_parent' do
+ it 'includes correct namespaces' do
+ expect(described_class.by_parent(namespace1.id)).to eq([namespace1sub])
+ expect(described_class.by_parent(namespace2.id)).to eq([namespace2sub])
+ expect(described_class.by_parent(nil)).to match_array([namespace, namespace1, namespace2])
+ end
+ end
+
+ describe '.filter_by_path' do
+ it 'includes correct namespaces' do
+ expect(described_class.filter_by_path(namespace1.path)).to eq([namespace1])
+ expect(described_class.filter_by_path(namespace2.path)).to eq([namespace2])
+ expect(described_class.filter_by_path('sub-namespace')).to match_array([namespace1sub, namespace2sub])
+ end
+
+ it 'filters case-insensitively' do
+ expect(described_class.filter_by_path(namespace1.path.upcase)).to eq([namespace1])
+ end
+ end
+ end
+
describe 'delegate' do
it { is_expected.to delegate_method(:name).to(:owner).with_prefix.with_arguments(allow_nil: true) }
it { is_expected.to delegate_method(:avatar_url).to(:owner).with_arguments(allow_nil: true) }
@@ -168,27 +196,19 @@ RSpec.describe Namespace do
describe 'inclusions' do
it { is_expected.to include_module(Gitlab::VisibilityLevel) }
it { is_expected.to include_module(Namespaces::Traversal::Recursive) }
+ it { is_expected.to include_module(Namespaces::Traversal::Linear) }
end
- describe 'callbacks' do
- describe 'before_save :ensure_delayed_project_removal_assigned_to_namespace_settings' do
- it 'sets the matching value in namespace_settings' do
- expect { namespace.update!(delayed_project_removal: true) }.to change {
- namespace.namespace_settings.delayed_project_removal
- }.from(false).to(true)
- end
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(migrate_delayed_project_removal: false)
- end
+ context 'traversal_ids on create' do
+ context 'default traversal_ids' do
+ let(:namespace) { build(:namespace) }
- it 'does not set the matching value in namespace_settings' do
- expect { namespace.update!(delayed_project_removal: true) }.not_to change {
- namespace.namespace_settings.delayed_project_removal
- }
- end
+ before do
+ namespace.save!
+ namespace.reload
end
+
+ it { expect(namespace.traversal_ids).to eq [namespace.id] }
end
end
@@ -859,7 +879,51 @@ RSpec.describe Namespace do
end
end
- it_behaves_like 'recursive namespace traversal'
+ describe '#use_traversal_ids?' do
+ let_it_be(:namespace) { build(:namespace) }
+
+ subject { namespace.use_traversal_ids? }
+
+ context 'when use_traversal_ids feature flag is true' do
+ before do
+ stub_feature_flags(use_traversal_ids: true)
+ end
+
+ it { is_expected.to eq true }
+ end
+
+ context 'when use_traversal_ids feature flag is false' do
+ before do
+ stub_feature_flags(use_traversal_ids: false)
+ end
+
+ it { is_expected.to eq false }
+ end
+ end
+
+ context 'when use_traversal_ids feature flag is true' do
+ it_behaves_like 'namespace traversal'
+
+ describe '#self_and_descendants' do
+ subject { namespace.self_and_descendants }
+
+ it { expect(subject.to_sql).to include 'traversal_ids @>' }
+ end
+ end
+
+ context 'when use_traversal_ids feature flag is false' do
+ before do
+ stub_feature_flags(use_traversal_ids: false)
+ end
+
+ it_behaves_like 'namespace traversal'
+
+ describe '#self_and_descendants' do
+ subject { namespace.self_and_descendants }
+
+ it { expect(subject.to_sql).not_to include 'traversal_ids @>' }
+ end
+ end
describe '#users_with_descendants' do
let(:user_a) { create(:user) }
@@ -897,24 +961,10 @@ RSpec.describe Namespace do
it { expect(namespace.all_projects.to_a).to match_array([project2, project1]) }
it { expect(child.all_projects.to_a).to match_array([project2]) }
- context 'when recursive_namespace_lookup_as_inner_join feature flag is on' do
- before do
- stub_feature_flags(recursive_namespace_lookup_as_inner_join: true)
- end
+ it 'queries for the namespace and its descendants' do
+ expect(Project).to receive(:where).with(namespace: [namespace, child])
- it 'queries for the namespace and its descendants' do
- expect(namespace.all_projects).to match_array([project1, project2])
- end
- end
-
- context 'when recursive_namespace_lookup_as_inner_join feature flag is off' do
- before do
- stub_feature_flags(recursive_namespace_lookup_as_inner_join: false)
- end
-
- it 'queries for the namespace and its descendants' do
- expect(namespace.all_projects).to match_array([project1, project2])
- end
+ namespace.all_projects
end
end
@@ -1085,21 +1135,42 @@ RSpec.describe Namespace do
end
describe '#root_ancestor' do
- let!(:root_group) { create(:group) }
+ context 'with persisted root group' do
+ let!(:root_group) { create(:group) }
- it 'returns root_ancestor for root group without a query' do
- expect { root_group.root_ancestor }.not_to exceed_query_limit(0)
+ it 'returns root_ancestor for root group without a query' do
+ expect { root_group.root_ancestor }.not_to exceed_query_limit(0)
+ end
+
+ it 'returns the top most ancestor' do
+ nested_group = create(:group, parent: root_group)
+ deep_nested_group = create(:group, parent: nested_group)
+ very_deep_nested_group = create(:group, parent: deep_nested_group)
+
+ expect(root_group.root_ancestor).to eq(root_group)
+ expect(nested_group.root_ancestor).to eq(root_group)
+ expect(deep_nested_group.root_ancestor).to eq(root_group)
+ expect(very_deep_nested_group.root_ancestor).to eq(root_group)
+ end
end
- it 'returns the top most ancestor' do
- nested_group = create(:group, parent: root_group)
- deep_nested_group = create(:group, parent: nested_group)
- very_deep_nested_group = create(:group, parent: deep_nested_group)
+ context 'with not persisted root group' do
+ let!(:root_group) { build(:group) }
- expect(root_group.root_ancestor).to eq(root_group)
- expect(nested_group.root_ancestor).to eq(root_group)
- expect(deep_nested_group.root_ancestor).to eq(root_group)
- expect(very_deep_nested_group.root_ancestor).to eq(root_group)
+ it 'returns root_ancestor for root group without a query' do
+ expect { root_group.root_ancestor }.not_to exceed_query_limit(0)
+ end
+
+ it 'returns the top most ancestor' do
+ nested_group = build(:group, parent: root_group)
+ deep_nested_group = build(:group, parent: nested_group)
+ very_deep_nested_group = build(:group, parent: deep_nested_group)
+
+ expect(root_group.root_ancestor).to eq(root_group)
+ expect(nested_group.root_ancestor).to eq(root_group)
+ expect(deep_nested_group.root_ancestor).to eq(root_group)
+ expect(very_deep_nested_group.root_ancestor).to eq(root_group)
+ end
end
end
@@ -1372,6 +1443,12 @@ RSpec.describe Namespace do
end
end
+ describe '#paid?' do
+ it 'returns false for a root namespace with a free plan' do
+ expect(namespace.paid?).to eq(false)
+ end
+ end
+
describe '#shared_runners_setting' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 590acfc0ac1..992b2246f01 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe Note do
it { is_expected.to include_module(Participable) }
it { is_expected.to include_module(Mentionable) }
it { is_expected.to include_module(Awardable) }
+ it { is_expected.to include_module(Sortable) }
end
describe 'validation' do
@@ -856,6 +857,22 @@ RSpec.describe Note do
end
end
+ describe '.simple_sorts' do
+ it 'does not contain name sorts' do
+ expect(described_class.simple_sorts.grep(/name/)).to be_empty
+ end
+ end
+
+ describe '.cherry_picked_merge_requests' do
+ it 'returns merge requests that match the given merge commit' do
+ note = create(:track_mr_picking_note, commit_id: '456abc')
+
+ create(:track_mr_picking_note, project: create(:project), commit_id: '456def')
+
+ expect(MergeRequest.id_in(described_class.cherry_picked_merge_requests('456abc'))).to eq([note.noteable])
+ end
+ end
+
describe '#for_project_snippet?' do
it 'returns true for a project snippet note' do
expect(build(:note_on_project_snippet).for_project_snippet?).to be true
@@ -1322,7 +1339,7 @@ RSpec.describe Note do
let_it_be(:note1) { create(:note, note: 'Test 345') }
let_it_be(:note2) { create(:note, note: 'Test 789') }
- describe '#for_note_or_capitalized_note' do
+ describe '.for_note_or_capitalized_note' do
it 'returns the expected matching note' do
notes = described_class.for_note_or_capitalized_note('Test 345')
@@ -1344,7 +1361,7 @@ RSpec.describe Note do
end
end
- describe '#like_note_or_capitalized_note' do
+ describe '.like_note_or_capitalized_note' do
it 'returns the expected matching note' do
notes = described_class.like_note_or_capitalized_note('Test 345')
@@ -1367,69 +1384,69 @@ RSpec.describe Note do
expect(notes.second.id).to eq(note2.id)
end
end
+ end
- describe '#noteable_assignee_or_author' do
- let(:user) { create(:user) }
- let(:noteable) { create(:issue) }
- let(:note) { create(:note, project: noteable.project, noteable: noteable) }
+ describe '#noteable_assignee_or_author?' do
+ let(:user) { create(:user) }
+ let(:noteable) { create(:issue) }
+ let(:note) { create(:note, project: noteable.project, noteable: noteable) }
- subject { note.noteable_assignee_or_author?(user) }
+ subject { note.noteable_assignee_or_author?(user) }
- shared_examples 'assignee check' do
- context 'when the provided user is one of the assignees' do
- before do
- note.noteable.update(assignees: [user, create(:user)])
- end
+ shared_examples 'assignee check' do
+ context 'when the provided user is one of the assignees' do
+ before do
+ note.noteable.update(assignees: [user, create(:user)])
+ end
- it 'returns true' do
- expect(subject).to be_truthy
- end
+ it 'returns true' do
+ expect(subject).to be_truthy
end
end
+ end
- shared_examples 'author check' do
- context 'when the provided user is the author' do
- before do
- note.noteable.update(author: user)
- end
-
- it 'returns true' do
- expect(subject).to be_truthy
- end
+ shared_examples 'author check' do
+ context 'when the provided user is the author' do
+ before do
+ note.noteable.update(author: user)
end
- context 'when the provided user is neither author nor assignee' do
- it 'returns true' do
- expect(subject).to be_falsey
- end
+ it 'returns true' do
+ expect(subject).to be_truthy
end
end
- context 'when user is nil' do
- let(:user) { nil }
-
- it 'returns false' do
+ context 'when the provided user is neither author nor assignee' do
+ it 'returns false' do
expect(subject).to be_falsey
end
end
+ end
+
+ context 'when user is nil' do
+ let(:user) { nil }
- context 'when noteable is an issue' do
- it_behaves_like 'author check'
- it_behaves_like 'assignee check'
+ it 'returns false' do
+ expect(subject).to be_falsey
end
+ end
- context 'when noteable is a merge request' do
- let(:noteable) { create(:merge_request) }
+ context 'when noteable is an issue' do
+ it_behaves_like 'author check'
+ it_behaves_like 'assignee check'
+ end
- it_behaves_like 'author check'
- it_behaves_like 'assignee check'
- end
+ context 'when noteable is a merge request' do
+ let(:noteable) { create(:merge_request) }
- context 'when noteable is a snippet' do
- let(:noteable) { create(:personal_snippet) }
+ it_behaves_like 'author check'
+ it_behaves_like 'assignee check'
+ end
- it_behaves_like 'author check'
- end
+ context 'when noteable is a snippet' do
+ let(:noteable) { create(:personal_snippet) }
+
+ it_behaves_like 'author check'
end
end
diff --git a/spec/models/notification_setting_spec.rb b/spec/models/notification_setting_spec.rb
index 4ef5ab7af48..010b7455f85 100644
--- a/spec/models/notification_setting_spec.rb
+++ b/spec/models/notification_setting_spec.rb
@@ -200,4 +200,18 @@ RSpec.describe NotificationSetting do
subject.email_events
end
end
+
+ describe '#order_by_id_asc' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:other_project) { create(:project) }
+ let_it_be(:notification_setting_1) { create(:notification_setting, project: project) }
+ let_it_be(:notification_setting_2) { create(:notification_setting, project: other_project) }
+ let_it_be(:notification_setting_3) { create(:notification_setting, project: project) }
+
+ let(:ids) { [notification_setting_1, notification_setting_2, notification_setting_3].map(&:id) }
+
+ subject(:ordered_records) { described_class.where(id: ids, source: project).order_by_id_asc }
+
+ it { is_expected.to eq([notification_setting_1, notification_setting_3]) }
+ end
end
diff --git a/spec/models/packages/debian/file_entry_spec.rb b/spec/models/packages/debian/file_entry_spec.rb
new file mode 100644
index 00000000000..7aa16bc0cce
--- /dev/null
+++ b/spec/models/packages/debian/file_entry_spec.rb
@@ -0,0 +1,98 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::FileEntry, type: :model do
+ let_it_be(:package_file) { create(:debian_package_file, :dsc) }
+
+ let(:filename) { 'sample_1.2.3~alpha2.dsc' }
+ let(:size) { 671 }
+ let(:md5sum) { '3b0817804f669e16cdefac583ad88f0e' }
+ let(:section) { 'libs' }
+ let(:priority) { 'optional' }
+ let(:sha1sum) { '32ecbd674f0bfd310df68484d87752490685a8d6' }
+ let(:sha256sum) { '844f79825b7e8aaa191e514b58a81f9ac1e58e2180134b0c9512fa66d896d7ba' }
+
+ let(:file_entry) do
+ described_class.new(
+ filename: filename,
+ size: size,
+ md5sum: md5sum,
+ section: section,
+ priority: priority,
+ sha1sum: sha1sum,
+ sha256sum: sha256sum,
+ package_file: package_file
+ )
+ end
+
+ subject { file_entry }
+
+ describe 'validations' do
+ it { is_expected.to be_valid }
+
+ describe '#filename' do
+ it { is_expected.to validate_presence_of(:filename) }
+ it { is_expected.not_to allow_value('Hé').for(:filename) }
+ end
+
+ describe '#size' do
+ it { is_expected.to validate_presence_of(:size) }
+ end
+
+ describe '#md5sum' do
+ it { is_expected.to validate_presence_of(:md5sum) }
+ it { is_expected.not_to allow_value('12345678901234567890123456789012').for(:md5sum).with_message('mismatch for sample_1.2.3~alpha2.dsc: 3b0817804f669e16cdefac583ad88f0e != 12345678901234567890123456789012') }
+ end
+
+ describe '#section' do
+ it { is_expected.to validate_presence_of(:section) }
+ end
+
+ describe '#priority' do
+ it { is_expected.to validate_presence_of(:priority) }
+ end
+
+ describe '#sha1sum' do
+ it { is_expected.to validate_presence_of(:sha1sum) }
+ it { is_expected.not_to allow_value('1234567890123456789012345678901234567890').for(:sha1sum).with_message('mismatch for sample_1.2.3~alpha2.dsc: 32ecbd674f0bfd310df68484d87752490685a8d6 != 1234567890123456789012345678901234567890') }
+ end
+
+ describe '#sha256sum' do
+ it { is_expected.to validate_presence_of(:sha256sum) }
+ it { is_expected.not_to allow_value('1234567890123456789012345678901234567890123456789012345678901234').for(:sha256sum).with_message('mismatch for sample_1.2.3~alpha2.dsc: 844f79825b7e8aaa191e514b58a81f9ac1e58e2180134b0c9512fa66d896d7ba != 1234567890123456789012345678901234567890123456789012345678901234') }
+ end
+
+ describe '#package_file' do
+ it { is_expected.to validate_presence_of(:package_file) }
+ end
+ end
+
+ describe '#component' do
+ subject { file_entry.component }
+
+ context 'without section' do
+ let(:section) { nil }
+
+ it { is_expected.to eq 'main' }
+ end
+
+ context 'with empty section' do
+ let(:section) { '' }
+
+ it { is_expected.to eq 'main' }
+ end
+
+ context 'with ruby section' do
+ let(:section) { 'ruby' }
+
+ it { is_expected.to eq 'main' }
+ end
+
+ context 'with contrib/ruby section' do
+ let(:section) { 'contrib/ruby' }
+
+ it { is_expected.to eq 'contrib' }
+ end
+ end
+end
diff --git a/spec/models/packages/dependency_spec.rb b/spec/models/packages/dependency_spec.rb
index fa6b0fd1848..4437cad46cd 100644
--- a/spec/models/packages/dependency_spec.rb
+++ b/spec/models/packages/dependency_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe Packages::Dependency, type: :model do
context 'with too big parameter' do
let(:size) { (Packages::Dependency::MAX_CHUNKED_QUERIES_COUNT * chunk_size) + 1 }
- let(:names_and_version_patterns) { Hash[(1..size).map { |v| [v, v] }] }
+ let(:names_and_version_patterns) { (1..size).to_h { |v| [v, v] } }
it { expect { subject }.to raise_error(ArgumentError, 'Too many names_and_version_patterns') }
end
diff --git a/spec/models/packages/go/module_version_spec.rb b/spec/models/packages/go/module_version_spec.rb
index c4c6a07d9e9..7fa416d8537 100644
--- a/spec/models/packages/go/module_version_spec.rb
+++ b/spec/models/packages/go/module_version_spec.rb
@@ -3,19 +3,9 @@
require 'spec_helper'
RSpec.describe Packages::Go::ModuleVersion, type: :model do
- let_it_be(:user) { create :user }
- let_it_be(:project) { create :project_empty_repo, creator: user, path: 'my-go-lib' }
- let_it_be(:mod) { create :go_module, project: project }
+ include_context 'basic Go module'
- before :all do
- create :go_module_commit, :files, project: project, tag: 'v1.0.0', files: { 'README.md' => 'Hi' }
- create :go_module_commit, :module, project: project, tag: 'v1.0.1'
- create :go_module_commit, :package, project: project, tag: 'v1.0.2', path: 'pkg'
- create :go_module_commit, :module, project: project, tag: 'v1.0.3', name: 'mod'
- create :go_module_commit, :files, project: project, files: { 'y.go' => "package a\n" }
- create :go_module_commit, :module, project: project, name: 'v2'
- create :go_module_commit, :files, project: project, tag: 'v2.0.0', files: { 'v2/x.go' => "package a\n" }
- end
+ let_it_be(:mod) { create :go_module, project: project }
shared_examples '#files' do |desc, *entries|
it "returns #{desc}" do
diff --git a/spec/models/packages/maven/metadatum_spec.rb b/spec/models/packages/maven/metadatum_spec.rb
index 94a0e558985..0000543cb18 100644
--- a/spec/models/packages/maven/metadatum_spec.rb
+++ b/spec/models/packages/maven/metadatum_spec.rb
@@ -54,7 +54,7 @@ RSpec.describe Packages::Maven::Metadatum, type: :model do
let_it_be(:metadatum3) { create(:maven_metadatum, package: package) }
let_it_be(:metadatum4) { create(:maven_metadatum, package: package) }
- subject { Packages::Maven::Metadatum.for_package_ids(package.id).order_created }
+ subject { described_class.for_package_ids(package.id).order_created }
it { is_expected.to eq([metadatum1, metadatum2, metadatum3, metadatum4]) }
end
@@ -64,10 +64,20 @@ RSpec.describe Packages::Maven::Metadatum, type: :model do
let_it_be(:metadatum2) { create(:maven_metadatum, package: package, app_name: 'two') }
let_it_be(:metadatum3) { create(:maven_metadatum, package: package, app_name: 'three') }
- subject { Packages::Maven::Metadatum.for_package_ids(package.id).pluck_app_name }
+ subject { described_class.for_package_ids(package.id).pluck_app_name }
it { is_expected.to match_array([metadatum1, metadatum2, metadatum3].map(&:app_name)) }
end
+
+ describe '.with_path' do
+ let_it_be(:metadatum1) { create(:maven_metadatum, package: package, path: 'one') }
+ let_it_be(:metadatum2) { create(:maven_metadatum, package: package, path: 'two') }
+ let_it_be(:metadatum3) { create(:maven_metadatum, package: package, path: 'three') }
+
+ subject { described_class.with_path('two') }
+
+ it { is_expected.to match_array([metadatum2]) }
+ end
end
end
end
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index 82997acee3f..cf52749a186 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -99,6 +99,34 @@ RSpec.describe Packages::Package, type: :model do
end
end
+ describe '.for_projects' do
+ let_it_be(:package1) { create(:maven_package) }
+ let_it_be(:package2) { create(:maven_package) }
+ let_it_be(:package3) { create(:maven_package) }
+
+ let(:projects) { ::Project.id_in([package1.project_id, package2.project_id]) }
+
+ subject { described_class.for_projects(projects.select(:id)) }
+
+ it 'returns package1 and package2' do
+ expect(projects).not_to receive(:any?)
+
+ expect(subject).to match_array([package1, package2])
+ end
+
+ context 'with maven_packages_group_level_improvements disabled' do
+ before do
+ stub_feature_flags(maven_packages_group_level_improvements: false)
+ end
+
+ it 'returns package1 and package2' do
+ expect(projects).to receive(:any?).and_call_original
+
+ expect(subject).to match_array([package1, package2])
+ end
+ end
+ end
+
describe 'validations' do
subject { build(:package) }
@@ -339,7 +367,14 @@ RSpec.describe Packages::Package, type: :model do
it { is_expected.to validate_presence_of(:version) }
it { is_expected.to allow_value('1.2.3').for(:version) }
it { is_expected.to allow_value('1.3.350').for(:version) }
- it { is_expected.not_to allow_value('1.3.350-20201230123456').for(:version) }
+ it { is_expected.to allow_value('1.3.350-20201230123456').for(:version) }
+ it { is_expected.to allow_value('1.2.3-rc1').for(:version) }
+ it { is_expected.to allow_value('1.2.3g').for(:version) }
+ it { is_expected.to allow_value('1.2').for(:version) }
+ it { is_expected.to allow_value('1.2.bananas').for(:version) }
+ it { is_expected.to allow_value('v1.2.4-build').for(:version) }
+ it { is_expected.to allow_value('d50d836eb3de6177ce6c7a5482f27f9c2c84b672').for(:version) }
+ it { is_expected.to allow_value('this_is_a_string_only').for(:version) }
it { is_expected.not_to allow_value('..1.2.3').for(:version) }
it { is_expected.not_to allow_value(' 1.2.3').for(:version) }
it { is_expected.not_to allow_value("1.2.3 \r\t").for(:version) }
@@ -621,10 +656,12 @@ RSpec.describe Packages::Package, type: :model do
describe '.displayable' do
let_it_be(:hidden_package) { create(:maven_package, :hidden) }
let_it_be(:processing_package) { create(:maven_package, :processing) }
+ let_it_be(:error_package) { create(:maven_package, :error) }
subject { described_class.displayable }
- it 'does not include hidden packages', :aggregate_failures do
+ it 'does not include non-displayable packages', :aggregate_failures do
+ is_expected.to include(error_package)
is_expected.not_to include(hidden_package)
is_expected.not_to include(processing_package)
end
diff --git a/spec/models/pages/lookup_path_spec.rb b/spec/models/pages/lookup_path_spec.rb
index 9e65635da91..f2659771a49 100644
--- a/spec/models/pages/lookup_path_spec.rb
+++ b/spec/models/pages/lookup_path_spec.rb
@@ -59,7 +59,7 @@ RSpec.describe Pages::LookupPath do
it 'return nil when legacy storage is disabled and there is no deployment' do
stub_feature_flags(pages_serve_from_legacy_storage: false)
expect(Gitlab::ErrorTracking).to receive(:track_exception)
- .with(described_class::LegacyStorageDisabledError)
+ .with(described_class::LegacyStorageDisabledError, project_id: project.id)
.and_call_original
expect(source).to eq(nil)
@@ -136,14 +136,6 @@ RSpec.describe Pages::LookupPath do
)
end
end
-
- context 'when pages_serve_from_migrated_zip feature flag is disabled' do
- before do
- stub_feature_flags(pages_serve_from_migrated_zip: false)
- end
-
- include_examples 'uses disk storage'
- end
end
end
end
diff --git a/spec/models/pages_deployment_spec.rb b/spec/models/pages_deployment_spec.rb
index 029eb8e513a..a27d836e2c2 100644
--- a/spec/models/pages_deployment_spec.rb
+++ b/spec/models/pages_deployment_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe PagesDeployment do
+ let_it_be(:project) { create(:project) }
+
describe 'associations' do
it { is_expected.to belong_to(:project).required }
it { is_expected.to belong_to(:ci_build).optional }
@@ -28,7 +30,6 @@ RSpec.describe PagesDeployment do
describe '.migrated_from_legacy_storage' do
it 'only returns migrated deployments' do
- project = create(:project)
migrated_deployment = create_migrated_deployment(project)
# create one other deployment
create(:pages_deployment, project: project)
@@ -37,6 +38,27 @@ RSpec.describe PagesDeployment do
end
end
+ context 'with deployments stored locally and remotely' do
+ before do
+ stub_pages_object_storage(::Pages::DeploymentUploader)
+ end
+
+ let!(:remote_deployment) { create(:pages_deployment, project: project, file_store: ::ObjectStorage::Store::REMOTE) }
+ let!(:local_deployment) { create(:pages_deployment, project: project, file_store: ::ObjectStorage::Store::LOCAL) }
+
+ describe '.with_files_stored_locally' do
+ it 'only returns deployments with files stored locally' do
+ expect(described_class.with_files_stored_locally).to contain_exactly(local_deployment)
+ end
+ end
+
+ describe '.with_files_stored_remotely' do
+ it 'only returns deployments with files stored remotely' do
+ expect(described_class.with_files_stored_remotely).to contain_exactly(remote_deployment)
+ end
+ end
+ end
+
describe '#migrated?' do
it 'returns false for normal deployment' do
deployment = create(:pages_deployment)
@@ -45,7 +67,6 @@ RSpec.describe PagesDeployment do
end
it 'returns true for migrated deployment' do
- project = create(:project)
deployment = create_migrated_deployment(project)
expect(deployment.migrated?).to eq(true)
@@ -67,7 +88,6 @@ RSpec.describe PagesDeployment do
end
describe 'default for file_store' do
- let(:project) { create(:project) }
let(:deployment) do
filepath = Rails.root.join("spec/fixtures/pages.zip")
diff --git a/spec/models/preloaders/labels_preloader_spec.rb b/spec/models/preloaders/labels_preloader_spec.rb
new file mode 100644
index 00000000000..94de00bb94c
--- /dev/null
+++ b/spec/models/preloaders/labels_preloader_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Preloaders::LabelsPreloader do
+ let_it_be(:user) { create(:user) }
+
+ shared_examples 'an efficient database query' do
+ let(:subscriptions) { labels.each { |l| create(:subscription, subscribable: l, project: l.project, user: user) }}
+
+ it 'does not make n+1 queries' do
+ first_label = labels_with_preloaded_data.first
+ clean_labels = labels_with_preloaded_data
+
+ expect { access_data(clean_labels) }.to issue_same_number_of_queries_as { access_data([first_label]) }
+ end
+ end
+
+ context 'project labels' do
+ let_it_be(:projects) { create_list(:project, 3, :public, :repository) }
+ let_it_be(:labels) { projects.each { |p| create(:label, project: p) } }
+
+ it_behaves_like 'an efficient database query'
+ end
+
+ context 'group labels' do
+ let_it_be(:groups) { create_list(:group, 3) }
+ let_it_be(:labels) { groups.each { |g| create(:group_label, group: g) } }
+
+ it_behaves_like 'an efficient database query'
+ end
+
+ private
+
+ def labels_with_preloaded_data
+ l = Label.where(id: labels.map(&:id))
+ described_class.new(l, user).preload_all
+ l
+ end
+
+ def access_data(labels)
+ labels.each do |label|
+ if label.is_a?(ProjectLabel)
+ label.project.project_feature
+ label.lazy_subscription(user, label.project)
+ elsif label.is_a?(GroupLabel)
+ label.group.route
+ label.lazy_subscription(user)
+ end
+ end
+ end
+end
diff --git a/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb b/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
new file mode 100644
index 00000000000..16e699b7e0e
--- /dev/null
+++ b/spec/models/preloaders/user_max_access_level_in_projects_preloader_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Preloaders::UserMaxAccessLevelInProjectsPreloader do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+ let_it_be(:project_3) { create(:project) }
+
+ let(:projects) { [project_1, project_2, project_3] }
+
+ before do
+ project_1.add_developer(user)
+ project_2.add_developer(user)
+ end
+
+ context 'preload maximum access level to avoid querying project_authorizations', :request_store do
+ it 'avoids N+1 queries', :request_store do
+ Preloaders::UserMaxAccessLevelInProjectsPreloader.new(projects, user).execute
+
+ query_count = ActiveRecord::QueryRecorder.new do
+ projects.each { |project| user.can?(:read_project, project) }
+ end.count
+
+ expect(query_count).to eq(0)
+ end
+
+ it 'runs N queries without preloading' do
+ query_count = ActiveRecord::QueryRecorder.new do
+ projects.each { |project| user.can?(:read_project, project) }
+ end.count
+
+ expect(query_count).to eq(projects.size)
+ end
+ end
+end
diff --git a/spec/models/project_feature_usage_spec.rb b/spec/models/project_feature_usage_spec.rb
index cd70602fc4d..4baa59535e4 100644
--- a/spec/models/project_feature_usage_spec.rb
+++ b/spec/models/project_feature_usage_spec.rb
@@ -24,21 +24,91 @@ RSpec.describe ProjectFeatureUsage, type: :model do
subject { project.feature_usage }
- it 'logs Jira DVCS Cloud last sync' do
- freeze_time do
- subject.log_jira_dvcs_integration_usage
+ context 'when the feature usage has not been created yet' do
+ it 'logs Jira DVCS Cloud last sync' do
+ freeze_time do
+ subject.log_jira_dvcs_integration_usage
- expect(subject.jira_dvcs_server_last_sync_at).to be_nil
- expect(subject.jira_dvcs_cloud_last_sync_at).to be_like_time(Time.current)
+ expect(subject.jira_dvcs_server_last_sync_at).to be_nil
+ expect(subject.jira_dvcs_cloud_last_sync_at).to be_like_time(Time.current)
+ end
+ end
+
+ it 'logs Jira DVCS Server last sync' do
+ freeze_time do
+ subject.log_jira_dvcs_integration_usage(cloud: false)
+
+ expect(subject.jira_dvcs_server_last_sync_at).to be_like_time(Time.current)
+ expect(subject.jira_dvcs_cloud_last_sync_at).to be_nil
+ end
end
end
- it 'logs Jira DVCS Server last sync' do
- freeze_time do
- subject.log_jira_dvcs_integration_usage(cloud: false)
+ context 'when the feature usage already exists' do
+ let(:today) { Time.current.beginning_of_day }
+ let(:project) { create(:project) }
+
+ subject { project.feature_usage }
- expect(subject.jira_dvcs_server_last_sync_at).to be_like_time(Time.current)
- expect(subject.jira_dvcs_cloud_last_sync_at).to be_nil
+ where(:cloud, :timestamp_field) do
+ [
+ [true, :jira_dvcs_cloud_last_sync_at],
+ [false, :jira_dvcs_server_last_sync_at]
+ ]
+ end
+
+ with_them do
+ context 'when Jira DVCS Cloud last sync has not been logged' do
+ before do
+ travel_to today - 3.days do
+ subject.log_jira_dvcs_integration_usage(cloud: !cloud)
+ end
+ end
+
+ it 'logs Jira DVCS Cloud last sync' do
+ freeze_time do
+ subject.log_jira_dvcs_integration_usage(cloud: cloud)
+
+ expect(subject.reload.send(timestamp_field)).to be_like_time(Time.current)
+ end
+ end
+ end
+
+ context 'when Jira DVCS Cloud last sync was logged today' do
+ let(:last_updated) { today + 1.hour }
+
+ before do
+ travel_to last_updated do
+ subject.log_jira_dvcs_integration_usage(cloud: cloud)
+ end
+ end
+
+ it 'does not log Jira DVCS Cloud last sync' do
+ travel_to today + 2.hours do
+ subject.log_jira_dvcs_integration_usage(cloud: cloud)
+
+ expect(subject.reload.send(timestamp_field)).to be_like_time(last_updated)
+ end
+ end
+ end
+
+ context 'when Jira DVCS Cloud last sync was logged yesterday' do
+ let(:last_updated) { today - 2.days }
+
+ before do
+ travel_to last_updated do
+ subject.log_jira_dvcs_integration_usage(cloud: cloud)
+ end
+ end
+
+ it 'logs Jira DVCS Cloud last sync' do
+ travel_to today + 1.hour do
+ subject.log_jira_dvcs_integration_usage(cloud: cloud)
+
+ expect(subject.reload.send(timestamp_field)).to be_like_time(today + 1.hour)
+ end
+ end
+ end
end
end
diff --git a/spec/models/project_services/chat_message/alert_message_spec.rb b/spec/models/project_services/chat_message/alert_message_spec.rb
index 927c5dffe77..4d400990789 100644
--- a/spec/models/project_services/chat_message/alert_message_spec.rb
+++ b/spec/models/project_services/chat_message/alert_message_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe ChatMessage::AlertMessage do
subject { described_class.new(args) }
let_it_be(:start_time) { Time.current }
+
let(:alert) { create(:alert_management_alert, started_at: start_time) }
let(:args) do
diff --git a/spec/models/project_services/chat_message/merge_message_spec.rb b/spec/models/project_services/chat_message/merge_message_spec.rb
index 02b266e4fae..71cfe3ff45b 100644
--- a/spec/models/project_services/chat_message/merge_message_spec.rb
+++ b/spec/models/project_services/chat_message/merge_message_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe ChatMessage::MergeMessage do
project_url: 'http://somewhere.com',
object_attributes: {
- title: "Merge Request title\nSecond line",
+ title: "Merge request title\nSecond line",
id: 10,
iid: 100,
assignee_id: 1,
@@ -35,7 +35,7 @@ RSpec.describe ChatMessage::MergeMessage do
context 'open' do
it 'returns a message regarding opening of merge requests' do
expect(subject.pretext).to eq(
- 'Test User (test.user) opened merge request <http://somewhere.com/-/merge_requests/100|!100 *Merge Request title*> in <http://somewhere.com|project_name>')
+ 'Test User (test.user) opened merge request <http://somewhere.com/-/merge_requests/100|!100 *Merge request title*> in <http://somewhere.com|project_name>')
expect(subject.attachments).to be_empty
end
end
@@ -46,7 +46,7 @@ RSpec.describe ChatMessage::MergeMessage do
end
it 'returns a message regarding closing of merge requests' do
expect(subject.pretext).to eq(
- 'Test User (test.user) closed merge request <http://somewhere.com/-/merge_requests/100|!100 *Merge Request title*> in <http://somewhere.com|project_name>')
+ 'Test User (test.user) closed merge request <http://somewhere.com/-/merge_requests/100|!100 *Merge request title*> in <http://somewhere.com|project_name>')
expect(subject.attachments).to be_empty
end
end
@@ -60,12 +60,12 @@ RSpec.describe ChatMessage::MergeMessage do
context 'open' do
it 'returns a message regarding opening of merge requests' do
expect(subject.pretext).to eq(
- 'Test User (test.user) opened merge request [!100 *Merge Request title*](http://somewhere.com/-/merge_requests/100) in [project_name](http://somewhere.com)')
+ 'Test User (test.user) opened merge request [!100 *Merge request title*](http://somewhere.com/-/merge_requests/100) in [project_name](http://somewhere.com)')
expect(subject.attachments).to be_empty
expect(subject.activity).to eq({
- title: 'Merge Request opened by Test User (test.user)',
+ title: 'Merge request opened by Test User (test.user)',
subtitle: 'in [project_name](http://somewhere.com)',
- text: '[!100 *Merge Request title*](http://somewhere.com/-/merge_requests/100)',
+ text: '[!100 *Merge request title*](http://somewhere.com/-/merge_requests/100)',
image: 'http://someavatar.com'
})
end
@@ -78,12 +78,12 @@ RSpec.describe ChatMessage::MergeMessage do
it 'returns a message regarding closing of merge requests' do
expect(subject.pretext).to eq(
- 'Test User (test.user) closed merge request [!100 *Merge Request title*](http://somewhere.com/-/merge_requests/100) in [project_name](http://somewhere.com)')
+ 'Test User (test.user) closed merge request [!100 *Merge request title*](http://somewhere.com/-/merge_requests/100) in [project_name](http://somewhere.com)')
expect(subject.attachments).to be_empty
expect(subject.activity).to eq({
- title: 'Merge Request closed by Test User (test.user)',
+ title: 'Merge request closed by Test User (test.user)',
subtitle: 'in [project_name](http://somewhere.com)',
- text: '[!100 *Merge Request title*](http://somewhere.com/-/merge_requests/100)',
+ text: '[!100 *Merge request title*](http://somewhere.com/-/merge_requests/100)',
image: 'http://someavatar.com'
})
end
@@ -97,7 +97,7 @@ RSpec.describe ChatMessage::MergeMessage do
it 'returns a message regarding completed approval of merge requests' do
expect(subject.pretext).to eq(
- 'Test User (test.user) approved merge request <http://somewhere.com/-/merge_requests/100|!100 *Merge Request title*> '\
+ 'Test User (test.user) approved merge request <http://somewhere.com/-/merge_requests/100|!100 *Merge request title*> '\
'in <http://somewhere.com|project_name>')
expect(subject.attachments).to be_empty
end
@@ -110,7 +110,7 @@ RSpec.describe ChatMessage::MergeMessage do
it 'returns a message regarding revocation of completed approval of merge requests' do
expect(subject.pretext).to eq(
- 'Test User (test.user) unapproved merge request <http://somewhere.com/-/merge_requests/100|!100 *Merge Request title*> '\
+ 'Test User (test.user) unapproved merge request <http://somewhere.com/-/merge_requests/100|!100 *Merge request title*> '\
'in <http://somewhere.com|project_name>')
expect(subject.attachments).to be_empty
end
@@ -123,7 +123,7 @@ RSpec.describe ChatMessage::MergeMessage do
it 'returns a message regarding added approval of merge requests' do
expect(subject.pretext).to eq(
- 'Test User (test.user) added their approval to merge request <http://somewhere.com/-/merge_requests/100|!100 *Merge Request title*> '\
+ 'Test User (test.user) added their approval to merge request <http://somewhere.com/-/merge_requests/100|!100 *Merge request title*> '\
'in <http://somewhere.com|project_name>')
expect(subject.attachments).to be_empty
end
@@ -136,7 +136,7 @@ RSpec.describe ChatMessage::MergeMessage do
it 'returns a message regarding revoking approval of merge requests' do
expect(subject.pretext).to eq(
- 'Test User (test.user) removed their approval from merge request <http://somewhere.com/-/merge_requests/100|!100 *Merge Request title*> '\
+ 'Test User (test.user) removed their approval from merge request <http://somewhere.com/-/merge_requests/100|!100 *Merge request title*> '\
'in <http://somewhere.com|project_name>')
expect(subject.attachments).to be_empty
end
diff --git a/spec/models/project_services/emails_on_push_service_spec.rb b/spec/models/project_services/emails_on_push_service_spec.rb
index 6954a72f9c1..c5927503eec 100644
--- a/spec/models/project_services/emails_on_push_service_spec.rb
+++ b/spec/models/project_services/emails_on_push_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe EmailsOnPushService do
+ let_it_be(:project) { create_default(:project).freeze }
+
describe 'Validations' do
context 'when service is active' do
before do
@@ -19,6 +21,42 @@ RSpec.describe EmailsOnPushService do
it { is_expected.not_to validate_presence_of(:recipients) }
end
+
+ describe 'validates number of recipients' do
+ before do
+ stub_const("#{described_class}::RECIPIENTS_LIMIT", 2)
+ end
+
+ subject(:service) { described_class.new(project: project, recipients: recipients, active: true) }
+
+ context 'valid number of recipients' do
+ let(:recipients) { 'foo@bar.com duplicate@example.com Duplicate@example.com invalid-email' }
+
+ it 'does not count duplicates and invalid emails' do
+ is_expected.to be_valid
+ end
+ end
+
+ context 'invalid number of recipients' do
+ let(:recipients) { 'foo@bar.com bar@foo.com bob@gitlab.com' }
+
+ it { is_expected.not_to be_valid }
+
+ it 'adds an error message' do
+ service.valid?
+
+ expect(service.errors).to contain_exactly('Recipients can\'t exceed 2')
+ end
+
+ context 'when service is not active' do
+ before do
+ service.active = false
+ end
+
+ it { is_expected.to be_valid }
+ end
+ end
+ end
end
describe '.new' do
@@ -39,6 +77,14 @@ RSpec.describe EmailsOnPushService do
end
end
+ describe '.valid_recipients' do
+ let(:recipients) { '<invalid> foobar Valid@recipient.com Dup@lica.te dup@lica.te Dup@Lica.te' }
+
+ it 'removes invalid email addresses and duplicates, keeping the capitalization of the first occurrence' do
+ expect(described_class.valid_recipients(recipients)).to contain_exactly('Valid@recipient.com', 'Dup@lica.te')
+ end
+ end
+
describe '#execute' do
let(:push_data) { { object_kind: 'push' } }
let(:project) { create(:project, :repository) }
diff --git a/spec/models/project_services/jira_service_spec.rb b/spec/models/project_services/jira_service_spec.rb
index 3fc39fd3266..b50fa1edbc3 100644
--- a/spec/models/project_services/jira_service_spec.rb
+++ b/spec/models/project_services/jira_service_spec.rb
@@ -82,11 +82,8 @@ RSpec.describe JiraService do
subject(:fields) { service.fields }
- it 'includes transition help link' do
- transition_id_field = fields.find { |field| field[:name] == 'jira_issue_transition_id' }
-
- expect(transition_id_field[:title]).to eq('Jira workflow transition IDs')
- expect(transition_id_field[:help]).to include('/help/user/project/integrations/jira')
+ it 'returns custom fields' do
+ expect(fields.pluck(:name)).to eq(%w[url api_url username password])
end
end
@@ -460,10 +457,10 @@ RSpec.describe JiraService do
end
context 'with options' do
- let(:issue_url) { "#{url}/rest/api/2/issue/#{issue_key}?expand=renderedFields" }
+ let(:issue_url) { "#{url}/rest/api/2/issue/#{issue_key}?expand=renderedFields,transitions" }
it 'calls the Jira API with the options to get the issue' do
- jira_service.find_issue(issue_key, rendered_fields: true)
+ jira_service.find_issue(issue_key, rendered_fields: true, transitions: true)
expect(WebMock).to have_requested(:get, issue_url)
end
@@ -494,7 +491,7 @@ RSpec.describe JiraService do
end
before do
- allow(jira_service).to receive_messages(jira_issue_transition_id: '999')
+ jira_service.jira_issue_transition_id = '999'
# These stubs are needed to test JiraService#close_issue.
# We close the issue then do another request to API to check if it got closed.
@@ -505,7 +502,7 @@ RSpec.describe JiraService do
allow(closed_issue).to receive(:resolution).and_return(true)
allow(JIRA::Resource::Issue).to receive(:find).and_return(open_issue, closed_issue)
- allow_any_instance_of(JIRA::Resource::Issue).to receive(:key).and_return('JIRA-123')
+ allow_any_instance_of(JIRA::Resource::Issue).to receive(:key).and_return(issue_key)
allow(JIRA::Resource::Remotelink).to receive(:all).and_return([])
WebMock.stub_request(:get, issue_url).with(basic_auth: %w(jira-username jira-password))
@@ -664,6 +661,61 @@ RSpec.describe JiraService do
).once
end
+ context 'when custom transition IDs are blank' do
+ before do
+ jira_service.jira_issue_transition_id = ''
+ end
+
+ it 'does not transition the issue' do
+ close_issue
+
+ expect(WebMock).not_to have_requested(:post, transitions_url)
+ end
+ end
+
+ context 'when using automatic issue transitions' do
+ let(:transitions) do
+ [
+ { id: '1' },
+ { id: '2', to: { statusCategory: { key: 'new' } } },
+ { id: '3', to: { statusCategory: { key: 'done' } } },
+ { id: '4', to: { statusCategory: { key: 'done' } } }
+ ]
+ end
+
+ before do
+ jira_service.jira_issue_transition_automatic = true
+
+ close_issue
+ end
+
+ it 'uses the next transition with a status category of done' do
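+ # Transition '1' has no status category and '2' is 'new', so '3' is the first 'done' transition.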
+ expect(WebMock).to have_requested(:post, transitions_url).with(
+ body: /"id":"3"/
+ ).once
+ end
+
+ context 'when no done transition is available' do
+ let(:transitions) do
+ [
+ { id: '1', to: { statusCategory: { key: 'new' } } }
+ ]
+ end
+
+ it 'does not attempt to transition' do
+ expect(WebMock).not_to have_requested(:post, transitions_url)
+ end
+ end
+
+ context 'when no valid transitions are returned' do
+ let(:transitions) { 'foo' }
+
+ it 'does not attempt to transition' do
+ expect(WebMock).not_to have_requested(:post, transitions_url)
+ end
+ end
+ end
+
context 'when using multiple transition ids' do
before do
allow(jira_service).to receive_messages(jira_issue_transition_id: '1,2,3')
@@ -738,6 +790,7 @@ RSpec.describe JiraService do
describe '#create_cross_reference_note' do
let_it_be(:user) { build_stubbed(:user) }
+
let(:jira_issue) { ExternalIssue.new('JIRA-123', project) }
subject { jira_service.create_cross_reference_note(jira_issue, resource, user) }
@@ -902,4 +955,22 @@ RSpec.describe JiraService do
end
end
end
+
+ describe '#issue_transition_enabled?' do
+ it 'returns true if automatic transitions are enabled' do
+ jira_service.jira_issue_transition_automatic = true
+
+ expect(jira_service.issue_transition_enabled?).to be(true)
+ end
+
+ it 'returns true if custom transitions are set' do
+ jira_service.jira_issue_transition_id = '1, 2, 3'
+
+ expect(jira_service.issue_transition_enabled?).to be(true)
+ end
+
+ it 'returns false if automatic and custom transitions are disabled' do
+ expect(jira_service.issue_transition_enabled?).to be(false)
+ end
+ end
end
diff --git a/spec/models/project_services/jira_tracker_data_spec.rb b/spec/models/project_services/jira_tracker_data_spec.rb
index 46194efcb3d..a698d3fce5f 100644
--- a/spec/models/project_services/jira_tracker_data_spec.rb
+++ b/spec/models/project_services/jira_tracker_data_spec.rb
@@ -11,20 +11,9 @@ RSpec.describe JiraTrackerData do
it { is_expected.to define_enum_for(:deployment_type).with_values([:unknown, :server, :cloud]).with_prefix(:deployment) }
end
- describe 'proxy settings' do
- it { is_expected.to validate_length_of(:proxy_address).is_at_most(2048) }
- it { is_expected.to validate_length_of(:proxy_port).is_at_most(5) }
- it { is_expected.to validate_length_of(:proxy_username).is_at_most(255) }
- it { is_expected.to validate_length_of(:proxy_password).is_at_most(255) }
- end
-
describe 'encrypted attributes' do
subject { described_class.encrypted_attributes.keys }
- it {
- is_expected.to contain_exactly(
- :api_url, :password, :proxy_address, :proxy_password, :proxy_port, :proxy_username, :url, :username
- )
- }
+ it { is_expected.to contain_exactly(:api_url, :password, :url, :username) }
end
end
diff --git a/spec/models/project_services/prometheus_service_spec.rb b/spec/models/project_services/prometheus_service_spec.rb
index 366c3f68e1d..37a6d49ff74 100644
--- a/spec/models/project_services/prometheus_service_spec.rb
+++ b/spec/models/project_services/prometheus_service_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
include ReactiveCachingHelpers
let_it_be_with_reload(:project) { create(:prometheus_project) }
+
let(:service) { project.prometheus_service }
describe "Associations" do
@@ -337,6 +338,7 @@ RSpec.describe PrometheusService, :use_clean_rails_memory_store_caching, :snowpl
context 'cluster belongs to projects group' do
let_it_be(:group) { create(:group) }
+
let(:project) { create(:prometheus_project, group: group) }
let(:cluster) { create(:cluster_for_group, :with_installed_helm, groups: [group]) }
diff --git a/spec/models/project_services/slack_service_spec.rb b/spec/models/project_services/slack_service_spec.rb
index aa5d92e5c61..688a59fcf09 100644
--- a/spec/models/project_services/slack_service_spec.rb
+++ b/spec/models/project_services/slack_service_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe SlackService do
context 'event is not supported for usage log' do
let_it_be(:pipeline) { create(:ci_pipeline) }
+
let(:data) { Gitlab::DataBuilder::Pipeline.build(pipeline) }
it 'does not increase the usage data counter' do
@@ -43,6 +44,7 @@ RSpec.describe SlackService do
context 'issue notification' do
let_it_be(:issue) { create(:issue) }
+
let(:data) { issue.to_hook_data(user) }
it_behaves_like 'increases the usage data counter', 'i_ecosystem_slack_service_issue_notification'
@@ -56,6 +58,7 @@ RSpec.describe SlackService do
context 'deployment notification' do
let_it_be(:deployment) { create(:deployment, user: user) }
+
let(:data) { Gitlab::DataBuilder::Deployment.build(deployment) }
it_behaves_like 'increases the usage data counter', 'i_ecosystem_slack_service_deployment_notification'
@@ -63,6 +66,7 @@ RSpec.describe SlackService do
context 'wiki_page notification' do
let_it_be(:wiki_page) { create(:wiki_page, wiki: project.wiki, message: 'user created page: Awesome wiki_page') }
+
let(:data) { Gitlab::DataBuilder::WikiPage.build(wiki_page, user, 'create') }
it_behaves_like 'increases the usage data counter', 'i_ecosystem_slack_service_wiki_page_notification'
@@ -70,6 +74,7 @@ RSpec.describe SlackService do
context 'merge_request notification' do
let_it_be(:merge_request) { create(:merge_request) }
+
let(:data) { merge_request.to_hook_data(user) }
it_behaves_like 'increases the usage data counter', 'i_ecosystem_slack_service_merge_request_notification'
@@ -77,6 +82,7 @@ RSpec.describe SlackService do
context 'note notification' do
let_it_be(:issue_note) { create(:note_on_issue, note: 'issue note') }
+
let(:data) { Gitlab::DataBuilder::Note.build(issue_note, user) }
it_behaves_like 'increases the usage data counter', 'i_ecosystem_slack_service_note_notification'
@@ -93,6 +99,7 @@ RSpec.describe SlackService do
context 'confidential note notification' do
let_it_be(:confidential_issue_note) { create(:note_on_issue, note: 'issue note', confidential: true) }
+
let(:data) { Gitlab::DataBuilder::Note.build(confidential_issue_note, user) }
it_behaves_like 'increases the usage data counter', 'i_ecosystem_slack_service_confidential_note_notification'
@@ -100,6 +107,7 @@ RSpec.describe SlackService do
context 'confidential issue notification' do
let_it_be(:issue) { create(:issue, confidential: true) }
+
let(:data) { issue.to_hook_data(user) }
it_behaves_like 'increases the usage data counter', 'i_ecosystem_slack_service_confidential_issue_notification'
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index 49d9fd56d70..12c17e699e3 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -891,6 +891,7 @@ RSpec.describe Project, factory_default: :keep do
describe '#get_issue' do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+
let!(:issue) { create(:issue, project: project) }
before_all do
@@ -1351,6 +1352,34 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '.with_remote_mirrors' do
+ let_it_be(:project) { create(:project, :repository) }
+
+ subject { described_class.with_remote_mirrors }
+
+ context 'when some remote mirrors are enabled for the project' do
+ let!(:remote_mirror) { create(:remote_mirror, project: project, enabled: true) }
+
+ it "returns a project" do
+ is_expected.to eq([project])
+ end
+ end
+
+ context 'when remote mirrors exist but are disabled for the project' do
+ let!(:remote_mirror) { create(:remote_mirror, project: project, enabled: false) }
+
+ it "returns a project" do
+ is_expected.to be_empty
+ end
+ end
+
+ context 'when no remote mirrors exist for the project' do
+ it "returns an empty list" do
+ is_expected.to be_empty
+ end
+ end
+ end
+
describe '.with_active_jira_services' do
it 'returns the correct project' do
active_jira_service = create(:jira_service)
@@ -1600,6 +1629,8 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#any_active_runners?' do
+ subject { project.any_active_runners? }
+
context 'shared runners' do
let(:project) { create(:project, shared_runners_enabled: shared_runners_enabled) }
let(:specific_runner) { create(:ci_runner, :project, projects: [project]) }
@@ -1609,19 +1640,19 @@ RSpec.describe Project, factory_default: :keep do
let(:shared_runners_enabled) { false }
it 'has no runners available' do
- expect(project.any_active_runners?).to be_falsey
+ is_expected.to be_falsey
end
it 'has a specific runner' do
specific_runner
- expect(project.any_active_runners?).to be_truthy
+ is_expected.to be_truthy
end
it 'has a shared runner, but it is not allowed to be used' do
shared_runner
- expect(project.any_active_runners?).to be_falsey
+ is_expected.to be_falsey
end
it 'checks the presence of specific runner' do
@@ -1643,7 +1674,7 @@ RSpec.describe Project, factory_default: :keep do
it 'has a shared runner' do
shared_runner
- expect(project.any_active_runners?).to be_truthy
+ is_expected.to be_truthy
end
it 'checks the presence of shared runner' do
@@ -1669,13 +1700,13 @@ RSpec.describe Project, factory_default: :keep do
let(:group_runners_enabled) { false }
it 'has no runners available' do
- expect(project.any_active_runners?).to be_falsey
+ is_expected.to be_falsey
end
it 'has a group runner, but it is not allowed to be used' do
group_runner
- expect(project.any_active_runners?).to be_falsey
+ is_expected.to be_falsey
end
end
@@ -1685,7 +1716,7 @@ RSpec.describe Project, factory_default: :keep do
it 'has a group runner' do
group_runner
- expect(project.any_active_runners?).to be_truthy
+ is_expected.to be_truthy
end
it 'checks the presence of group runner' do
@@ -1703,6 +1734,126 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ describe '#any_online_runners?' do
+ subject { project.any_online_runners? }
+
+ context 'shared runners' do
+ let(:project) { create(:project, shared_runners_enabled: shared_runners_enabled) }
+ let(:specific_runner) { create(:ci_runner, :project, :online, projects: [project]) }
+ let(:shared_runner) { create(:ci_runner, :instance, :online) }
+ let(:offline_runner) { create(:ci_runner, :instance) }
+
+ context 'for shared runners disabled' do
+ let(:shared_runners_enabled) { false }
+
+ it 'has no runners available' do
+ is_expected.to be_falsey
+ end
+
+ it 'has a specific runner' do
+ specific_runner
+
+ is_expected.to be_truthy
+ end
+
+ it 'has a shared runner, but it is not allowed to be used' do
+ shared_runner
+
+ is_expected.to be_falsey
+ end
+
+ it 'checks the presence of specific runner' do
+ specific_runner
+
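+ # The block passed to any_online_runners? decides whether a given online runner counts as a match.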
+ expect(project.any_online_runners? { |runner| runner == specific_runner }).to be_truthy
+ end
+
+ it 'returns false if match cannot be found' do
+ specific_runner
+
+ expect(project.any_online_runners? { false }).to be_falsey
+ end
+
+ it 'returns false if runner is offline' do
+ offline_runner
+
+ is_expected.to be_falsey
+ end
+ end
+
+ context 'for shared runners enabled' do
+ let(:shared_runners_enabled) { true }
+
+ it 'has a shared runner' do
+ shared_runner
+
+ is_expected.to be_truthy
+ end
+
+ it 'checks the presence of shared runner' do
+ shared_runner
+
+ expect(project.any_online_runners? { |runner| runner == shared_runner }).to be_truthy
+ end
+
+ it 'returns false if match cannot be found' do
+ shared_runner
+
+ expect(project.any_online_runners? { false }).to be_falsey
+ end
+ end
+ end
+
+ context 'group runners' do
+ let(:project) { create(:project, group_runners_enabled: group_runners_enabled) }
+ let(:group) { create(:group, projects: [project]) }
+ let(:group_runner) { create(:ci_runner, :group, :online, groups: [group]) }
+ let(:offline_runner) { create(:ci_runner, :group, groups: [group]) }
+
+ context 'for group runners disabled' do
+ let(:group_runners_enabled) { false }
+
+ it 'has no runners available' do
+ is_expected.to be_falsey
+ end
+
+ it 'has a group runner, but it is not allowed to be used' do
+ group_runner
+
+ is_expected.to be_falsey
+ end
+ end
+
+ context 'for group runners enabled' do
+ let(:group_runners_enabled) { true }
+
+ it 'has a group runner' do
+ group_runner
+
+ is_expected.to be_truthy
+ end
+
+ it 'has an offline group runner' do
+ offline_runner
+
+ is_expected.to be_falsey
+ end
+
+ it 'checks the presence of group runner' do
+ group_runner
+
+ expect(project.any_online_runners? { |runner| runner == group_runner }).to be_truthy
+ end
+
+ it 'returns false if match cannot be found' do
+ group_runner
+
+ expect(project.any_online_runners? { false }).to be_falsey
+ end
+ end
+ end
+ end
+
describe '#shared_runners' do
let!(:runner) { create(:ci_runner, :instance) }
@@ -2378,6 +2529,7 @@ RSpec.describe Project, factory_default: :keep do
describe '#latest_pipeline' do
let_it_be(:project) { create(:project, :repository) }
+
let(:second_branch) { project.repository.branches[2] }
let!(:pipeline_for_default_branch) do
@@ -2842,6 +2994,7 @@ RSpec.describe Project, factory_default: :keep do
describe '#emails_disabled?' do
let_it_be(:namespace) { create(:namespace) }
+
let(:project) { build(:project, namespace: namespace, emails_disabled: false) }
context 'emails disabled in group' do
@@ -3162,6 +3315,7 @@ RSpec.describe Project, factory_default: :keep do
describe '#ci_variables_for' do
let_it_be(:project) { create(:project) }
+
let(:environment_scope) { '*' }
let!(:ci_variable) do
@@ -3649,49 +3803,74 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#default_merge_request_target' do
+ let_it_be(:project) { create(:project, :public) }
+
+ let!(:forked) { fork_project(project) }
+
+ context 'when mr_default_target_self is set to true' do
+ it 'returns the current project' do
+ expect(forked.project_setting).to receive(:mr_default_target_self)
+ .and_return(true)
+
+ expect(forked.default_merge_request_target).to eq(forked)
+ end
+ end
+
+ context 'when merge request cannot target upstream' do
+ it 'returns the current project' do
+ expect(forked).to receive(:mr_can_target_upstream?).and_return(false)
+
+ expect(forked.default_merge_request_target).to eq(forked)
+ end
+ end
+
+ context 'when merge request can target upstream' do
+ it 'returns the source project' do
+ expect(forked).to receive(:mr_can_target_upstream?).and_return(true)
+
+ expect(forked.default_merge_request_target).to eq(project)
+ end
+ end
+ end
+
+ describe '#mr_can_target_upstream?' do
+ let_it_be(:project) { create(:project, :public) }
+
+ let!(:forked) { fork_project(project) }
+
context 'when forked from a more visible project' do
- it 'returns the more restrictive project' do
- project = create(:project, :public)
- forked = fork_project(project)
+ it 'cannot target the upstream project' do
forked.visibility = Gitlab::VisibilityLevel::PRIVATE
forked.save!
expect(project.visibility).to eq 'public'
expect(forked.visibility).to eq 'private'
- expect(forked.default_merge_request_target).to eq(forked)
+ expect(forked.mr_can_target_upstream?).to be_falsey
end
end
context 'when forked from a project with disabled merge requests' do
- it 'returns the current project' do
- project = create(:project, :merge_requests_disabled)
- forked = fork_project(project)
+ it 'cannot target the upstream project' do
+ project.project_feature
+ .update!(merge_requests_access_level: ProjectFeature::DISABLED)
expect(forked.forked_from_project).to receive(:merge_requests_enabled?)
.and_call_original
- expect(forked.default_merge_request_target).to eq(forked)
+ expect(forked.mr_can_target_upstream?).to be_falsey
end
end
context 'when forked from a project with enabled merge requests' do
- it 'returns the source project' do
- project = create(:project, :public)
- forked = fork_project(project)
-
- expect(project.visibility).to eq 'public'
- expect(forked.visibility).to eq 'public'
-
- expect(forked.default_merge_request_target).to eq(project)
+ it 'can target the upstream project' do
+ expect(forked.mr_can_target_upstream?).to be_truthy
end
end
context 'when not forked' do
- it 'returns the current project' do
- project = build_stubbed(:project)
-
- expect(project.default_merge_request_target).to eq(project)
+ it 'cannot target the upstream project' do
+ expect(project.mr_can_target_upstream?).to be_falsey
end
end
end
@@ -4007,6 +4186,7 @@ RSpec.describe Project, factory_default: :keep do
include ProjectHelpers
let_it_be(:group) { create(:group) }
+
let!(:project) { create(:project, project_level, namespace: group ) }
let(:user) { create_user_from_membership(project, membership) }
@@ -4109,7 +4289,7 @@ RSpec.describe Project, factory_default: :keep do
subject { described_class.wrap_with_cte(projects) }
it 'wrapped query matches original' do
- expect(subject.to_sql).to match(/^WITH "projects_cte" AS/)
+ expect(subject.to_sql).to match(/^WITH "projects_cte" AS #{Gitlab::Database::AsWithMaterialized.materialized_if_supported}/)
expect(subject).to match_array(projects)
end
end
@@ -4200,7 +4380,7 @@ RSpec.describe Project, factory_default: :keep do
end
it 'does nothing if updates on legacy storage are disabled' do
- stub_feature_flags(pages_update_legacy_storage: false)
+ allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
expect(Gitlab::PagesTransfer).not_to receive(:new)
expect(PagesWorker).not_to receive(:perform_in)
@@ -4272,6 +4452,7 @@ RSpec.describe Project, factory_default: :keep do
context 'legacy storage' do
let_it_be(:project) { create(:project, :repository, :legacy_storage) }
+
let(:gitlab_shell) { Gitlab::Shell.new }
let(:project_storage) { project.send(:storage) }
@@ -4371,6 +4552,7 @@ RSpec.describe Project, factory_default: :keep do
context 'hashed storage' do
let_it_be(:project) { create(:project, :repository, skip_disk_validation: true) }
+
let(:gitlab_shell) { Gitlab::Shell.new }
let(:hash) { Digest::SHA2.hexdigest(project.id.to_s) }
let(:hashed_prefix) { File.join('@hashed', hash[0..1], hash[2..3]) }
@@ -4461,6 +4643,7 @@ RSpec.describe Project, factory_default: :keep do
describe '#has_ci?' do
let_it_be(:project, reload: true) { create(:project) }
+
let(:repository) { double }
before do
@@ -4957,6 +5140,7 @@ RSpec.describe Project, factory_default: :keep do
context 'branch protection' do
let_it_be(:namespace) { create(:namespace) }
+
let(:project) { create(:project, :repository, namespace: namespace) }
before do
@@ -5041,57 +5225,27 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#default_branch' do
- context 'with an empty repository' do
- let_it_be(:project) { create(:project_empty_repo) }
-
- context 'group.default_branch_name is available' do
- let(:project_group) { create(:group) }
- let(:project) { create(:project, path: 'avatar', namespace: project_group) }
+ context 'with default_branch_name' do
+ let_it_be_with_refind(:root_group) { create(:group) }
+ let_it_be_with_refind(:project_group) { create(:group, parent: root_group) }
+ let_it_be_with_refind(:project) { create(:project, path: 'avatar', namespace: project_group) }
- before do
- expect(Gitlab::CurrentSettings)
- .not_to receive(:default_branch_name)
-
- expect(project.group)
- .to receive(:default_branch_name)
- .and_return('example_branch')
- end
-
- it 'returns the group default value' do
- expect(project.default_branch).to eq('example_branch')
- end
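+ # Precedence: the project group's setting wins over the root group's, which wins over the instance default.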
+ where(:instance_branch, :root_group_branch, :project_group_branch, :project_branch) do
+ '' | nil | nil | nil
+ nil | nil | nil | nil
+ 'main' | nil | nil | 'main'
+ 'main' | 'root_branch' | nil | 'root_branch'
+ 'main' | 'root_branch' | 'group_branch' | 'group_branch'
end
- context 'Gitlab::CurrentSettings.default_branch_name is available' do
+ with_them do
before do
- expect(Gitlab::CurrentSettings)
- .to receive(:default_branch_name)
- .and_return(example_branch_name)
- end
-
- context 'is missing or nil' do
- let(:example_branch_name) { nil }
-
- it "returns nil" do
- expect(project.default_branch).to be_nil
- end
- end
-
- context 'is blank' do
- let(:example_branch_name) { '' }
-
- it 'returns nil' do
- expect(project.default_branch).to be_nil
- end
+ allow(Gitlab::CurrentSettings).to receive(:default_branch_name).and_return(instance_branch)
+ root_group.namespace_settings.update!(default_branch_name: root_group_branch)
+ project_group.namespace_settings.update!(default_branch_name: project_group_branch)
end
- context 'is present' do
- let(:example_branch_name) { 'example_branch_name' }
-
- it 'returns the expected branch name' do
- expect(project.default_branch).to eq(example_branch_name)
- end
- end
+ it { expect(project.default_branch).to eq(project_branch) }
end
end
end
@@ -5636,16 +5790,34 @@ RSpec.describe Project, factory_default: :keep do
end
describe '#find_or_initialize_services' do
- before do
- allow(Service).to receive(:available_services_names).and_return(%w[prometheus pushover teamcity])
- allow(subject).to receive(:disabled_services).and_return(%w[prometheus])
+ let_it_be(:subject) { create(:project) }
+
+ it 'avoids N+1 database queries' do
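+ # Initializing every service for the project should stay within a small, constant number of queries.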
+ control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_services }.count
+
+ expect(control_count).to be <= 4
end
- it 'returns only enabled services' do
- services = subject.find_or_initialize_services
+ it 'avoids N+1 database queries with more available services' do
+ allow(Service).to receive(:available_services_names).and_return(%w[pushover])
+ control_count = ActiveRecord::QueryRecorder.new { subject.find_or_initialize_services }
- expect(services.count).to eq(2)
- expect(services.map(&:title)).to eq(['JetBrains TeamCity CI', 'Pushover'])
+ allow(Service).to receive(:available_services_names).and_call_original
+ expect { subject.find_or_initialize_services }.not_to exceed_query_limit(control_count)
+ end
+
+ context 'with disabled services' do
+ before do
+ allow(Service).to receive(:available_services_names).and_return(%w[prometheus pushover teamcity])
+ allow(subject).to receive(:disabled_services).and_return(%w[prometheus])
+ end
+
+ it 'returns only enabled services sorted' do
+ services = subject.find_or_initialize_services
+
+ expect(services.size).to eq(2)
+ expect(services.map(&:title)).to eq(['JetBrains TeamCity', 'Pushover'])
+ end
end
end
@@ -6074,12 +6246,15 @@ RSpec.describe Project, factory_default: :keep do
project.set_first_pages_deployment!(deployment)
expect(project.pages_metadatum.reload.pages_deployment).to eq(deployment)
+ expect(project.pages_metadatum.reload.deployed).to eq(true)
end
it "updates the existing metadara record with deployment" do
expect do
project.set_first_pages_deployment!(deployment)
end.to change { project.pages_metadatum.reload.pages_deployment }.from(nil).to(deployment)
+
+ expect(project.pages_metadatum.reload.deployed).to eq(true)
end
it 'only updates metadata for this project' do
@@ -6088,6 +6263,8 @@ RSpec.describe Project, factory_default: :keep do
expect do
project.set_first_pages_deployment!(deployment)
end.not_to change { other_project.pages_metadatum.reload.pages_deployment }.from(nil)
+
+ expect(other_project.pages_metadatum.reload.deployed).to eq(false)
end
it 'does nothing if metadata already references some deployment' do
@@ -6098,6 +6275,14 @@ RSpec.describe Project, factory_default: :keep do
project.set_first_pages_deployment!(deployment)
end.not_to change { project.pages_metadatum.reload.pages_deployment }.from(existing_deployment)
end
+
+ it 'marks project as not deployed if deployment is nil' do
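+ # Mark the project as deployed first so the change back to false is observable.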
+ project.mark_pages_as_deployed
+
+ expect do
+ project.set_first_pages_deployment!(nil)
+ end.to change { project.pages_metadatum.reload.deployed }.from(true).to(false)
+ end
end
describe '#has_pool_repository?' do
@@ -6556,6 +6741,7 @@ RSpec.describe Project, factory_default: :keep do
describe '#latest_jira_import' do
let_it_be(:project) { create(:project) }
+
context 'when no jira imports' do
it 'returns nil' do
expect(project.latest_jira_import).to be nil
diff --git a/spec/models/protected_tag_spec.rb b/spec/models/protected_tag_spec.rb
index 7bc62b1d0e7..e5cee6f18cd 100644
--- a/spec/models/protected_tag_spec.rb
+++ b/spec/models/protected_tag_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe ProtectedTag do
describe 'Associations' do
- it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:project).touch(true) }
end
describe 'Validation' do
diff --git a/spec/models/raw_usage_data_spec.rb b/spec/models/raw_usage_data_spec.rb
index 7acfb8c19af..6ff4c6eb19b 100644
--- a/spec/models/raw_usage_data_spec.rb
+++ b/spec/models/raw_usage_data_spec.rb
@@ -13,14 +13,20 @@ RSpec.describe RawUsageData do
it { is_expected.to validate_uniqueness_of(:recorded_at) }
end
- describe '#update_sent_at!' do
+ describe '#update_version_metadata!' do
let(:raw_usage_data) { create(:raw_usage_data) }
it 'updates sent_at' do
- raw_usage_data.update_sent_at!
+ raw_usage_data.update_version_metadata!(usage_data_id: 123)
expect(raw_usage_data.sent_at).not_to be_nil
end
+
+ it 'updates version_usage_data_id_value' do
+ raw_usage_data.update_version_metadata!(usage_data_id: 123)
+
+ expect(raw_usage_data.version_usage_data_id_value).not_to be_nil
+ end
end
end
end
diff --git a/spec/models/release_highlight_spec.rb b/spec/models/release_highlight_spec.rb
index 60087278671..673451b5e76 100644
--- a/spec/models/release_highlight_spec.rb
+++ b/spec/models/release_highlight_spec.rb
@@ -13,26 +13,6 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
ReleaseHighlight.instance_variable_set(:@file_paths, nil)
end
- describe '.for_version' do
- subject { ReleaseHighlight.for_version(version: version) }
-
- let(:version) { '1.1' }
-
- context 'with version param that exists' do
- it 'returns items from that version' do
- expect(subject.items.first['title']).to eq("It's gonna be a bright")
- end
- end
-
- context 'with version param that does NOT exist' do
- let(:version) { '84.0' }
-
- it 'returns nil' do
- expect(subject).to be_nil
- end
- end
- end
-
describe '.paginated' do
let(:dot_com) { false }
@@ -143,28 +123,27 @@ RSpec.describe ReleaseHighlight, :clean_gitlab_redis_cache do
end
end
- describe '.versions' do
- subject { described_class.versions }
+ describe '.most_recent_version_digest' do
+ subject { ReleaseHighlight.most_recent_version_digest }
it 'uses process memory cache' do
- expect(Gitlab::ProcessMemoryCache.cache_backend).to receive(:fetch).with("release_highlight:versions:#{Gitlab.revision}", { expires_in: described_class::CACHE_DURATION })
+ expect(Gitlab::ProcessMemoryCache.cache_backend).to receive(:fetch).with("release_highlight:most_recent_version_digest:#{Gitlab.revision}", expires_in: described_class::CACHE_DURATION)
subject
end
- it 'returns versions from the file paths' do
- expect(subject).to eq(['1.5', '1.2', '1.1'])
+ context 'when recent release items exist' do
+ it 'returns a digest from the release of the first item of the most recent file' do
+ # this value is coming from fixture data
+ expect(subject).to eq(Digest::SHA256.hexdigest('01.05'))
+ end
end
- context 'when there are more than 12 versions' do
- let(:file_paths) do
- i = 0
- Array.new(20) { "20201225_01_#{i += 1}.yml" }
- end
+ context 'when recent release items do NOT exist' do
+ it 'returns nil' do
+ allow(ReleaseHighlight).to receive(:paginated).and_return(nil)
- it 'limits to 12 versions' do
- allow(ReleaseHighlight).to receive(:file_paths).and_return(file_paths)
- expect(subject.count).to eq(12)
+ expect(subject).to be_nil
end
end
end
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 209ac471210..540a8068b20 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Release do
it { expect(release).to be_valid }
describe 'associations' do
- it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:project).touch(true) }
it { is_expected.to belong_to(:author).class_name('User') }
it { is_expected.to have_many(:links).class_name('Releases::Link') }
it { is_expected.to have_many(:milestones) }
diff --git a/spec/models/remote_mirror_spec.rb b/spec/models/remote_mirror_spec.rb
index 4c3151f431c..d6951b5926e 100644
--- a/spec/models/remote_mirror_spec.rb
+++ b/spec/models/remote_mirror_spec.rb
@@ -263,6 +263,30 @@ RSpec.describe RemoteMirror, :mailer do
end
end
+ describe '#hard_retry!' do
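+ # update_column bypasses validations, so the mirror can be persisted with an invalid URL.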
+ let(:remote_mirror) { create(:remote_mirror).tap { |mirror| mirror.update_column(:url, 'invalid') } }
+
+ it 'transitions an invalid mirror to the to_retry state' do
+ remote_mirror.hard_retry!('Invalid')
+
+ expect(remote_mirror.update_status).to eq('to_retry')
+ expect(remote_mirror.last_error).to eq('Invalid')
+ end
+ end
+
+ describe '#hard_fail!' do
+ let(:remote_mirror) { create(:remote_mirror).tap { |mirror| mirror.update_column(:url, 'invalid') } }
+
+ it 'transitions an invalid mirror to the failed state' do
+ remote_mirror.hard_fail!('Invalid')
+
+ expect(remote_mirror.update_status).to eq('failed')
+ expect(remote_mirror.last_error).to eq('Invalid')
+ expect(remote_mirror.last_update_at).not_to be_nil
+ expect(RemoteMirrorNotificationWorker.jobs).not_to be_empty
+ end
+ end
+
context 'when remote mirror gets destroyed' do
it 'removes remote' do
mirror = create_mirror(url: 'http://foo:bar@test.com')
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 84347ec2a51..a739f523008 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -170,6 +170,22 @@ RSpec.describe Repository do
end
end
+ describe '#search_branch_names' do
+ subject(:search_branch_names) { repository.search_branch_names('conflict-*') }
+
+ it 'returns matching branch names' do
+ expect(search_branch_names).to contain_exactly(
+ 'conflict-binary-file',
+ 'conflict-resolvable',
+ 'conflict-contains-conflict-markers',
+ 'conflict-missing-side',
+ 'conflict-start',
+ 'conflict-non-utf8',
+ 'conflict-too-large'
+ )
+ end
+ end
+
describe '#list_last_commits_for_tree' do
let(:path_to_commit) do
{
@@ -977,6 +993,57 @@ RSpec.describe Repository do
end
end
+ describe '#search_files_by_wildcard_path' do
+ let(:ref) { 'master' }
+
+ subject(:result) { repository.search_files_by_wildcard_path(path, ref) }
+
+ context 'when specifying a normal path' do
+ let(:path) { 'files/images/logo-black.png' }
+
+ it 'returns the path' do
+ expect(result).to eq(['files/images/logo-black.png'])
+ end
+ end
+
+ context 'when specifying a path with wildcard' do
+ let(:path) { 'files/*/*.png' }
+
+ it 'returns all files matching the path' do
+ expect(result).to contain_exactly('files/images/logo-black.png',
+ 'files/images/logo-white.png')
+ end
+ end
+
+ context 'when specifying an extension with wildcard' do
+ let(:path) { '*.rb' }
+
+ it 'returns all files matching the extension' do
+ expect(result).to contain_exactly('encoding/russian.rb',
+ 'files/ruby/popen.rb',
+ 'files/ruby/regex.rb',
+ 'files/ruby/version_info.rb')
+ end
+ end
+
+ context 'when sending regexp' do
+ let(:path) { '.*\.rb' }
+
+ it 'ignores the regexp and returns an empty array' do
+ expect(result).to eq([])
+ end
+ end
+
+ context 'when sending another ref' do
+ let(:path) { 'files' }
+ let(:ref) { 'other-branch' }
+
+ it 'returns an empty array' do
+ expect(result).to eq([])
+ end
+ end
+ end
+
describe '#async_remove_remote' do
before do
masterrev = repository.find_branch('master').dereferenced_target
@@ -1036,7 +1103,8 @@ RSpec.describe Repository do
describe '#create_ref' do
it 'redirects the call to write_ref' do
- ref, ref_path = '1', '2'
+ ref = '1'
+ ref_path = '2'
expect(repository.raw_repository).to receive(:write_ref).with(ref_path, ref)
@@ -1647,12 +1715,13 @@ RSpec.describe Repository do
end
it 'writes merge of source SHA and first parent ref to MR merge_ref_path' do
- merge_commit_id = repository.merge_to_ref(user,
- merge_request.diff_head_sha,
- merge_request,
- merge_request.merge_ref_path,
- 'Custom message',
- merge_request.target_branch_ref)
+ merge_commit_id =
+ repository.merge_to_ref(user,
+ source_sha: merge_request.diff_head_sha,
+ branch: merge_request.target_branch,
+ target_ref: merge_request.merge_ref_path,
+ message: 'Custom message',
+ first_parent_ref: merge_request.target_branch_ref)
merge_commit = repository.commit(merge_commit_id)
diff --git a/spec/models/sent_notification_spec.rb b/spec/models/sent_notification_spec.rb
index aeafb49f8b5..aa515952c2b 100644
--- a/spec/models/sent_notification_spec.rb
+++ b/spec/models/sent_notification_spec.rb
@@ -72,8 +72,8 @@ RSpec.describe SentNotification do
it_behaves_like 'a successful sent notification'
- it 'does not set in_reply_to_discussion_id' do
- expect(subject.in_reply_to_discussion_id).to be_nil
+ it 'sets in_reply_to_discussion_id' do
+ expect(subject.in_reply_to_discussion_id).to eq(note.discussion_id)
end
end
end
@@ -212,10 +212,10 @@ RSpec.describe SentNotification do
subject { described_class.record_note(note, note.author.id) }
- it 'creates a comment on the issue' do
+ it 'converts the comment to a discussion on the issue' do
new_note = subject.create_reply('Test')
expect(new_note.in_reply_to?(note)).to be_truthy
- expect(new_note.discussion_id).not_to eq(note.discussion_id)
+ expect(new_note.discussion_id).to eq(note.discussion_id)
end
end
@@ -247,10 +247,10 @@ RSpec.describe SentNotification do
subject { described_class.record_note(note, note.author.id) }
- it 'creates a comment on the merge request' do
+ it 'converts the comment to a discussion on the merge request' do
new_note = subject.create_reply('Test')
expect(new_note.in_reply_to?(note)).to be_truthy
- expect(new_note.discussion_id).not_to eq(note.discussion_id)
+ expect(new_note.discussion_id).to eq(note.discussion_id)
end
end
diff --git a/spec/models/service_spec.rb b/spec/models/service_spec.rb
index 9ffefd4bbf7..d8eb4ebc432 100644
--- a/spec/models/service_spec.rb
+++ b/spec/models/service_spec.rb
@@ -202,6 +202,16 @@ RSpec.describe Service do
end
end
+ describe '#project_level?' do
+ it 'is true when service has a project' do
+ expect(build(:service, project: project)).to be_project_level
+ end
+
+ it 'is false when service has no project' do
+ expect(build(:service, project: nil)).not_to be_project_level
+ end
+ end
+
describe '.find_or_initialize_non_project_specific_integration' do
let!(:service1) { create(:jira_service, project_id: nil, group_id: group.id) }
let!(:service2) { create(:jira_service) }
diff --git a/spec/models/sidebars/menu_spec.rb b/spec/models/sidebars/menu_spec.rb
new file mode 100644
index 00000000000..320f5f1ad1e
--- /dev/null
+++ b/spec/models/sidebars/menu_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Menu do
+ let(:menu) { described_class.new(context) }
+ let(:context) { Sidebars::Context.new(current_user: nil, container: nil) }
+
+ describe '#all_active_routes' do
+ it 'gathers all active routes of items and the current menu' do
+ menu_item1 = Sidebars::MenuItem.new(context)
+ menu_item2 = Sidebars::MenuItem.new(context)
+ menu_item3 = Sidebars::MenuItem.new(context)
+ menu.add_item(menu_item1)
+ menu.add_item(menu_item2)
+ menu.add_item(menu_item3)
+
+ allow(menu).to receive(:active_routes).and_return({ path: 'foo' })
+ allow(menu_item1).to receive(:active_routes).and_return({ path: %w(bar test) })
+ allow(menu_item2).to receive(:active_routes).and_return({ controller: 'fooc' })
+ allow(menu_item3).to receive(:active_routes).and_return({ controller: 'barc' })
+
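+ # Routes are merged per key: :path combines 'foo' with %w(bar test), :controller combines 'fooc' and 'barc'.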
+ expect(menu.all_active_routes).to eq({ path: %w(foo bar test), controller: %w(fooc barc) })
+ end
+
+ it 'does not include routes for non renderable items' do
+ menu_item = Sidebars::MenuItem.new(context)
+ menu.add_item(menu_item)
+
+ allow(menu).to receive(:active_routes).and_return({ path: 'foo' })
+ allow(menu_item).to receive(:render?).and_return(false)
+ allow(menu_item).to receive(:active_routes).and_return({ controller: 'bar' })
+
+ expect(menu.all_active_routes).to eq({ path: ['foo'] })
+ end
+ end
+
+ describe '#render?' do
+ context 'when the menu has no items' do
+ it 'returns true' do
+ expect(menu.render?).to be true
+ end
+ end
+
+ context 'when the menu has items' do
+ let(:menu_item) { Sidebars::MenuItem.new(context) }
+
+ before do
+ menu.add_item(menu_item)
+ end
+
+ context 'when items are not renderable' do
+ it 'returns false' do
+ allow(menu_item).to receive(:render?).and_return(false)
+
+ expect(menu.render?).to be false
+ end
+ end
+
+ context 'when there are renderable items' do
+ it 'returns true' do
+ expect(menu.render?).to be true
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/sidebars/panel_spec.rb b/spec/models/sidebars/panel_spec.rb
new file mode 100644
index 00000000000..0e539460810
--- /dev/null
+++ b/spec/models/sidebars/panel_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Panel do
+ let(:context) { Sidebars::Context.new(current_user: nil, container: nil) }
+ let(:panel) { Sidebars::Panel.new(context) }
+ let(:menu1) { Sidebars::Menu.new(context) }
+ let(:menu2) { Sidebars::Menu.new(context) }
+
+ describe '#renderable_menus' do
+ it 'returns only renderable menus' do
+ panel.add_menu(menu1)
+ panel.add_menu(menu2)
+
+ allow(menu1).to receive(:render?).and_return(true)
+ allow(menu2).to receive(:render?).and_return(false)
+
+ expect(panel.renderable_menus).to eq([menu1])
+ end
+ end
+
+ describe '#has_renderable_menus?' do
+ it 'returns false when there are no renderable menus' do
+ expect(panel.has_renderable_menus?).to be false
+ end
+
+ it 'returns true when there are renderable menus' do
+ panel.add_menu(menu1)
+
+ expect(panel.has_renderable_menus?).to be true
+ end
+ end
+end
diff --git a/spec/models/sidebars/projects/context_spec.rb b/spec/models/sidebars/projects/context_spec.rb
new file mode 100644
index 00000000000..44578ae1583
--- /dev/null
+++ b/spec/models/sidebars/projects/context_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Context do
+ let(:project) { build(:project) }
+
+ subject { described_class.new(current_user: nil, container: project) }
+
+ it 'sets project attribute reader' do
+ expect(subject.project).to eq(project)
+ end
+end
diff --git a/spec/models/sidebars/projects/menus/learn_gitlab/menu_spec.rb b/spec/models/sidebars/projects/menus/learn_gitlab/menu_spec.rb
new file mode 100644
index 00000000000..bc1815558d3
--- /dev/null
+++ b/spec/models/sidebars/projects/menus/learn_gitlab/menu_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::LearnGitlab::Menu do
+ let(:project) { build(:project) }
+ let(:experiment_enabled) { true }
+ let(:context) { Sidebars::Projects::Context.new(current_user: nil, container: project, learn_gitlab_experiment_enabled: experiment_enabled) }
+
+ subject { described_class.new(context) }
+
+ it 'does not contain any sub menu' do
+ expect(subject.instance_variable_get(:@items)).to be_empty
+ end
+
+ describe '#render?' do
+ context 'when learn gitlab experiment is enabled' do
+ it 'returns true' do
+ expect(subject.render?).to eq true
+ end
+ end
+
+ context 'when learn gitlab experiment is disabled' do
+ let(:experiment_enabled) { false }
+
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+ end
+end
diff --git a/spec/models/sidebars/projects/menus/project_overview/menu_items/releases_spec.rb b/spec/models/sidebars/projects/menus/project_overview/menu_items/releases_spec.rb
new file mode 100644
index 00000000000..db124c2252e
--- /dev/null
+++ b/spec/models/sidebars/projects/menus/project_overview/menu_items/releases_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::ProjectOverview::MenuItems::Releases do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+
+ subject { described_class.new(context) }
+
+ describe '#render?' do
+ context 'when project repository is empty' do
+ it 'returns false' do
+ allow(project).to receive(:empty_repo?).and_return(true)
+
+ expect(subject.render?).to eq false
+ end
+ end
+
+ context 'when project repository is not empty' do
+ context 'when user can read releases' do
+ it 'returns true' do
+ expect(subject.render?).to eq true
+ end
+ end
+
+ context 'when user cannot read releases' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/sidebars/projects/menus/project_overview/menu_spec.rb b/spec/models/sidebars/projects/menus/project_overview/menu_spec.rb
new file mode 100644
index 00000000000..105a28ce953
--- /dev/null
+++ b/spec/models/sidebars/projects/menus/project_overview/menu_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::ProjectOverview::Menu do
+ let(:project) { build(:project) }
+ let(:context) { Sidebars::Projects::Context.new(current_user: nil, container: project) }
+
+ subject { described_class.new(context) }
+
+ it 'has the required items' do
+ items = subject.instance_variable_get(:@items)
+
+ expect(items[0]).to be_a(Sidebars::Projects::Menus::ProjectOverview::MenuItems::Details)
+ expect(items[1]).to be_a(Sidebars::Projects::Menus::ProjectOverview::MenuItems::Activity)
+ expect(items[2]).to be_a(Sidebars::Projects::Menus::ProjectOverview::MenuItems::Releases)
+ end
+end
diff --git a/spec/models/sidebars/projects/menus/repository/menu_spec.rb b/spec/models/sidebars/projects/menus/repository/menu_spec.rb
new file mode 100644
index 00000000000..04eb3357a6f
--- /dev/null
+++ b/spec/models/sidebars/projects/menus/repository/menu_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Menus::Repository::Menu do
+ let_it_be(:project) { create(:project, :repository) }
+
+ let(:user) { project.owner }
+ let(:context) { Sidebars::Projects::Context.new(current_user: user, container: project) }
+
+ subject { described_class.new(context) }
+
+ describe '#render?' do
+ context 'when project repository is empty' do
+ it 'returns false' do
+ allow(project).to receive(:empty_repo?).and_return(true)
+
+ expect(subject.render?).to eq false
+ end
+ end
+
+ context 'when project repository is not empty' do
+ context 'when user can download code' do
+ it 'returns true' do
+ expect(subject.render?).to eq true
+ end
+ end
+
+ context 'when user cannot download code' do
+ let(:user) { nil }
+
+ it 'returns false' do
+ expect(subject.render?).to eq false
+ end
+ end
+ end
+ end
+end
diff --git a/spec/models/sidebars/projects/panel_spec.rb b/spec/models/sidebars/projects/panel_spec.rb
new file mode 100644
index 00000000000..bad9b17bc83
--- /dev/null
+++ b/spec/models/sidebars/projects/panel_spec.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::Projects::Panel do
+ let(:project) { build(:project) }
+ let(:context) { Sidebars::Projects::Context.new(current_user: nil, container: project) }
+
+ subject { described_class.new(context) }
+
+ it 'has a scope menu' do
+ expect(subject.scope_menu).to be_a(Sidebars::Projects::Menus::Scope::Menu)
+ end
+end
diff --git a/spec/models/timelog_spec.rb b/spec/models/timelog_spec.rb
index e9019b55635..6a252b444f9 100644
--- a/spec/models/timelog_spec.rb
+++ b/spec/models/timelog_spec.rb
@@ -56,9 +56,9 @@ RSpec.describe Timelog do
group = create(:group)
subgroup = create(:group, parent: group)
- create(:timelog, issue: create(:issue, project: create(:project)))
- timelog1 = create(:timelog, issue: create(:issue, project: create(:project, group: group)))
- timelog2 = create(:timelog, issue: create(:issue, project: create(:project, group: subgroup)))
+ create(:issue_timelog)
+ timelog1 = create(:issue_timelog, issue: create(:issue, project: create(:project, group: group)))
+ timelog2 = create(:issue_timelog, issue: create(:issue, project: create(:project, group: subgroup)))
expect(described_class.for_issues_in_group(group)).to contain_exactly(timelog1, timelog2)
end
@@ -66,9 +66,9 @@ RSpec.describe Timelog do
describe 'between_times' do
it 'returns collection of timelogs within given times' do
- create(:timelog, spent_at: 65.days.ago)
- timelog1 = create(:timelog, spent_at: 15.days.ago)
- timelog2 = create(:timelog, spent_at: 5.days.ago)
+ create(:issue_timelog, spent_at: 65.days.ago)
+ timelog1 = create(:issue_timelog, spent_at: 15.days.ago)
+ timelog2 = create(:issue_timelog, spent_at: 5.days.ago)
timelogs = described_class.between_times(20.days.ago, 1.day.ago)
expect(timelogs).to contain_exactly(timelog1, timelog2)
diff --git a/spec/models/todo_spec.rb b/spec/models/todo_spec.rb
index 855b1b0f3f7..c4146b347d7 100644
--- a/spec/models/todo_spec.rb
+++ b/spec/models/todo_spec.rb
@@ -376,6 +376,22 @@ RSpec.describe Todo do
end
end
+ describe '.count_grouped_by_user_id_and_state' do
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+
+ before do
+ create(:todo, user: user1, state: :pending)
+ create(:todo, user: user1, state: :pending)
+ create(:todo, user: user1, state: :done)
+ create(:todo, user: user2, state: :pending)
+ end
+
+ specify do
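+ # Counts are keyed by [user_id, state] pairs.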
+ expect(Todo.count_grouped_by_user_id_and_state).to eq({ [user1.id, "done"] => 1, [user1.id, "pending"] => 2, [user2.id, "pending"] => 1 })
+ end
+ end
+
describe '.any_for_target?' do
it 'returns true if there are todos for a given target' do
todo = create(:todo)
@@ -435,4 +451,12 @@ RSpec.describe Todo do
end
end
end
+
+ describe '.pluck_user_id' do
+ subject { described_class.pluck_user_id }
+
+ let_it_be(:todo) { create(:todo) }
+
+ it { is_expected.to eq([todo.user_id]) }
+ end
end
diff --git a/spec/models/user_callout_spec.rb b/spec/models/user_callout_spec.rb
index cdf70dd5190..eb66f074293 100644
--- a/spec/models/user_callout_spec.rb
+++ b/spec/models/user_callout_spec.rb
@@ -18,36 +18,14 @@ RSpec.describe UserCallout do
it { is_expected.to validate_uniqueness_of(:feature_name).scoped_to(:user_id).ignoring_case_sensitivity }
end
- describe 'scopes' do
- describe '.with_feature_name' do
- let(:second_feature_name) { described_class.feature_names.keys.second }
- let(:last_feature_name) { described_class.feature_names.keys.last }
-
- it 'returns callout for requested feature name only' do
- callout1 = create(:user_callout, feature_name: second_feature_name )
- create(:user_callout, feature_name: last_feature_name )
-
- callouts = described_class.with_feature_name(second_feature_name)
-
- expect(callouts).to match_array([callout1])
- end
- end
-
- describe '.with_dismissed_after' do
- let(:some_feature_name) { described_class.feature_names.keys.second }
- let(:callout_dismissed_month_ago) { create(:user_callout, feature_name: some_feature_name, dismissed_at: 1.month.ago )}
-
- it 'does not return callouts dismissed before specified date' do
- callouts = described_class.with_dismissed_after(15.days.ago)
-
- expect(callouts).to match_array([])
- end
-
- it 'returns callouts dismissed after specified date' do
- callouts = described_class.with_dismissed_after(2.months.ago)
-
- expect(callouts).to match_array([callout_dismissed_month_ago])
- end
+ describe '#dismissed_after?' do
+ let(:some_feature_name) { described_class.feature_names.keys.second }
+ let(:callout_dismissed_month_ago) { create(:user_callout, feature_name: some_feature_name, dismissed_at: 1.month.ago) }
+ let(:callout_dismissed_day_ago) { create(:user_callout, feature_name: some_feature_name, dismissed_at: 1.day.ago) }
+
+ it 'returns whether the callout was dismissed after the specified date' do
+ expect(callout_dismissed_month_ago.dismissed_after?(15.days.ago)).to eq(false)
+ expect(callout_dismissed_day_ago.dismissed_after?(15.days.ago)).to eq(true)
end
end
end
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 5f2842c9d16..3abf2a651a0 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -85,6 +85,7 @@ RSpec.describe User do
it { is_expected.to have_many(:group_members) }
it { is_expected.to have_many(:groups) }
it { is_expected.to have_many(:keys).dependent(:destroy) }
+ it { is_expected.to have_many(:expired_today_and_unnotified_keys) }
it { is_expected.to have_many(:deploy_keys).dependent(:nullify) }
it { is_expected.to have_many(:group_deploy_keys) }
it { is_expected.to have_many(:events).dependent(:delete_all) }
@@ -108,6 +109,7 @@ RSpec.describe User do
it { is_expected.to have_many(:merge_request_assignees).inverse_of(:assignee) }
it { is_expected.to have_many(:merge_request_reviewers).inverse_of(:reviewer) }
it { is_expected.to have_many(:created_custom_emoji).inverse_of(:creator) }
+ it { is_expected.to have_many(:in_product_marketing_emails) }
describe "#user_detail" do
it 'does not persist `user_detail` by default' do
@@ -999,6 +1001,27 @@ RSpec.describe User do
end
end
+ context 'SSH key expiration scopes' do
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
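+ # Only keys without a delivered notification should surface: user1's key expired today, user2's key expires soon.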
+ let_it_be(:expired_today_not_notified) { create(:key, expires_at: Time.current, user: user1) }
+ let_it_be(:expired_today_already_notified) { create(:key, expires_at: Time.current, user: user2, expiry_notification_delivered_at: Time.current) }
+ let_it_be(:expiring_soon_not_notified) { create(:key, expires_at: 2.days.from_now, user: user2) }
+ let_it_be(:expiring_soon_notified) { create(:key, expires_at: 2.days.from_now, user: user1, before_expiry_notification_delivered_at: Time.current) }
+
+ describe '.with_ssh_key_expired_today' do
+ it 'returns users whose key has expired today' do
+ expect(described_class.with_ssh_key_expired_today).to contain_exactly(user1)
+ end
+ end
+
+ describe '.with_ssh_key_expiring_soon' do
+ it 'returns users whose keys will expire soon' do
+ expect(described_class.with_ssh_key_expiring_soon).to contain_exactly(user2)
+ end
+ end
+ end
+
describe '.active_without_ghosts' do
let_it_be(:user1) { create(:user, :external) }
let_it_be(:user2) { create(:user, state: 'blocked') }
@@ -1766,7 +1789,7 @@ RSpec.describe User do
end
describe 'blocking user' do
- let(:user) { create(:user, name: 'John Smith') }
+ let_it_be_with_refind(:user) { create(:user, name: 'John Smith') }
it 'blocks user' do
user.block
@@ -1776,17 +1799,22 @@ RSpec.describe User do
context 'when user has running CI pipelines' do
let(:service) { double }
+ let(:pipelines) { build_list(:ci_pipeline, 3, :running) }
- before do
- pipeline = create(:ci_pipeline, :running, user: user)
- create(:ci_build, :running, pipeline: pipeline)
+ it 'aborts all running pipelines and related jobs' do
+ expect(user).to receive(:pipelines).and_return(pipelines)
+ expect(Ci::DropPipelineService).to receive(:new).and_return(service)
+ expect(service).to receive(:execute_async_for_all).with(pipelines, :user_blocked, user)
+
+ user.block
end
+ end
- it 'cancels all running pipelines and related jobs' do
- expect(Ci::CancelUserPipelinesService).to receive(:new).and_return(service)
- expect(service).to receive(:execute).with(user)
+ context 'when user has active CI pipeline schedules' do
+ let_it_be(:schedule) { create(:ci_pipeline_schedule, active: true, owner: user) }
- user.block
+ it 'disables any pipeline schedules' do
+ expect { user.block }.to change { schedule.reload.active? }.to(false)
end
end
end
@@ -2502,32 +2530,12 @@ RSpec.describe User do
describe "#clear_avatar_caches" do
let(:user) { create(:user) }
- context "when :avatar_cache_for_email flag is enabled" do
- before do
- stub_feature_flags(avatar_cache_for_email: true)
- end
+ it "clears the avatar cache when saving" do
+ allow(user).to receive(:avatar_changed?).and_return(true)
- it "clears the avatar cache when saving" do
- allow(user).to receive(:avatar_changed?).and_return(true)
+ expect(Gitlab::AvatarCache).to receive(:delete_by_email).with(*user.verified_emails)
- expect(Gitlab::AvatarCache).to receive(:delete_by_email).with(*user.verified_emails)
-
- user.update(avatar: fixture_file_upload('spec/fixtures/dk.png'))
- end
- end
-
- context "when :avatar_cache_for_email flag is disabled" do
- before do
- stub_feature_flags(avatar_cache_for_email: false)
- end
-
- it "doesn't attempt to clear the avatar cache" do
- allow(user).to receive(:avatar_changed?).and_return(true)
-
- expect(Gitlab::AvatarCache).not_to receive(:delete_by_email)
-
- user.update(avatar: fixture_file_upload('spec/fixtures/dk.png'))
- end
+ user.update(avatar: fixture_file_upload('spec/fixtures/dk.png'))
end
end
@@ -5500,6 +5508,12 @@ RSpec.describe User do
it_behaves_like 'bot user avatars', :alert_bot, 'alert-bot.png'
it_behaves_like 'bot user avatars', :support_bot, 'support-bot.png'
it_behaves_like 'bot user avatars', :security_bot, 'security-bot.png'
+
+ context 'when bot is the support_bot' do
+ subject { described_class.support_bot }
+
+ it { is_expected.to be_confirmed }
+ end
end
describe '#confirmation_required_on_sign_in?' do
diff --git a/spec/models/users/in_product_marketing_email_spec.rb b/spec/models/users/in_product_marketing_email_spec.rb
new file mode 100644
index 00000000000..772d875d69e
--- /dev/null
+++ b/spec/models/users/in_product_marketing_email_spec.rb
@@ -0,0 +1,131 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::InProductMarketingEmail, type: :model do
+ let(:track) { :create }
+ let(:series) { 0 }
+
+ describe 'associations' do
+ it { is_expected.to belong_to(:user) }
+ end
+
+ describe 'validations' do
+ subject { build(:in_product_marketing_email) }
+
+ it { is_expected.to validate_presence_of(:user) }
+ it { is_expected.to validate_presence_of(:track) }
+ it { is_expected.to validate_presence_of(:series) }
+ it { is_expected.to validate_uniqueness_of(:user_id).scoped_to([:track, :series]).with_message('has already been sent') }
+ end
+
+ describe '.without_track_and_series' do
+ let_it_be(:user) { create(:user) }
+
+ subject(:without_track_and_series) { User.merge(described_class.without_track_and_series(track, series)) }
+
+ before do
+ create(:in_product_marketing_email, track: :create, series: 0, user: user)
+ create(:in_product_marketing_email, track: :create, series: 1, user: user)
+ create(:in_product_marketing_email, track: :verify, series: 0, user: user)
+ end
+
+ context 'when given track and series already exists' do
+ it { expect(without_track_and_series).to be_empty }
+ end
+
+ context 'when track does not exist' do
+ let(:track) { :trial }
+
+ it { expect(without_track_and_series).to eq [user] }
+ end
+
+ context 'when series does not exist' do
+ let(:series) { 2 }
+
+ it { expect(without_track_and_series).to eq [user] }
+ end
+
+ context 'when no track or series for a user exists' do
+ let(:track) { :create }
+ let(:series) { 0 }
+
+ before do
+ @other_user = create(:user)
+ end
+
+ it { expect(without_track_and_series).to eq [@other_user] }
+ end
+ end
+
+ describe '.for_user_with_track_and_series' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:in_product_marketing_email) { create(:in_product_marketing_email, series: 0, track: 0, user: user) }
+
+ subject(:for_user_with_track_and_series) { described_class.for_user_with_track_and_series(user, track, series).first }
+
+ context 'when record for user with given track and series exists' do
+ it { is_expected.to eq(in_product_marketing_email) }
+ end
+
+ context 'when user is different' do
+ let(:user) { build_stubbed(:user) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when track is different' do
+ let(:track) { 1 }
+
+ it { is_expected.to be_nil }
+ end
+
+ context 'when series is different' do
+ let(:series) { 1 }
+
+ it { is_expected.to be_nil }
+ end
+ end
+
+ describe '.save_cta_click' do
+ let(:user) { create(:user) }
+
+ subject(:save_cta_click) { described_class.save_cta_click(user, track, series) }
+
+ context 'when there is no record' do
+ it 'does not error' do
+ expect { save_cta_click }.not_to raise_error
+ end
+ end
+
+ context 'when there is no record for the track and series' do
+ it 'does not perform an update' do
+ other_email = create(:in_product_marketing_email, user: user, track: :verify, series: 2, cta_clicked_at: nil)
+
+ expect { save_cta_click }.not_to change { other_email.reload }
+ end
+ end
+
+ context 'when there is a record for the track and series' do
+ it 'saves the cta click date' do
+ email = create(:in_product_marketing_email, user: user, track: track, series: series, cta_clicked_at: nil)
+
+ freeze_time do
+ expect { save_cta_click }.to change { email.reload.cta_clicked_at }.from(nil).to(Time.zone.now)
+ end
+ end
+
+ context 'cta_clicked_at is already set' do
+ it 'does not update' do
+ create(:in_product_marketing_email, user: user, track: track, series: series, cta_clicked_at: Time.zone.now)
+
+ expect_next_found_instance_of(described_class) do |record|
+ expect(record).not_to receive(:update)
+ end
+
+ save_cta_click
+ end
+ end
+ end
+ end
+end
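
The examples above pin down the observable behaviour of .save_cta_click: it does not raise when no matching record exists, it stamps cta_clicked_at exactly once, and it never overwrites an existing value. A minimal sketch of a class method consistent with that behaviour, reusing the for_user_with_track_and_series scope covered earlier in the file — illustrative only, not the shipped model code:

  # Sketch only: one way the behaviour exercised above could be implemented.
  def self.save_cta_click(user, track, series)
    email = for_user_with_track_and_series(user, track, series).first
    return unless email

    # Only set the timestamp the first time the CTA is clicked.
    email.update(cta_clicked_at: Time.zone.now) unless email.cta_clicked_at
  end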
diff --git a/spec/models/users/merge_request_interaction_spec.rb b/spec/models/users/merge_request_interaction_spec.rb
new file mode 100644
index 00000000000..d333577fa1a
--- /dev/null
+++ b/spec/models/users/merge_request_interaction_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ::Users::MergeRequestInteraction do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project) }
+
+ subject(:interaction) do
+ ::Users::MergeRequestInteraction.new(user: user, merge_request: merge_request.reset)
+ end
+
+ describe 'declarative policy delegation' do
+ it 'delegates to the merge request' do
+ expect(subject.declarative_policy_subject).to eq(merge_request)
+ end
+ end
+
+ describe '#can_merge?' do
+ context 'when the user cannot merge' do
+ it { is_expected.not_to be_can_merge }
+ end
+
+ context 'when the user can merge' do
+ before do
+ project.add_maintainer(user)
+ end
+
+ it { is_expected.to be_can_merge }
+ end
+ end
+
+ describe '#can_update?' do
+ context 'when the user cannot update the MR' do
+ it { is_expected.not_to be_can_update }
+ end
+
+ context 'when the user can update the MR' do
+ before do
+ project.add_developer(user)
+ end
+
+ it { is_expected.to be_can_update }
+ end
+ end
+
+ describe '#review_state' do
+ subject { interaction.review_state }
+
+ context 'when the user has not been asked to review the MR' do
+ it { is_expected.to be_nil }
+
+ it 'implies not reviewed' do
+ expect(interaction).not_to be_reviewed
+ end
+ end
+
+ context 'when the user has been asked to review the MR' do
+ before do
+ merge_request.reviewers << user
+ end
+
+ it { is_expected.to eq(Types::MergeRequestReviewStateEnum.values['UNREVIEWED'].value) }
+
+ it 'implies not reviewed' do
+ expect(interaction).not_to be_reviewed
+ end
+ end
+
+ context 'when the user has provided a review' do
+ before do
+ merge_request.merge_request_reviewers.create!(reviewer: user, state: MergeRequestReviewer.states['reviewed'])
+ end
+
+ it { is_expected.to eq(Types::MergeRequestReviewStateEnum.values['REVIEWED'].value) }
+
+ it 'implies reviewed' do
+ expect(interaction).to be_reviewed
+ end
+ end
+ end
+
+ describe '#approved?' do
+ context 'when the user has not approved the MR' do
+ it { is_expected.not_to be_approved }
+ end
+
+ context 'when the user has approved the MR' do
+ before do
+ merge_request.approved_by_users << user
+ end
+
+ it { is_expected.to be_approved }
+ end
+ end
+end
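
Most expectations in this file lean on RSpec predicate matchers: be_can_merge, be_can_update, be_reviewed and be_approved simply call the corresponding `?` method on the subject. A self-contained illustration of that convention, using a plain double rather than the real class:

  RSpec.describe 'predicate matchers' do
    subject { double('interaction', can_merge?: true, approved?: false) }

    it { is_expected.to be_can_merge }     # invokes subject.can_merge?
    it { is_expected.not_to be_approved }  # invokes subject.approved?
  end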
diff --git a/spec/policies/application_setting/term_policy_spec.rb b/spec/policies/application_setting/term_policy_spec.rb
index 00b48402fa6..fd361c8b649 100644
--- a/spec/policies/application_setting/term_policy_spec.rb
+++ b/spec/policies/application_setting/term_policy_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe ApplicationSetting::TermPolicy do
include TermsHelper
let_it_be(:term) { create(:term) }
+
let(:user) { create(:user) }
subject(:policy) { described_class.new(user, term) }
diff --git a/spec/policies/ci/build_policy_spec.rb b/spec/policies/ci/build_policy_spec.rb
index 098efd7daa6..1ec749fb394 100644
--- a/spec/policies/ci/build_policy_spec.rb
+++ b/spec/policies/ci/build_policy_spec.rb
@@ -283,6 +283,7 @@ RSpec.describe Ci::BuildPolicy do
describe 'manage a web ide terminal' do
let(:build_permissions) { %i[read_web_ide_terminal create_build_terminal update_web_ide_terminal create_build_service_proxy] }
let_it_be(:maintainer) { create(:user) }
+
let(:owner) { create(:owner) }
let(:admin) { create(:admin) }
let(:maintainer) { create(:user) }
diff --git a/spec/policies/design_management/design_policy_spec.rb b/spec/policies/design_management/design_policy_spec.rb
index 117279d1638..c62e97dcdb9 100644
--- a/spec/policies/design_management/design_policy_spec.rb
+++ b/spec/policies/design_management/design_policy_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe DesignManagement::DesignPolicy do
let_it_be(:admin) { create(:admin) }
let_it_be(:project) { create(:project, :public, namespace: owner.namespace) }
let_it_be(:issue) { create(:issue, project: project) }
+
let(:design) { create(:design, issue: issue) }
subject(:design_policy) { described_class.new(current_user, design) }
@@ -131,6 +132,7 @@ RSpec.describe DesignManagement::DesignPolicy do
context "for guests in private projects" do
let_it_be(:project) { create(:project, :private) }
+
let(:current_user) { guest }
it_behaves_like "read-only design abilities"
@@ -163,6 +165,7 @@ RSpec.describe DesignManagement::DesignPolicy do
context "when the project is archived" do
let_it_be(:project) { create(:project, :public, :archived) }
let_it_be(:issue) { create(:issue, project: project) }
+
let(:current_user) { owner }
it_behaves_like "read-only design abilities"
diff --git a/spec/policies/group_deploy_keys_group_policy_spec.rb b/spec/policies/group_deploy_keys_group_policy_spec.rb
index 7ad9b655411..50d555d2094 100644
--- a/spec/policies/group_deploy_keys_group_policy_spec.rb
+++ b/spec/policies/group_deploy_keys_group_policy_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe GroupDeployKeysGroupPolicy do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:group_deploy_key) { create(:group_deploy_key) }
+
let(:group_deploy_keys_group) { create(:group_deploy_keys_group, group: group, group_deploy_key: group_deploy_key) }
describe 'edit a group deploy key for a given group' do
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 1794934dd20..f5e389ff338 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -722,6 +722,7 @@ RSpec.describe GroupPolicy do
describe 'design activity' do
let_it_be(:group) { create(:group, :public) }
+
let(:current_user) { nil }
subject { described_class.new(current_user, group) }
@@ -922,4 +923,54 @@ RSpec.describe GroupPolicy do
it { expect(described_class.new(current_user, subgroup)).to be_allowed(:read_label) }
end
end
+
+ context 'timelogs' do
+ context 'with admin' do
+ let(:current_user) { admin }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it { is_expected.to be_allowed(:read_group_timelogs) }
+ end
+
+ context 'when admin mode is disabled' do
+ it { is_expected.to be_disallowed(:read_group_timelogs) }
+ end
+ end
+
+ context 'with owner' do
+ let(:current_user) { owner }
+
+ it { is_expected.to be_allowed(:read_group_timelogs) }
+ end
+
+ context 'with maintainer' do
+ let(:current_user) { maintainer }
+
+ it { is_expected.to be_allowed(:read_group_timelogs) }
+ end
+
+ context 'with reporter' do
+ let(:current_user) { reporter }
+
+ it { is_expected.to be_allowed(:read_group_timelogs) }
+ end
+
+ context 'with guest' do
+ let(:current_user) { guest }
+
+ it { is_expected.to be_disallowed(:read_group_timelogs) }
+ end
+
+ context 'with non member' do
+ let(:current_user) { create(:user) }
+
+ it { is_expected.to be_disallowed(:read_group_timelogs) }
+ end
+
+ context 'with anonymous' do
+ let(:current_user) { nil }
+
+ it { is_expected.to be_disallowed(:read_group_timelogs) }
+ end
+ end
end
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index 60c54f97312..f2c941080b5 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -511,7 +511,7 @@ RSpec.describe ProjectPolicy do
project.add_maintainer(project_bot)
end
- it { is_expected.not_to be_allowed(:admin_resource_access_tokens)}
+ it { is_expected.not_to be_allowed(:create_resource_access_tokens)}
end
end
@@ -1353,4 +1353,54 @@ RSpec.describe ProjectPolicy do
end
end
end
+
+ context 'timelogs' do
+ context 'with admin' do
+ let(:current_user) { admin }
+
+ context 'when admin mode enabled', :enable_admin_mode do
+ it { is_expected.to be_allowed(:read_group_timelogs) }
+ end
+
+ context 'when admin mode disabled' do
+ it { is_expected.to be_disallowed(:read_group_timelogs) }
+ end
+ end
+
+ context 'with owner' do
+ let(:current_user) { owner }
+
+ it { is_expected.to be_allowed(:read_group_timelogs) }
+ end
+
+ context 'with maintainer' do
+ let(:current_user) { maintainer }
+
+ it { is_expected.to be_allowed(:read_group_timelogs) }
+ end
+
+ context 'with reporter' do
+ let(:current_user) { reporter }
+
+ it { is_expected.to be_allowed(:read_group_timelogs) }
+ end
+
+ context 'with guest' do
+ let(:current_user) { guest }
+
+ it { is_expected.to be_disallowed(:read_group_timelogs) }
+ end
+
+ context 'with non member' do
+ let(:current_user) { non_member }
+
+ it { is_expected.to be_disallowed(:read_group_timelogs) }
+ end
+
+ context 'with anonymous' do
+ let(:current_user) { anonymous }
+
+ it { is_expected.to be_disallowed(:read_group_timelogs) }
+ end
+ end
end
diff --git a/spec/policies/project_snippet_policy_spec.rb b/spec/policies/project_snippet_policy_spec.rb
index bdf9eaedbf1..8b96aa99f69 100644
--- a/spec/policies/project_snippet_policy_spec.rb
+++ b/spec/policies/project_snippet_policy_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe ProjectSnippetPolicy do
let_it_be(:other_user) { create(:user) }
let_it_be(:external_user) { create(:user, :external) }
let_it_be(:project) { create(:project, :public) }
+
let(:snippet) { create(:project_snippet, snippet_visibility, project: project, author: author) }
let(:author) { other_user }
let(:author_permissions) do
diff --git a/spec/policies/service_policy_spec.rb b/spec/policies/service_policy_spec.rb
index 5d2c9c1f6c3..84c74ca7e31 100644
--- a/spec/policies/service_policy_spec.rb
+++ b/spec/policies/service_policy_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe ServicePolicy, :models do
let_it_be(:user) { create(:user) }
+
let(:project) { integration.project }
subject(:policy) { Ability.policy_for(user, integration) }
diff --git a/spec/presenters/ci/build_runner_presenter_spec.rb b/spec/presenters/ci/build_runner_presenter_spec.rb
index 1eecc9d1ce6..9cb00781e99 100644
--- a/spec/presenters/ci/build_runner_presenter_spec.rb
+++ b/spec/presenters/ci/build_runner_presenter_spec.rb
@@ -85,7 +85,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
Ci::JobArtifact::DEFAULT_FILE_NAMES.each do |file_type, filename|
context file_type.to_s do
let(:report) { { "#{file_type}": [filename] } }
- let(:build) { create(:ci_build, options: { artifacts: { reports: report } } ) }
+ let(:build) { create(:ci_build, options: { artifacts: { reports: report } }) }
let(:report_expectation) do
{
@@ -106,7 +106,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
context "when option has both archive and reports specification" do
let(:report) { { junit: ['junit.xml'] } }
- let(:build) { create(:ci_build, options: { script: 'echo', artifacts: { **archive, reports: report } } ) }
+ let(:build) { create(:ci_build, options: { script: 'echo', artifacts: { **archive, reports: report } }) }
let(:report_expectation) do
{
@@ -223,7 +223,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
let(:build) { create(:ci_build, ref: pipeline.ref, pipeline: pipeline) }
before do
- pipeline.persistent_ref.create
+ pipeline.persistent_ref.create # rubocop:disable Rails/SaveBang
end
it 'returns the correct refspecs' do
@@ -261,7 +261,7 @@ RSpec.describe Ci::BuildRunnerPresenter do
let(:build) { create(:ci_build, pipeline: pipeline) }
before do
- pipeline.persistent_ref.create
+ pipeline.persistent_ref.create # rubocop:disable Rails/SaveBang
end
it 'exposes the persistent pipeline ref' do
@@ -272,27 +272,82 @@ RSpec.describe Ci::BuildRunnerPresenter do
end
end
- describe '#variables' do
- subject { presenter.variables }
+ describe '#runner_variables' do
+ subject { presenter.runner_variables }
- let(:build) { create(:ci_build) }
+ let_it_be(:project_with_flag_disabled) { create(:project, :repository) }
+ let_it_be(:project_with_flag_enabled) { create(:project, :repository) }
+
+ before do
+ stub_feature_flags(variable_inside_variable: [project_with_flag_enabled])
+ end
+
+ shared_examples 'returns an array with the expected variables' do
+ it 'returns an array' do
+ is_expected.to be_an_instance_of(Array)
+ end
+
+ it 'returns the expected variables' do
+ is_expected.to eq(presenter.variables.to_runner_variables)
+ end
+ end
+
+ context 'when FF :variable_inside_variable is disabled' do
+ let(:sha) { project_with_flag_disabled.repository.commit.sha }
+ let(:pipeline) { create(:ci_pipeline, sha: sha, project: project_with_flag_disabled) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
+
+ it_behaves_like 'returns an array with the expected variables'
+ end
+
+ context 'when FF :variable_inside_variable is enabled' do
+ let(:sha) { project_with_flag_enabled.repository.commit.sha }
+ let(:pipeline) { create(:ci_pipeline, sha: sha, project: project_with_flag_enabled) }
+ let(:build) { create(:ci_build, pipeline: pipeline) }
- it 'returns a Collection' do
- is_expected.to be_an_instance_of(Gitlab::Ci::Variables::Collection)
+ it_behaves_like 'returns an array with the expected variables'
end
end
- describe '#runner_variables' do
- subject { presenter.runner_variables }
+ describe '#runner_variables subset' do
+ subject { presenter.runner_variables.select { |v| %w[A B C].include?(v.fetch(:key)) } }
let(:build) { create(:ci_build) }
- it 'returns an array' do
- is_expected.to be_an_instance_of(Array)
- end
+ context 'with references in pipeline variables' do
+ before do
+ create(:ci_pipeline_variable, key: 'A', value: 'refA-$B', pipeline: build.pipeline)
+ create(:ci_pipeline_variable, key: 'B', value: 'refB-$C-$D', pipeline: build.pipeline)
+ create(:ci_pipeline_variable, key: 'C', value: 'value', pipeline: build.pipeline)
+ end
+
+ context 'when FF :variable_inside_variable is disabled' do
+ before do
+ stub_feature_flags(variable_inside_variable: false)
+ end
- it 'returns the expected variables' do
- is_expected.to eq(presenter.variables.to_runner_variables)
+ it 'returns non-expanded variables' do
+ is_expected.to eq [
+ { key: 'A', value: 'refA-$B', public: false, masked: false },
+ { key: 'B', value: 'refB-$C-$D', public: false, masked: false },
+ { key: 'C', value: 'value', public: false, masked: false }
+ ]
+ end
+ end
+
+ context 'when FF :variable_inside_variable is enabled' do
+ before do
+ stub_feature_flags(variable_inside_variable: [build.project])
+ end
+
+ it 'returns expanded and sorted variables' do
+ is_expected.to eq [
+ { key: 'C', value: 'value', public: false, masked: false },
+ { key: 'B', value: 'refB-value-$D', public: false, masked: false },
+ { key: 'A', value: 'refA-refB-value-$D', public: false, masked: false }
+ ]
+ end
+ end
end
end
end
diff --git a/spec/presenters/ci/trigger_presenter_spec.rb b/spec/presenters/ci/trigger_presenter_spec.rb
index bac1c94e0b7..b86684707b9 100644
--- a/spec/presenters/ci/trigger_presenter_spec.rb
+++ b/spec/presenters/ci/trigger_presenter_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe Ci::TriggerPresenter do
context 'when user is a trigger owner and builds admin' do
before do
- trigger.update(owner: user)
+ trigger.update!(owner: user)
end
describe '#token' do
diff --git a/spec/presenters/clusters/cluster_presenter_spec.rb b/spec/presenters/clusters/cluster_presenter_spec.rb
index 2d38c91499a..2e8364b2987 100644
--- a/spec/presenters/clusters/cluster_presenter_spec.rb
+++ b/spec/presenters/clusters/cluster_presenter_spec.rb
@@ -347,7 +347,7 @@ RSpec.describe Clusters::ClusterPresenter do
before do
project.add_maintainer(user)
- stub_feature_flags(user_mode_in_session: false)
+ stub_application_setting(admin_mode: false)
end
context 'user can read logs' do
@@ -363,7 +363,7 @@ RSpec.describe Clusters::ClusterPresenter do
before do
project.add_developer(user)
- stub_feature_flags(user_mode_in_session: false)
+ stub_application_setting(admin_mode: false)
end
it 'returns nil' do
diff --git a/spec/presenters/packages/composer/packages_presenter_spec.rb b/spec/presenters/packages/composer/packages_presenter_spec.rb
index d0e3b68fc9f..c1d8c9816a6 100644
--- a/spec/presenters/packages/composer/packages_presenter_spec.rb
+++ b/spec/presenters/packages/composer/packages_presenter_spec.rb
@@ -9,8 +9,8 @@ RSpec.describe ::Packages::Composer::PackagesPresenter do
let_it_be(:json) { { 'name' => package_name } }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :custom_repo, files: { 'composer.json' => json.to_json }, group: group) }
- let_it_be(:package1) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
- let_it_be(:package2) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '2.0.0', json: json) }
+ let!(:package1) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '1.0.0', json: json) }
+ let!(:package2) { create(:composer_package, :with_metadatum, project: project, name: package_name, version: '2.0.0', json: json) }
let(:branch) { project.repository.find_branch('master') }
@@ -29,6 +29,11 @@ RSpec.describe ::Packages::Composer::PackagesPresenter do
'type' => 'zip',
'url' => "http://localhost/api/v4/projects/#{project.id}/packages/composer/archives/#{package.name}.zip?sha=#{branch.target}"
},
+ 'source' => {
+ 'reference' => branch.target,
+ 'type' => 'git',
+ 'url' => "http://localhost/#{group.path}/#{project.path}.git"
+ },
'name' => package.name,
'uid' => package.id,
'version' => package.version
diff --git a/spec/presenters/packages/conan/package_presenter_spec.rb b/spec/presenters/packages/conan/package_presenter_spec.rb
index 4e8af752f3e..dad9460c8eb 100644
--- a/spec/presenters/packages/conan/package_presenter_spec.rb
+++ b/spec/presenters/packages/conan/package_presenter_spec.rb
@@ -146,7 +146,7 @@ RSpec.describe ::Packages::Conan::PackagePresenter do
before do
[info_file, manifest_file, package_file].each do |file|
file.conan_file_metadatum.conan_package_reference = alternative_reference
- file.save
+ file.save!
end
end
diff --git a/spec/presenters/packages/detail/package_presenter_spec.rb b/spec/presenters/packages/detail/package_presenter_spec.rb
index 5e20eed877f..4c3e0228583 100644
--- a/spec/presenters/packages/detail/package_presenter_spec.rb
+++ b/spec/presenters/packages/detail/package_presenter_spec.rb
@@ -31,7 +31,6 @@ RSpec.describe ::Packages::Detail::PackagePresenter do
id: pipeline.id,
sha: pipeline.sha,
ref: pipeline.ref,
- git_commit_message: pipeline.git_commit_message,
user: user_info,
project: {
name: pipeline.project.name,
diff --git a/spec/presenters/project_hook_presenter_spec.rb b/spec/presenters/project_hook_presenter_spec.rb
index 061ec38ae34..2e4bd17bbe1 100644
--- a/spec/presenters/project_hook_presenter_spec.rb
+++ b/spec/presenters/project_hook_presenter_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe ProjectHookPresenter do
subject { web_hook.present.logs_details_path(web_hook_log) }
let(:expected_path) do
- "/#{project.namespace.path}/#{project.name}/hooks/#{web_hook.id}/hook_logs/#{web_hook_log.id}"
+ "/#{project.namespace.path}/#{project.name}/-/hooks/#{web_hook.id}/hook_logs/#{web_hook_log.id}"
end
it { is_expected.to eq(expected_path) }
@@ -21,7 +21,7 @@ RSpec.describe ProjectHookPresenter do
subject { web_hook.present.logs_details_path(web_hook_log) }
let(:expected_path) do
- "/#{project.namespace.path}/#{project.name}/hooks/#{web_hook.id}/hook_logs/#{web_hook_log.id}"
+ "/#{project.namespace.path}/#{project.name}/-/hooks/#{web_hook.id}/hook_logs/#{web_hook_log.id}"
end
it { is_expected.to eq(expected_path) }
diff --git a/spec/presenters/projects/settings/deploy_keys_presenter_spec.rb b/spec/presenters/projects/settings/deploy_keys_presenter_spec.rb
index 7a679a03b53..0d340c5e713 100644
--- a/spec/presenters/projects/settings/deploy_keys_presenter_spec.rb
+++ b/spec/presenters/projects/settings/deploy_keys_presenter_spec.rb
@@ -3,17 +3,58 @@
require 'spec_helper'
RSpec.describe Projects::Settings::DeployKeysPresenter do
- let(:project) { create(:project) }
- let(:user) { create(:user) }
+ let_it_be(:project, refind: true) { create(:project) }
+ let_it_be(:other_project) { create(:project) }
+ let_it_be(:user) { create(:user) }
subject(:presenter) do
described_class.new(project, current_user: user)
end
+ before_all do
+ project.add_maintainer(user)
+ other_project.add_maintainer(user)
+ end
+
it 'inherits from Gitlab::View::Presenter::Simple' do
expect(described_class.superclass).to eq(Gitlab::View::Presenter::Simple)
end
+ describe 'deploy key groups' do
+ let_it_be(:deploy_key) { create(:deploy_key, projects: [project]) }
+ let_it_be(:other_deploy_key) { create(:deploy_key, projects: [other_project]) }
+ let_it_be(:public_deploy_key) { create(:deploy_key, public: true) }
+ let_it_be(:unrelated_project) { create(:project, :private) }
+ let_it_be(:unrelated_deploy_key) { create(:deploy_key, projects: [unrelated_project]) }
+
+ context 'with enabled keys' do
+ it 'returns correct deploy keys' do
+ expect(presenter.enabled_keys).to eq([deploy_key])
+ expect(presenter.enabled_keys_size).to eq(1)
+ end
+ end
+
+ context 'with available keys' do
+ it 'returns correct deploy keys' do
+ expect(presenter.available_keys).to eq([other_deploy_key, public_deploy_key])
+ end
+ end
+
+ context 'with available project keys' do
+ it 'returns correct deploy keys' do
+ expect(presenter.available_project_keys).to eq([other_deploy_key])
+ expect(presenter.available_project_keys_size).to eq(1)
+ end
+ end
+
+ context 'with available public keys' do
+ it 'returns correct deploy keys' do
+ expect(presenter.available_public_keys).to eq([public_deploy_key])
+ expect(presenter.available_public_keys_size).to eq(1)
+ end
+ end
+ end
+
describe '#enabled_keys' do
let!(:deploy_key) { create(:deploy_key, public: true) }
diff --git a/spec/requests/admin/clusters/integrations_controller_spec.rb b/spec/requests/admin/clusters/integrations_controller_spec.rb
new file mode 100644
index 00000000000..ee1c1d5aad4
--- /dev/null
+++ b/spec/requests/admin/clusters/integrations_controller_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Admin::Clusters::IntegrationsController, :enable_admin_mode do
+ include AccessMatchersForController
+
+ shared_examples 'a secure endpoint' do
+ context 'it is allowed for admins only' do
+ it { expect { subject }.to be_allowed_for(:admin) }
+ it { expect { subject }.to be_denied_for(:user) }
+ it { expect { subject }.to be_denied_for(:external) }
+ end
+ end
+
+ describe 'POST create_or_update' do
+ let(:cluster) { create(:cluster, :instance, :provided_by_gcp) }
+ let(:user) { create(:admin) }
+
+ it_behaves_like '#create_or_update action' do
+ let(:path) { create_or_update_admin_cluster_integration_path(cluster) }
+ let(:redirect_path) { admin_cluster_path(cluster, params: { tab: 'integrations' }) }
+ end
+ end
+end
diff --git a/spec/requests/api/api_spec.rb b/spec/requests/api/api_spec.rb
index 522030652bd..b3e425630e5 100644
--- a/spec/requests/api/api_spec.rb
+++ b/spec/requests/api/api_spec.rb
@@ -105,9 +105,9 @@ RSpec.describe API::API do
it 'logs all application context fields' do
allow_any_instance_of(Gitlab::GrapeLogging::Loggers::ContextLogger).to receive(:parameters) do
- Labkit::Context.current.to_h.tap do |log_context|
+ Gitlab::ApplicationContext.current.tap do |log_context|
expect(log_context).to match('correlation_id' => an_instance_of(String),
- 'meta.caller_id' => '/api/:version/projects/:id/issues',
+ 'meta.caller_id' => 'GET /api/:version/projects/:id/issues',
'meta.remote_ip' => an_instance_of(String),
'meta.project' => project.full_path,
'meta.root_namespace' => project.namespace.full_path,
@@ -122,9 +122,9 @@ RSpec.describe API::API do
it 'skips fields that do not apply' do
allow_any_instance_of(Gitlab::GrapeLogging::Loggers::ContextLogger).to receive(:parameters) do
- Labkit::Context.current.to_h.tap do |log_context|
+ Gitlab::ApplicationContext.current.tap do |log_context|
expect(log_context).to match('correlation_id' => an_instance_of(String),
- 'meta.caller_id' => '/api/:version/users',
+ 'meta.caller_id' => 'GET /api/:version/users',
'meta.remote_ip' => an_instance_of(String),
'meta.client_id' => an_instance_of(String),
'meta.feature_category' => 'users')
@@ -141,7 +141,7 @@ RSpec.describe API::API do
let(:component_map) do
{
"application" => "test",
- "endpoint_id" => "/api/:version/users/:id"
+ "endpoint_id" => "GET /api/:version/users/:id"
}
end
diff --git a/spec/requests/api/applications_spec.rb b/spec/requests/api/applications_spec.rb
index ca09f5524ca..959e68e6a0d 100644
--- a/spec/requests/api/applications_spec.rb
+++ b/spec/requests/api/applications_spec.rb
@@ -5,7 +5,7 @@ require 'spec_helper'
RSpec.describe API::Applications, :api do
let(:admin_user) { create(:user, admin: true) }
let(:user) { create(:user, admin: false) }
- let!(:application) { create(:application, name: 'another_application', redirect_uri: 'http://other_application.url', scopes: '') }
+ let!(:application) { create(:application, name: 'another_application', owner: nil, redirect_uri: 'http://other_application.url', scopes: '') }
describe 'POST /applications' do
context 'authenticated and authorized user' do
@@ -143,6 +143,12 @@ RSpec.describe API::Applications, :api do
expect(response).to have_gitlab_http_status(:no_content)
end
+
+ it 'cannot delete non-existing application' do
+ delete api("/applications/#{non_existing_record_id}", admin_user)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
end
context 'authorized user without authorization' do
diff --git a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
index 9369b6aa464..017a12a4a40 100644
--- a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
@@ -127,7 +127,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
authorize_artifacts_with_token_in_params
end
- it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id/artifacts/authorize' do
+ it_behaves_like 'API::CI::Runner application context metadata', 'POST /api/:version/jobs/:id/artifacts/authorize' do
let(:send_request) { subject }
end
@@ -180,6 +180,18 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
it_behaves_like 'authorizes local file'
end
end
+
+ context 'when job does not exist anymore' do
+ before do
+ allow(job).to receive(:id).and_return(non_existing_record_id)
+ end
+
+ it 'returns 403 Forbidden' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
end
end
@@ -262,7 +274,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
describe 'POST /api/v4/jobs/:id/artifacts' do
- it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id/artifacts' do
+ it_behaves_like 'API::CI::Runner application context metadata', 'POST /api/:version/jobs/:id/artifacts' do
let(:send_request) do
upload_artifacts(file_upload, headers_with_token)
end
@@ -321,6 +333,18 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
end
+ context 'when job does not exist anymore' do
+ before do
+ allow(job).to receive(:id).and_return(non_existing_record_id)
+ end
+
+ it 'returns 403 Forbidden' do
+ upload_artifacts(file_upload, headers_with_token)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
context 'when job is running' do
shared_examples 'successful artifacts upload' do
it 'updates successfully' do
@@ -784,7 +808,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
describe 'GET /api/v4/jobs/:id/artifacts' do
let(:token) { job.token }
- it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id/artifacts' do
+ it_behaves_like 'API::CI::Runner application context metadata', 'GET /api/:version/jobs/:id/artifacts' do
let(:send_request) { download_artifact }
end
@@ -867,6 +891,18 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
end
+ context 'when job does not exist anymore' do
+ before do
+ allow(job).to receive(:id).and_return(non_existing_record_id)
+ end
+
+ it 'responds with 403 Forbidden' do
+ get api("/jobs/#{job.id}/artifacts"), params: { token: token }, headers: headers
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
def download_artifact(params = {}, request_headers = headers)
params = params.merge(token: token)
job.reload
diff --git a/spec/requests/api/ci/runner/jobs_put_spec.rb b/spec/requests/api/ci/runner/jobs_put_spec.rb
index b5d2c4608c5..3d5021fba08 100644
--- a/spec/requests/api/ci/runner/jobs_put_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_put_spec.rb
@@ -36,7 +36,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
job.run!
end
- it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id' do
+ it_behaves_like 'API::CI::Runner application context metadata', 'PUT /api/:version/jobs/:id' do
let(:send_request) { update_job(state: 'success') }
end
@@ -278,14 +278,22 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
end
- def update_job(token = job.token, **params)
+ context 'when job does not exist anymore' do
+ it 'returns 403 Forbidden' do
+ update_job(non_existing_record_id, state: 'success', trace: 'BUILD TRACE UPDATED')
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ def update_job(job_id = job.id, token = job.token, **params)
new_params = params.merge(token: token)
- put api("/jobs/#{job.id}"), params: new_params
+ put api("/jobs/#{job_id}"), params: new_params
end
def update_job_after_time(update_interval = 20.minutes, state = 'running')
travel_to(job.updated_at + update_interval) do
- update_job(job.token, state: state)
+ update_job(job.id, job.token, state: state)
end
end
end
diff --git a/spec/requests/api/ci/runner/jobs_request_post_spec.rb b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
index aced094e219..cf0d8a632f1 100644
--- a/spec/requests/api/ci/runner/jobs_request_post_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_request_post_spec.rb
@@ -143,7 +143,8 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
context 'when there is a pending job' do
let(:expected_job_info) do
- { 'name' => job.name,
+ { 'id' => job.id,
+ 'name' => job.name,
'stage' => job.stage,
'project_id' => job.project.id,
'project_name' => job.project.name }
@@ -490,6 +491,36 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
{ 'id' => job.id, 'name' => job.name, 'token' => job.token },
{ 'id' => job2.id, 'name' => job2.name, 'token' => job2.token })
end
+
+ describe 'preloading job_artifacts_archive' do
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(preload_associations_jobs_request_api_endpoint: false)
+ end
+
+ it 'queries the ci_job_artifacts table multiple times' do
+ expect { request_job }.to exceed_all_query_limit(1).for_model(::Ci::JobArtifact)
+ end
+
+ it 'queries the ci_builds table more than five times' do
+ expect { request_job }.to exceed_all_query_limit(5).for_model(::Ci::Build)
+ end
+ end
+
+ context 'when the feature flag is enabled' do
+ before do
+ stub_feature_flags(preload_associations_jobs_request_api_endpoint: true)
+ end
+
+ it 'queries the ci_job_artifacts table once only' do
+ expect { request_job }.not_to exceed_all_query_limit(1).for_model(::Ci::JobArtifact)
+ end
+
+ it 'queries the ci_builds table five times' do
+ expect { request_job }.not_to exceed_all_query_limit(5).for_model(::Ci::Build)
+ end
+ end
+ end
end
context 'when pipeline have jobs with artifacts' do
diff --git a/spec/requests/api/ci/runner/jobs_trace_spec.rb b/spec/requests/api/ci/runner/jobs_trace_spec.rb
index 659cf055023..e077a174b08 100644
--- a/spec/requests/api/ci/runner/jobs_trace_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_trace_spec.rb
@@ -41,7 +41,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
initial_patch_the_trace
end
- it_behaves_like 'API::CI::Runner application context metadata', '/api/:version/jobs/:id/trace' do
+ it_behaves_like 'API::CI::Runner application context metadata', 'PATCH /api/:version/jobs/:id/trace' do
let(:send_request) { patch_the_trace }
end
@@ -210,15 +210,23 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
context 'when build trace is not being watched' do
- it 'returns X-GitLab-Trace-Update-Interval as 30' do
+ it 'returns the interval in X-GitLab-Trace-Update-Interval' do
patch_the_trace
expect(response).to have_gitlab_http_status(:accepted)
- expect(response.header['X-GitLab-Trace-Update-Interval']).to eq('30')
+ expect(response.header['X-GitLab-Trace-Update-Interval']).to eq('60')
end
end
end
+ context 'when job does not exist anymore' do
+ it 'returns 403 Forbidden' do
+ patch_the_trace(job_id: non_existing_record_id)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
context 'when Runner makes a force-patch' do
before do
force_patch_the_trace
@@ -264,7 +272,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
it { expect(response).to have_gitlab_http_status(:forbidden) }
end
- def patch_the_trace(content = ' appended', request_headers = nil)
+ def patch_the_trace(content = ' appended', request_headers = nil, job_id: job.id)
unless request_headers
job.trace.read do |stream|
offset = stream.size
@@ -274,7 +282,7 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state do
end
Timecop.travel(job.updated_at + update_interval) do
- patch api("/jobs/#{job.id}/trace"), params: content, headers: request_headers
+ patch api("/jobs/#{job_id}/trace"), params: content, headers: request_headers
job.reload
end
end
diff --git a/spec/requests/api/commit_statuses_spec.rb b/spec/requests/api/commit_statuses_spec.rb
index 10fa15d468f..ac125e81acd 100644
--- a/spec/requests/api/commit_statuses_spec.rb
+++ b/spec/requests/api/commit_statuses_spec.rb
@@ -14,8 +14,8 @@ RSpec.describe API::CommitStatuses do
let(:get_url) { "/projects/#{project.id}/repository/commits/#{sha}/statuses" }
context 'ci commit exists' do
- let!(:master) { project.ci_pipelines.create(source: :push, sha: commit.id, ref: 'master', protected: false) }
- let!(:develop) { project.ci_pipelines.create(source: :push, sha: commit.id, ref: 'develop', protected: false) }
+ let!(:master) { project.ci_pipelines.create!(source: :push, sha: commit.id, ref: 'master', protected: false) }
+ let!(:develop) { project.ci_pipelines.create!(source: :push, sha: commit.id, ref: 'develop', protected: false) }
context "reporter user" do
let(:statuses_id) { json_response.map { |status| status['id'] } }
@@ -270,8 +270,8 @@ RSpec.describe API::CommitStatuses do
end
context 'when a pipeline id is specified' do
- let!(:first_pipeline) { project.ci_pipelines.create(source: :push, sha: commit.id, ref: 'master', status: 'created') }
- let!(:other_pipeline) { project.ci_pipelines.create(source: :push, sha: commit.id, ref: 'master', status: 'created') }
+ let!(:first_pipeline) { project.ci_pipelines.create!(source: :push, sha: commit.id, ref: 'master', status: 'created') }
+ let!(:other_pipeline) { project.ci_pipelines.create!(source: :push, sha: commit.id, ref: 'master', status: 'created') }
subject do
post api(post_url, developer), params: {
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index de2cfb8fea0..ac3aa808f37 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -1439,6 +1439,22 @@ RSpec.describe API::Commits do
it_behaves_like 'ref comments'
end
end
+
+ context 'multiple notes' do
+ let!(:note) { create(:diff_note_on_commit, project: project) }
+ let(:commit) { note.commit }
+ let(:commit_id) { note.commit_id }
+
+ it 'are returned without N + 1' do
+ get api(route, current_user) # warm up the cache
+
+ control_count = ActiveRecord::QueryRecorder.new { get api(route, current_user) }.count
+
+ create(:diff_note_on_commit, project: project, author: create(:user))
+
+ expect { get api(route, current_user) }.not_to exceed_query_limit(control_count)
+ end
+ end
end
context 'when the commit is present on two projects' do
@@ -1898,8 +1914,12 @@ RSpec.describe API::Commits do
let(:merged_mr) { create(:merge_request, source_project: project, source_branch: 'master', target_branch: 'feature') }
let(:commit) { merged_mr.merge_request_diff.commits.last }
- it 'returns the correct merge request' do
+ def perform_request(user)
get api("/projects/#{project.id}/repository/commits/#{commit.id}/merge_requests", user)
+ end
+
+ it 'returns the correct merge request' do
+ perform_request(user)
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_limited_pagination_headers
@@ -1910,7 +1930,7 @@ RSpec.describe API::Commits do
it 'returns 403 for an unauthorized user' do
project.add_guest(user)
- get api("/projects/#{project.id}/repository/commits/#{commit.id}/merge_requests", user)
+ perform_request(user)
expect(response).to have_gitlab_http_status(:forbidden)
end
@@ -1926,11 +1946,21 @@ RSpec.describe API::Commits do
let(:non_member) { create(:user) }
it 'responds 403 when only members are allowed to read merge requests' do
- get api("/projects/#{project.id}/repository/commits/#{commit.id}/merge_requests", non_member)
+ perform_request(non_member)
expect(response).to have_gitlab_http_status(:forbidden)
end
end
+
+ it 'returns multiple merge requests without N + 1' do
+ perform_request(user)
+
+ control_count = ActiveRecord::QueryRecorder.new { perform_request(user) }.count
+
+ create(:merge_request, :closed, source_project: project, source_branch: 'master', target_branch: 'feature')
+
+ expect { perform_request(user) }.not_to exceed_query_limit(control_count)
+ end
end
describe 'GET /projects/:id/repository/commits/:sha/signature' do
diff --git a/spec/requests/api/composer_packages_spec.rb b/spec/requests/api/composer_packages_spec.rb
index 30a831d24fd..0ff88cb41a8 100644
--- a/spec/requests/api/composer_packages_spec.rb
+++ b/spec/requests/api/composer_packages_spec.rb
@@ -434,6 +434,7 @@ RSpec.describe API::ComposerPackages do
end
it_behaves_like 'process Composer api request', params[:user_role], params[:expected_status], params[:member]
+ it_behaves_like 'a package tracking event', described_class.name, 'pull_package'
end
end
end
diff --git a/spec/requests/api/conan_project_packages_spec.rb b/spec/requests/api/conan_project_packages_spec.rb
index fefaf9790b1..da054ed2e96 100644
--- a/spec/requests/api/conan_project_packages_spec.rb
+++ b/spec/requests/api/conan_project_packages_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe API::ConanProjectPackages do
+RSpec.describe API::ConanProjectPackages, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/326194' do
include_context 'conan api setup'
let(:project_id) { project.id }
diff --git a/spec/requests/api/deploy_keys_spec.rb b/spec/requests/api/deploy_keys_spec.rb
index 591d994fec9..a01c66a311c 100644
--- a/spec/requests/api/deploy_keys_spec.rb
+++ b/spec/requests/api/deploy_keys_spec.rb
@@ -3,12 +3,13 @@
require 'spec_helper'
RSpec.describe API::DeployKeys do
- let(:user) { create(:user) }
- let(:maintainer) { create(:user) }
- let(:admin) { create(:admin) }
- let(:project) { create(:project, creator_id: user.id) }
- let(:project2) { create(:project, creator_id: user.id) }
- let(:deploy_key) { create(:deploy_key, public: true) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:project) { create(:project, creator_id: user.id) }
+ let_it_be(:project2) { create(:project, creator_id: user.id) }
+
+ let(:deploy_key) { create(:deploy_key, public: true) }
let!(:deploy_keys_project) do
create(:deploy_keys_project, project: project, deploy_key: deploy_key)
@@ -44,18 +45,30 @@ RSpec.describe API::DeployKeys do
end
describe 'GET /projects/:id/deploy_keys' do
- before do
- deploy_key
+ let(:deploy_key) { create(:deploy_key, public: true, user: admin) }
+
+ def perform_request
+ get api("/projects/#{project.id}/deploy_keys", admin)
end
it 'returns array of ssh keys' do
- get api("/projects/#{project.id}/deploy_keys", admin)
+ perform_request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
expect(json_response.first['title']).to eq(deploy_key.title)
end
+
+ it 'returns multiple deploy keys without N + 1' do
+ perform_request
+
+ control_count = ActiveRecord::QueryRecorder.new { perform_request }.count
+
+ create(:deploy_key, public: true, projects: [project], user: maintainer)
+
+ expect { perform_request }.not_to exceed_query_limit(control_count)
+ end
end
describe 'GET /projects/:id/deploy_keys/:key_id' do
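
Several specs in this commit (deploy keys above, deployments below) swap let for let_it_be. let_it_be comes from the test-prof gem used by the GitLab test suite: the record is created once before the example group and shared across its examples, instead of being re-created per example as let/let! are. A rough illustration, assuming standard FactoryBot factories:

  RSpec.describe 'let vs let_it_be' do
    let(:per_example_project)  { create(:project) } # one INSERT per example
    let_it_be(:shared_project) { create(:project) } # one INSERT for the whole group

    it { expect(shared_project).to be_persisted }
    it { expect(per_example_project).not_to eq(shared_project) }
  end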
diff --git a/spec/requests/api/deployments_spec.rb b/spec/requests/api/deployments_spec.rb
index 8113de96ac4..c89c59a2151 100644
--- a/spec/requests/api/deployments_spec.rb
+++ b/spec/requests/api/deployments_spec.rb
@@ -3,22 +3,26 @@
require 'spec_helper'
RSpec.describe API::Deployments do
- let(:user) { create(:user) }
- let(:non_member) { create(:user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:non_member) { create(:user) }
before do
project.add_maintainer(user)
end
describe 'GET /projects/:id/deployments' do
- let(:project) { create(:project, :repository) }
- let!(:deployment_1) { create(:deployment, :success, project: project, iid: 11, ref: 'master', created_at: Time.now, updated_at: Time.now) }
- let!(:deployment_2) { create(:deployment, :success, project: project, iid: 12, ref: 'master', created_at: 1.day.ago, updated_at: 2.hours.ago) }
- let!(:deployment_3) { create(:deployment, :success, project: project, iid: 8, ref: 'master', created_at: 2.days.ago, updated_at: 1.hour.ago) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:deployment_1) { create(:deployment, :success, project: project, iid: 11, ref: 'master', created_at: Time.now, updated_at: Time.now) }
+ let_it_be(:deployment_2) { create(:deployment, :success, project: project, iid: 12, ref: 'master', created_at: 1.day.ago, updated_at: 2.hours.ago) }
+ let_it_be(:deployment_3) { create(:deployment, :success, project: project, iid: 8, ref: 'master', created_at: 2.days.ago, updated_at: 1.hour.ago) }
+
+ def perform_request(params = {})
+ get api("/projects/#{project.id}/deployments", user), params: params
+ end
context 'as member of the project' do
it 'returns projects deployments sorted by id asc' do
- get api("/projects/#{project.id}/deployments", user)
+ perform_request
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
@@ -32,7 +36,7 @@ RSpec.describe API::Deployments do
context 'with updated_at filters specified' do
it 'returns projects deployments with last update in specified datetime range' do
- get api("/projects/#{project.id}/deployments", user), params: { updated_before: 30.minutes.ago, updated_after: 90.minutes.ago }
+ perform_request({ updated_before: 30.minutes.ago, updated_after: 90.minutes.ago })
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
@@ -42,10 +46,7 @@ RSpec.describe API::Deployments do
 context 'with the environment filter specified' do
it 'returns deployments for the environment' do
- get(
- api("/projects/#{project.id}/deployments", user),
- params: { environment: deployment_1.environment.name }
- )
+ perform_request({ environment: deployment_1.environment.name })
expect(json_response.size).to eq(1)
expect(json_response.first['iid']).to eq(deployment_1.iid)
@@ -86,6 +87,16 @@ RSpec.describe API::Deployments do
end
end
end
+
+ it 'returns multiple deployments without N + 1' do
+ perform_request # warm up the cache
+
+ control_count = ActiveRecord::QueryRecorder.new { perform_request }.count
+
+ create(:deployment, :success, project: project, iid: 21, ref: 'master')
+
+ expect { perform_request }.not_to exceed_query_limit(control_count)
+ end
end
context 'as non member' do
@@ -334,7 +345,7 @@ RSpec.describe API::Deployments do
context 'as a maintainer' do
it 'returns a 403 when updating a deployment with a build' do
- deploy.update(deployable: build)
+ deploy.update!(deployable: build)
put(
api("/projects/#{project.id}/deployments/#{deploy.id}", user),
@@ -383,7 +394,7 @@ RSpec.describe API::Deployments do
end
it 'returns a 403 when updating a deployment with a build' do
- deploy.update(deployable: build)
+ deploy.update!(deployable: build)
put(
api("/projects/#{project.id}/deployments/#{deploy.id}", developer),
diff --git a/spec/requests/api/environments_spec.rb b/spec/requests/api/environments_spec.rb
index 303e510883d..aa1a4643593 100644
--- a/spec/requests/api/environments_spec.rb
+++ b/spec/requests/api/environments_spec.rb
@@ -214,7 +214,7 @@ RSpec.describe API::Environments do
context 'as a maintainer' do
context 'with a stoppable environment' do
before do
- environment.update(state: :available)
+ environment.update!(state: :available)
post api("/projects/#{project.id}/environments/#{environment.id}/stop", user)
end
diff --git a/spec/requests/api/files_spec.rb b/spec/requests/api/files_spec.rb
index 8cd2f00a718..71a4a1a2784 100644
--- a/spec/requests/api/files_spec.rb
+++ b/spec/requests/api/files_spec.rb
@@ -517,6 +517,21 @@ RSpec.describe API::Files do
expect(response).to have_gitlab_http_status(:ok)
end
+ context 'when ref is not provided' do
+ before do
+ stub_application_setting(default_branch_name: 'main')
+ end
+
+ it 'returns response :ok', :aggregate_failures do
+ url = route(file_path) + "/raw"
+ expect(Gitlab::Workhorse).to receive(:send_git_blob)
+
+ get api(url, current_user), params: {}
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
it 'returns raw file info for files with dots' do
url = route('.gitignore') + "/raw"
expect(Gitlab::Workhorse).to receive(:send_git_blob)
diff --git a/spec/requests/api/generic_packages_spec.rb b/spec/requests/api/generic_packages_spec.rb
index 16d56b6cfbe..a5e40eec919 100644
--- a/spec/requests/api/generic_packages_spec.rb
+++ b/spec/requests/api/generic_packages_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe API::GenericPackages do
let_it_be(:project_deploy_token_ro) { create(:project_deploy_token, deploy_token: deploy_token_ro, project: project) }
let_it_be(:deploy_token_wo) { create(:deploy_token, read_package_registry: false, write_package_registry: true) }
let_it_be(:project_deploy_token_wo) { create(:project_deploy_token, deploy_token: deploy_token_wo, project: project) }
+
let(:user) { personal_access_token.user }
let(:ci_build) { create(:ci_build, :running, user: user) }
@@ -326,6 +327,34 @@ RSpec.describe API::GenericPackages do
end
end
end
+
+ context 'different versions' do
+ where(:version, :expected_status) do
+ '1.3.350-20201230123456' | :created
+ '1.2.3' | :created
+ '1.2.3g' | :created
+ '1.2' | :created
+ '1.2.bananas' | :created
+ 'v1.2.4-build' | :created
+ 'd50d836eb3de6177ce6c7a5482f27f9c2c84b672' | :created
+ '..1.2.3' | :bad_request
+ '1.2.3-4/../../' | :bad_request
+ '%2e%2e%2f1.2.3' | :bad_request
+ end
+
+ with_them do
+ let(:expected_package_diff_count) { expected_status == :created ? 1 : 0 }
+ let(:headers) { workhorse_headers.merge(auth_header) }
+
+ subject { upload_file(params, headers, package_version: version) }
+
+ it "returns the #{params[:expected_status]}", :aggregate_failures do
+ expect { subject }.to change { project.packages.generic.count }.by(expected_package_diff_count)
+
+ expect(response).to have_gitlab_http_status(expected_status)
+ end
+ end
+ end
end
end
@@ -418,8 +447,8 @@ RSpec.describe API::GenericPackages do
end
end
- def upload_file(params, request_headers, send_rewritten_field: true, package_name: 'mypackage', file_name: 'myfile.tar.gz')
- url = "/projects/#{project.id}/packages/generic/#{package_name}/0.0.1/#{file_name}"
+ def upload_file(params, request_headers, send_rewritten_field: true, package_name: 'mypackage', package_version: '0.0.1', file_name: 'myfile.tar.gz')
+ url = "/projects/#{project.id}/packages/generic/#{package_name}/#{package_version}/#{file_name}"
workhorse_finalize(
api(url),
diff --git a/spec/requests/api/go_proxy_spec.rb b/spec/requests/api/go_proxy_spec.rb
index d45e24241b2..e678b6cf1c8 100644
--- a/spec/requests/api/go_proxy_spec.rb
+++ b/spec/requests/api/go_proxy_spec.rb
@@ -363,7 +363,7 @@ RSpec.describe API::GoProxy do
let(:module_name) { base }
before do
- project.update(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
end
describe 'GET /projects/:id/packages/go/*module_name/@v/list' do
@@ -412,7 +412,7 @@ RSpec.describe API::GoProxy do
let(:module_name) { base }
before do
- project.update(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
end
describe 'GET /projects/:id/packages/go/*module_name/@v/list' do
diff --git a/spec/requests/api/graphql/ci/groups_spec.rb b/spec/requests/api/graphql/ci/groups_spec.rb
index 9e81358a152..d1a4395d2c9 100644
--- a/spec/requests/api/graphql/ci/groups_spec.rb
+++ b/spec/requests/api/graphql/ci/groups_spec.rb
@@ -4,10 +4,15 @@ require 'spec_helper'
RSpec.describe 'Query.project.pipeline.stages.groups' do
include GraphqlHelpers
- let(:project) { create(:project, :repository, :public) }
- let(:user) { create(:user) }
- let(:pipeline) { create(:ci_pipeline, project: project, user: user) }
- let(:group_graphql_data) { graphql_data.dig('project', 'pipeline', 'stages', 'nodes', 0, 'groups', 'nodes') }
+ let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user) }
+ let(:group_graphql_data) { graphql_data_at(:project, :pipeline, :stages, :nodes, 0, :groups, :nodes) }
+
+ let_it_be(:ref) { 'master' }
+ let_it_be(:job_a) { create(:commit_status, pipeline: pipeline, name: 'rspec 0 2', ref: ref) }
+ let_it_be(:job_b) { create(:ci_build, pipeline: pipeline, name: 'rspec 0 1', ref: ref) }
+ let_it_be(:job_c) { create(:ci_bridge, pipeline: pipeline, name: 'spinach 0 1', ref: ref) }
let(:params) { {} }
@@ -38,18 +43,15 @@ RSpec.describe 'Query.project.pipeline.stages.groups' do
end
before do
- create(:commit_status, pipeline: pipeline, name: 'rspec 0 2')
- create(:commit_status, pipeline: pipeline, name: 'rspec 0 1')
- create(:commit_status, pipeline: pipeline, name: 'spinach 0 1')
post_graphql(query, current_user: user)
end
it_behaves_like 'a working graphql query'
 it 'returns an array of jobs belonging to a pipeline' do
- expect(group_graphql_data.map { |g| g.slice('name', 'size') }).to eq([
- { 'name' => 'rspec', 'size' => 2 },
- { 'name' => 'spinach', 'size' => 1 }
- ])
+ expect(group_graphql_data).to contain_exactly(
+ a_hash_including('name' => 'rspec', 'size' => 2),
+ a_hash_including('name' => 'spinach', 'size' => 1)
+ )
end
end
diff --git a/spec/requests/api/graphql/ci/job_spec.rb b/spec/requests/api/graphql/ci/job_spec.rb
new file mode 100644
index 00000000000..78f7d3e149b
--- /dev/null
+++ b/spec/requests/api/graphql/ci/job_spec.rb
@@ -0,0 +1,100 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Query.project(fullPath).pipelines.job(id)' do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create_default(:user) }
+ let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+
+ let_it_be(:prepare_stage) { create(:ci_stage_entity, pipeline: pipeline, project: project, name: 'prepare') }
+ let_it_be(:test_stage) { create(:ci_stage_entity, pipeline: pipeline, project: project, name: 'test') }
+
+ let_it_be(:job_1) { create(:ci_build, pipeline: pipeline, stage: 'prepare', name: 'Job 1') }
+ let_it_be(:job_2) { create(:ci_build, pipeline: pipeline, stage: 'test', name: 'Job 2') }
+ let_it_be(:job_3) { create(:ci_build, pipeline: pipeline, stage: 'test', name: 'Job 3') }
+
+ let(:path_to_job) do
+ [
+ [:project, { full_path: project.full_path }],
+ [:pipelines, { first: 1 }],
+ [:nodes, nil],
+ [:job, { id: global_id_of(job_2) }]
+ ]
+ end
+
+ let(:query) do
+ wrap_fields(query_graphql_path(query_path, all_graphql_fields_for(terminal_type)))
+ end
+
+ describe 'scalar fields' do
+ let(:path) { [:project, :pipelines, :nodes, 0, :job] }
+ let(:query_path) { path_to_job }
+ let(:terminal_type) { 'CiJob' }
+
+ it 'retrieves scalar fields' do
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data_at(*path)).to match a_hash_including(
+ 'id' => global_id_of(job_2),
+ 'name' => job_2.name,
+ 'allowFailure' => job_2.allow_failure,
+ 'duration' => job_2.duration,
+ 'status' => job_2.status.upcase
+ )
+ end
+
+ context 'when fetching by name' do
+ before do
+ query_path.last[1] = { name: job_2.name }
+ end
+
+ it 'retrieves scalar fields' do
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data_at(*path)).to match a_hash_including(
+ 'id' => global_id_of(job_2),
+ 'name' => job_2.name
+ )
+ end
+ end
+ end
+
+ describe '.detailedStatus' do
+ let(:path) { [:project, :pipelines, :nodes, 0, :job, :detailed_status] }
+ let(:query_path) { path_to_job + [:detailed_status] }
+ let(:terminal_type) { 'DetailedStatus' }
+
+ it 'retrieves detailed status' do
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data_at(*path)).to match a_hash_including(
+ 'text' => 'pending',
+ 'label' => 'pending',
+ 'action' => a_hash_including('buttonTitle' => 'Cancel this job', 'icon' => 'cancel')
+ )
+ end
+ end
+
+ describe '.stage' do
+ let(:path) { [:project, :pipelines, :nodes, 0, :job, :stage] }
+ let(:query_path) { path_to_job + [:stage] }
+ let(:terminal_type) { 'CiStage' }
+
+ it 'returns appropriate data' do
+ post_graphql(query, current_user: user)
+
+ expect(graphql_data_at(*path)).to match a_hash_including(
+ 'name' => test_stage.name,
+ 'jobs' => a_hash_including(
+ 'nodes' => contain_exactly(
+ a_hash_including('id' => global_id_of(job_2)),
+ a_hash_including('id' => global_id_of(job_3))
+ )
+ )
+ )
+ end
+ end
+end
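
For orientation, the query these examples assemble from `path_to_job` and `all_graphql_fields_for` expands to roughly the document below. This is only a sketch: the project path and global ID are placeholders, and the selection is trimmed to the fields the assertions above actually check.

    query = <<~GQL # illustrative only; the spec builds this via GraphqlHelpers
      {
        project(fullPath: "some-group/some-project") {
          pipelines(first: 1) {
            nodes {
              job(id: "gid://gitlab/Ci::Build/123") {
                id
                name
                allowFailure
                duration
                status
                detailedStatus { text label }
                stage { name jobs { nodes { id } } }
              }
            }
          }
        }
      }
    GQL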
diff --git a/spec/requests/api/graphql/custom_emoji_query_spec.rb b/spec/requests/api/graphql/custom_emoji_query_spec.rb
index d5a423d0eba..874357d9eef 100644
--- a/spec/requests/api/graphql/custom_emoji_query_spec.rb
+++ b/spec/requests/api/graphql/custom_emoji_query_spec.rb
@@ -16,7 +16,15 @@ RSpec.describe 'getting custom emoji within namespace' do
describe "Query CustomEmoji on Group" do
def custom_emoji_query(group)
- graphql_query_for('group', 'fullPath' => group.full_path)
+ fields = all_graphql_fields_for('Group')
+ # TODO: Set required timelogs args elsewhere https://gitlab.com/gitlab-org/gitlab/-/issues/325499
+ fields.selection['timelogs(startDate: "2021-03-01" endDate: "2021-03-30")'] = fields.selection.delete('timelogs')
+
+ graphql_query_for(
+ 'group',
+ { fullPath: group.full_path },
+ fields
+ )
end
it 'returns emojis when authorised' do
diff --git a/spec/requests/api/graphql/gitlab_schema_spec.rb b/spec/requests/api/graphql/gitlab_schema_spec.rb
index fe1c7c15de2..b41d851439b 100644
--- a/spec/requests/api/graphql/gitlab_schema_spec.rb
+++ b/spec/requests/api/graphql/gitlab_schema_spec.rb
@@ -125,9 +125,9 @@ RSpec.describe 'GitlabSchema configurations' do
subject do
queries = [
- { query: graphql_query_for('project', { 'fullPath' => '$fullPath' }, %w(id name description)) },
- { query: graphql_query_for('echo', { 'text' => "$test" }, []), variables: { "test" => "Hello world" } },
- { query: graphql_query_for('project', { 'fullPath' => project.full_path }, "userPermissions { createIssue }") }
+ { query: graphql_query_for('project', { 'fullPath' => '$fullPath' }, %w(id name description)) }, # Complexity 4
+ { query: graphql_query_for('echo', { 'text' => "$test" }, []), variables: { "test" => "Hello world" } }, # Complexity 1
+ { query: graphql_query_for('project', { 'fullPath' => project.full_path }, "userPermissions { createIssue }") } # Complexity 3
]
post_multiplex(queries, current_user: current_user)
@@ -139,10 +139,9 @@ RSpec.describe 'GitlabSchema configurations' do
expect(json_response.last['data']['project']).to be_nil
end
- it_behaves_like 'imposing query limits' do
- it 'fails all queries when only one of the queries is too complex' do
- # The `project` query above has a complexity of 5
- allow(GitlabSchema).to receive(:max_query_complexity).and_return 4
+ shared_examples 'query is too complex' do |description, max_complexity|
+ it description, :aggregate_failures do
+ allow(GitlabSchema).to receive(:max_query_complexity).and_return max_complexity
subject
@@ -155,11 +154,17 @@ RSpec.describe 'GitlabSchema configurations' do
# Expect errors for each query
expect(graphql_errors.size).to eq(3)
graphql_errors.each do |single_query_errors|
- expect_graphql_errors_to_include(/which exceeds max complexity of 4/)
+ expect_graphql_errors_to_include(/Query has complexity of 8, which exceeds max complexity of #{max_complexity}/)
end
end
end
+ it_behaves_like 'imposing query limits' do
+ # The total complexity of the multiplex query above is 8
+ it_behaves_like 'query is too complex', 'fails all queries when only one of the queries is too complex', 4
+ it_behaves_like 'query is too complex', 'fails when all queries combined are too complex', 7
+ end
+
context 'authentication' do
let(:current_user) { project.owner }
@@ -191,6 +196,7 @@ RSpec.describe 'GitlabSchema configurations' do
complexity: 181,
depth: 13,
duration_s: 7,
+ operation_name: 'IntrospectionQuery',
used_fields: an_instance_of(Array),
used_deprecated_fields: an_instance_of(Array)
}
diff --git a/spec/requests/api/graphql/group/milestones_spec.rb b/spec/requests/api/graphql/group/milestones_spec.rb
index 380eaea17f8..a5b489d72fd 100644
--- a/spec/requests/api/graphql/group/milestones_spec.rb
+++ b/spec/requests/api/graphql/group/milestones_spec.rb
@@ -9,12 +9,14 @@ RSpec.describe 'Milestones through GroupQuery' do
let_it_be(:now) { Time.now }
describe 'Get list of milestones from a group' do
- let_it_be(:group) { create(:group) }
+ let_it_be(:parent_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent: parent_group) }
let_it_be(:milestone_1) { create(:milestone, group: group) }
let_it_be(:milestone_2) { create(:milestone, group: group, state: :closed, start_date: now, due_date: now + 1.day) }
let_it_be(:milestone_3) { create(:milestone, group: group, start_date: now, due_date: now + 2.days) }
let_it_be(:milestone_4) { create(:milestone, group: group, state: :closed, start_date: now - 2.days, due_date: now - 1.day) }
let_it_be(:milestone_from_other_group) { create(:milestone, group: create(:group)) }
+ let_it_be(:parent_milestone) { create(:milestone, group: parent_group) }
let(:milestone_data) { graphql_data['group']['milestones']['edges'] }
@@ -64,14 +66,32 @@ RSpec.describe 'Milestones through GroupQuery' do
accessible_group.add_developer(user)
end
- it 'returns milestones also from subgroups and subprojects visible to user' do
- fetch_milestones(user, args)
+ context 'when including descendants' do
+ let(:args) { { include_descendants: true } }
+
+ it 'returns milestones also from subgroups and subprojects visible to user' do
+ fetch_milestones(user, args)
+
+ expect_array_response(
+ milestone_1.to_global_id.to_s, milestone_2.to_global_id.to_s,
+ milestone_3.to_global_id.to_s, milestone_4.to_global_id.to_s,
+ submilestone_1.to_global_id.to_s, submilestone_2.to_global_id.to_s
+ )
+ end
+ end
+
+ context 'when including ancestors' do
+ let(:args) { { include_ancestors: true } }
- expect_array_response(
- milestone_1.to_global_id.to_s, milestone_2.to_global_id.to_s,
- milestone_3.to_global_id.to_s, milestone_4.to_global_id.to_s,
- submilestone_1.to_global_id.to_s, submilestone_2.to_global_id.to_s
- )
+ it 'returns milestones from ancestor groups' do
+ fetch_milestones(user, args)
+
+ expect_array_response(
+ milestone_1.to_global_id.to_s, milestone_2.to_global_id.to_s,
+ milestone_3.to_global_id.to_s, milestone_4.to_global_id.to_s,
+ parent_milestone.to_global_id.to_s
+ )
+ end
end
end
diff --git a/spec/requests/api/graphql/group/timelogs_spec.rb b/spec/requests/api/graphql/group/timelogs_spec.rb
new file mode 100644
index 00000000000..6e21a73afa9
--- /dev/null
+++ b/spec/requests/api/graphql/group/timelogs_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Timelogs through GroupQuery' do
+ include GraphqlHelpers
+
+ describe 'Get list of timelogs from a group's issues' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :public, group: group) }
+ let_it_be(:milestone) { create(:milestone, group: group) }
+ let_it_be(:issue) { create(:issue, project: project, milestone: milestone) }
+ let_it_be(:timelog1) { create(:timelog, issue: issue, user: user, spent_at: '2019-08-13 14:00:00') }
+ let_it_be(:timelog2) { create(:timelog, issue: issue, user: user, spent_at: '2019-08-10 08:00:00') }
+ let_it_be(:params) { { startTime: '2019-08-10 12:00:00', endTime: '2019-08-21 12:00:00' } }
+
+ let(:timelogs_data) { graphql_data['group']['timelogs']['nodes'] }
+
+ before do
+ group.add_developer(user)
+ end
+
+ context 'when the request is correct' do
+ before do
+ post_graphql(query, current_user: user)
+ end
+
+ it_behaves_like 'a working graphql query'
+
+ it 'returns timelogs successfully' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(graphql_errors).to be_nil
+ expect(timelog_array.size).to eq 1
+ end
+
+ it 'contains correct data', :aggregate_failures do
+ username = timelog_array.map { |data| data['user']['username'] }
+ spent_at = timelog_array.map { |data| data['spentAt'].to_time }
+ time_spent = timelog_array.map { |data| data['timeSpent'] }
+ issue_title = timelog_array.map { |data| data['issue']['title'] }
+ milestone_title = timelog_array.map { |data| data['issue']['milestone']['title'] }
+
+ expect(username).to eq([user.username])
+ expect(spent_at.first).to be_like_time(timelog1.spent_at)
+ expect(time_spent).to eq([timelog1.time_spent])
+ expect(issue_title).to eq([issue.title])
+ expect(milestone_title).to eq([milestone.title])
+ end
+
+ context 'when arguments with no time are present' do
+ let!(:timelog3) { create(:timelog, issue: issue, user: user, spent_at: '2019-08-10 15:00:00') }
+ let!(:timelog4) { create(:timelog, issue: issue, user: user, spent_at: '2019-08-21 15:00:00') }
+ let(:params) { { startDate: '2019-08-10', endDate: '2019-08-21' } }
+
+ it 'sets times as start of day and end of day' do
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(timelog_array.size).to eq 2
+ end
+ end
+ end
+
+ context 'when the request has errors' do
+ context 'when there are no timelogs present' do
+ before do
+ Timelog.delete_all
+ end
+
+ it 'returns empty result' do
+ post_graphql(query, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(graphql_errors).to be_nil
+ expect(timelogs_data).to be_empty
+ end
+ end
+
+ context 'when user has no permission to read group timelogs' do
+ it 'returns empty result' do
+ guest = create(:user)
+ group.add_guest(guest)
+ post_graphql(query, current_user: guest)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(graphql_errors).to be_nil
+ expect(timelogs_data).to be_empty
+ end
+ end
+ end
+ end
+
+ def timelog_array(extract_attribute = nil)
+ timelogs_data.map do |item|
+ extract_attribute ? item[extract_attribute] : item
+ end
+ end
+
+ def query(timelog_params = params)
+ timelog_nodes = <<~NODE
+ nodes {
+ spentAt
+ timeSpent
+ user {
+ username
+ }
+ issue {
+ title
+ milestone {
+ title
+ }
+ }
+ }
+ NODE
+
+ graphql_query_for(
+ :group,
+ { full_path: group.full_path },
+ query_graphql_field(:timelogs, timelog_params, timelog_nodes)
+ )
+ end
+end
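
For reference, under the date-only parameters the helper-built query above expands to roughly the following document; the group path is a placeholder and the node selection mirrors `timelog_nodes`.

    query = <<~GQL # sketch of the generated query; fullPath is a placeholder
      {
        group(fullPath: "my-group") {
          timelogs(startDate: "2019-08-10", endDate: "2019-08-21") {
            nodes {
              spentAt
              timeSpent
              user { username }
              issue { title milestone { title } }
            }
          }
        }
      }
    GQL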
diff --git a/spec/requests/api/graphql/group_query_spec.rb b/spec/requests/api/graphql/group_query_spec.rb
index 391bae4cfcf..8e4f808f794 100644
--- a/spec/requests/api/graphql/group_query_spec.rb
+++ b/spec/requests/api/graphql/group_query_spec.rb
@@ -17,7 +17,15 @@ RSpec.describe 'getting group information' do
# similar to the API "GET /groups/:id"
describe "Query group(fullPath)" do
def group_query(group)
- graphql_query_for('group', 'fullPath' => group.full_path)
+ fields = all_graphql_fields_for('Group')
+ # TODO: Set required timelogs args elsewhere https://gitlab.com/gitlab-org/gitlab/-/issues/325499
+ fields.selection['timelogs(startDate: "2021-03-01" endDate: "2021-03-30")'] = fields.selection.delete('timelogs')
+
+ graphql_query_for(
+ 'group',
+ { fullPath: group.full_path },
+ fields
+ )
end
it_behaves_like 'a working graphql query' do
diff --git a/spec/requests/api/graphql/mutations/boards/issues/issue_move_list_spec.rb b/spec/requests/api/graphql/mutations/boards/issues/issue_move_list_spec.rb
index e24ab0b07f2..46ec22e7ef8 100644
--- a/spec/requests/api/graphql/mutations/boards/issues/issue_move_list_spec.rb
+++ b/spec/requests/api/graphql/mutations/boards/issues/issue_move_list_spec.rb
@@ -21,7 +21,8 @@ RSpec.describe 'Reposition and move issue within board lists' do
let(:mutation_name) { mutation_class.graphql_name }
let(:mutation_result_identifier) { mutation_name.camelize(:lower) }
let(:current_user) { user }
- let(:params) { { board_id: board.to_global_id.to_s, project_path: project.full_path, iid: issue1.iid.to_s } }
+ let(:board_id) { global_id_of(board) }
+ let(:params) { { board_id: board_id, project_path: project.full_path, iid: issue1.iid.to_s } }
let(:issue_move_params) do
{
from_list_id: list1.id,
@@ -34,16 +35,44 @@ RSpec.describe 'Reposition and move issue within board lists' do
end
shared_examples 'returns an error' do
- it 'fails with error' do
- message = "The resource that you are attempting to access does not exist or you don't have "\
- "permission to perform this action"
+ let(:message) do
+ "The resource that you are attempting to access does not exist or you don't have " \
+ "permission to perform this action"
+ end
+ it 'fails with error' do
post_graphql_mutation(mutation(params), current_user: current_user)
expect(graphql_errors).to include(a_hash_including('message' => message))
end
end
+ context 'when the board_id is not a board' do
+ let(:board_id) { global_id_of(project) }
+ let(:issue_move_params) do
+ { move_after_id: existing_issue1.id, move_before_id: existing_issue2.id }
+ end
+
+ it_behaves_like 'returns an error' do
+ let(:message) { include('does not represent an instance of') }
+ end
+ end
+
+ # This test aims to distinguish between the failures to authorize
+ # :read_issue_board and :update_issue
+ context 'when the user cannot read the issue board' do
+ let(:issue_move_params) do
+ { move_after_id: existing_issue1.id, move_before_id: existing_issue2.id }
+ end
+
+ before do
+ allow(Ability).to receive(:allowed?).with(any_args).and_return(true)
+ allow(Ability).to receive(:allowed?).with(current_user, :read_issue_board, board).and_return(false)
+ end
+
+ it_behaves_like 'returns an error'
+ end
+
context 'when user has access to resources' do
context 'when repositioning an issue' do
let(:issue_move_params) { { move_after_id: existing_issue1.id, move_before_id: existing_issue2.id } }
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
index 97873b01338..bcede4d37dd 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
@@ -5,11 +5,12 @@ require 'spec_helper'
RSpec.describe 'Setting assignees of a merge request' do
include GraphqlHelpers
- let(:current_user) { create(:user) }
- let(:merge_request) { create(:merge_request) }
- let(:project) { merge_request.project }
- let(:assignee) { create(:user) }
- let(:assignee2) { create(:user) }
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:current_user) { create(:user, developer_projects: [project]) }
+ let_it_be(:assignee) { create(:user) }
+ let_it_be(:assignee2) { create(:user) }
+ let_it_be_with_reload(:merge_request) { create(:merge_request, source_project: project) }
+
let(:input) { { assignee_usernames: [assignee.username] } }
let(:expected_result) do
[{ 'username' => assignee.username }]
@@ -44,10 +45,19 @@ RSpec.describe 'Setting assignees of a merge request' do
mutation_response['mergeRequest']['assignees']['nodes']
end
+ def run_mutation!
+ recorder = ActiveRecord::QueryRecorder.new do
+ post_graphql_mutation(mutation, current_user: current_user)
+ end
+
+ expect(recorder.count).to be <= db_query_limit
+ end
+
before do
- project.add_developer(current_user)
project.add_developer(assignee)
project.add_developer(assignee2)
+
+ merge_request.update!(assignees: [])
end
it 'returns an error if the user is not allowed to update the merge request' do
@@ -56,23 +66,29 @@ RSpec.describe 'Setting assignees of a merge request' do
expect(graphql_errors).not_to be_empty
end
- it 'does not allow members without the right permission to add assignees' do
- user = create(:user)
- project.add_guest(user)
+ context 'when the current user does not have permission to add assignees' do
+ let(:current_user) { create(:user) }
+ let(:db_query_limit) { 27 }
- post_graphql_mutation(mutation, current_user: user)
+ it 'does not change the assignees' do
+ project.add_guest(current_user)
- expect(graphql_errors).not_to be_empty
+ expect { run_mutation! }.not_to change { merge_request.reset.assignees.pluck(:id) }
+
+ expect(graphql_errors).not_to be_empty
+ end
end
context 'with assignees already assigned' do
+ let(:db_query_limit) { 39 }
+
before do
merge_request.assignees = [assignee2]
merge_request.save!
end
it 'replaces the assignee' do
- post_graphql_mutation(mutation, current_user: current_user)
+ run_mutation!
expect(response).to have_gitlab_http_status(:success)
expect(mutation_assignee_nodes).to match_array(expected_result)
@@ -80,6 +96,7 @@ RSpec.describe 'Setting assignees of a merge request' do
end
context 'when passing an empty list of assignees' do
+ let(:db_query_limit) { 31 }
let(:input) { { assignee_usernames: [] } }
before do
@@ -88,7 +105,7 @@ RSpec.describe 'Setting assignees of a merge request' do
end
it 'removes assignee' do
- post_graphql_mutation(mutation, current_user: current_user)
+ run_mutation!
expect(response).to have_gitlab_http_status(:success)
expect(mutation_assignee_nodes).to eq([])
@@ -96,7 +113,9 @@ RSpec.describe 'Setting assignees of a merge request' do
end
context 'when passing append as true' do
- let(:input) { { assignee_usernames: [assignee2.username], operation_mode: Types::MutationOperationModeEnum.enum[:append] } }
+ let(:mode) { Types::MutationOperationModeEnum.enum[:append] }
+ let(:input) { { assignee_usernames: [assignee2.username], operation_mode: mode } }
+ let(:db_query_limit) { 20 }
before do
# In CE, APPEND is a NOOP as you can't have multiple assignees
@@ -108,7 +127,7 @@ RSpec.describe 'Setting assignees of a merge request' do
end
it 'does not replace the assignee in CE' do
- post_graphql_mutation(mutation, current_user: current_user)
+ run_mutation!
expect(response).to have_gitlab_http_status(:success)
expect(mutation_assignee_nodes).to match_array(expected_result)
@@ -116,7 +135,9 @@ RSpec.describe 'Setting assignees of a merge request' do
end
context 'when passing remove as true' do
- let(:input) { { assignee_usernames: [assignee.username], operation_mode: Types::MutationOperationModeEnum.enum[:remove] } }
+ let(:db_query_limit) { 31 }
+ let(:mode) { Types::MutationOperationModeEnum.enum[:remove] }
+ let(:input) { { assignee_usernames: [assignee.username], operation_mode: mode } }
let(:expected_result) { [] }
before do
@@ -125,7 +146,7 @@ RSpec.describe 'Setting assignees of a merge request' do
end
it 'removes the users in the list, while adding none' do
- post_graphql_mutation(mutation, current_user: current_user)
+ run_mutation!
expect(response).to have_gitlab_http_status(:success)
expect(mutation_assignee_nodes).to match_array(expected_result)
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_labels_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_labels_spec.rb
index 34d347c76fd..0d0cc66c52a 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/set_labels_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_labels_spec.rb
@@ -52,7 +52,7 @@ RSpec.describe 'Setting labels of a merge request' do
end
it 'sets the merge request labels, removing existing ones' do
- merge_request.update(labels: [label2])
+ merge_request.update!(labels: [label2])
post_graphql_mutation(mutation, current_user: current_user)
diff --git a/spec/requests/api/graphql/mutations/release_asset_links/delete_spec.rb b/spec/requests/api/graphql/mutations/release_asset_links/delete_spec.rb
new file mode 100644
index 00000000000..57489c82ec2
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/release_asset_links/delete_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Deletes a release asset link' do
+ include GraphqlHelpers
+
+ let_it_be(:project) { create(:project, :private, :repository) }
+ let_it_be(:release) { create(:release, project: project) }
+ let_it_be(:maintainer) { create(:user).tap { |u| project.add_maintainer(u) } }
+ let_it_be(:release_link) { create(:release_link, release: release) }
+
+ let(:current_user) { maintainer }
+ let(:mutation_name) { :release_asset_link_delete }
+ let(:mutation_arguments) { { id: release_link.to_global_id.to_s } }
+
+ let(:mutation) do
+ graphql_mutation(mutation_name, mutation_arguments, <<~FIELDS)
+ link {
+ id
+ name
+ url
+ linkType
+ directAssetUrl
+ external
+ }
+ errors
+ FIELDS
+ end
+
+ let(:delete_link) { post_graphql_mutation(mutation, current_user: current_user) }
+ let(:mutation_response) { graphql_mutation_response(mutation_name)&.with_indifferent_access }
+
+ it 'deletes the release asset link and returns the deleted link', :aggregate_failures do
+ delete_link
+
+ expected_response = {
+ id: release_link.to_global_id.to_s,
+ name: release_link.name,
+ url: release_link.url,
+ linkType: release_link.link_type.upcase,
+ directAssetUrl: end_with(release_link.filepath),
+ external: true
+ }.with_indifferent_access
+
+ expect(mutation_response[:link]).to match(expected_response)
+ expect(mutation_response[:errors]).to eq([])
+ end
+end
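
For context, `graphql_mutation` renders the arguments above into roughly this document (assuming the usual camel-casing of the mutation name; the global ID is a placeholder):

    mutation = <<~GQL # sketch only; the spec builds this via graphql_mutation
      mutation {
        releaseAssetLinkDelete(input: { id: "gid://gitlab/Releases::Link/1" }) {
          link { id name url linkType directAssetUrl external }
          errors
        }
      }
    GQL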
diff --git a/spec/requests/api/graphql/mutations/snippets/create_spec.rb b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
index 1c2260070ec..d944c9e9e57 100644
--- a/spec/requests/api/graphql/mutations/snippets/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/create_spec.rb
@@ -211,5 +211,9 @@ RSpec.describe 'Creating a Snippet' do
end
end
end
+
+ it_behaves_like 'has spam protection' do
+ let(:mutation_class) { ::Mutations::Snippets::Create }
+ end
end
end
diff --git a/spec/requests/api/graphql/mutations/snippets/update_spec.rb b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
index 43dc8d8bc44..28ab593526a 100644
--- a/spec/requests/api/graphql/mutations/snippets/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/snippets/update_spec.rb
@@ -157,6 +157,9 @@ RSpec.describe 'Updating a Snippet' do
it_behaves_like 'graphql update actions'
it_behaves_like 'when the snippet is not found'
it_behaves_like 'snippet edit usage data counters'
+ it_behaves_like 'has spam protection' do
+ let(:mutation_class) { ::Mutations::Snippets::Update }
+ end
end
describe 'ProjectSnippet' do
@@ -201,6 +204,10 @@ RSpec.describe 'Updating a Snippet' do
end
it_behaves_like 'snippet edit usage data counters'
+
+ it_behaves_like 'has spam protection' do
+ let(:mutation_class) { ::Mutations::Snippets::Update }
+ end
end
it_behaves_like 'when the snippet is not found'
diff --git a/spec/requests/api/graphql/packages/package_spec.rb b/spec/requests/api/graphql/packages/package_spec.rb
index 654215041cb..a0131c7733e 100644
--- a/spec/requests/api/graphql/packages/package_spec.rb
+++ b/spec/requests/api/graphql/packages/package_spec.rb
@@ -2,33 +2,47 @@
require 'spec_helper'
RSpec.describe 'package details' do
- using RSpec::Parameterized::TableSyntax
include GraphqlHelpers
let_it_be(:project) { create(:project) }
- let_it_be(:package) { create(:composer_package, project: project) }
+ let_it_be(:composer_package) { create(:composer_package, project: project) }
let_it_be(:composer_json) { { name: 'name', type: 'type', license: 'license', version: 1 } }
let_it_be(:composer_metadatum) do
# we are forced to manually create the metadatum, without using the factory to force the sha to be a string
# and avoid an error where gitaly can't find the repository
- create(:composer_metadatum, package: package, target_sha: 'foo_sha', composer_json: composer_json)
+ create(:composer_metadatum, package: composer_package, target_sha: 'foo_sha', composer_json: composer_json)
end
let(:depth) { 3 }
- let(:excluded) { %w[metadata apiFuzzingCiConfiguration pipeline] }
+ let(:excluded) { %w[metadata apiFuzzingCiConfiguration pipeline packageFiles] }
+ let(:metadata) { query_graphql_fragment('ComposerMetadata') }
+ let(:package_files) { all_graphql_fields_for('PackageFile') }
+ let(:package_files_metadata) { query_graphql_fragment('ConanFileMetadata') }
let(:query) do
graphql_query_for(:package, { id: package_global_id }, <<~FIELDS)
- #{all_graphql_fields_for('Package', max_depth: depth, excluded: excluded)}
+ #{all_graphql_fields_for('PackageDetailsType', max_depth: depth, excluded: excluded)}
metadata {
- #{query_graphql_fragment('ComposerMetadata')}
+ #{metadata}
+ }
+ packageFiles {
+ nodes {
+ #{package_files}
+ fileMetadata {
+ #{package_files_metadata}
+ }
+ }
}
FIELDS
end
let(:user) { project.owner }
- let(:package_global_id) { global_id_of(package) }
+ let(:package_global_id) { global_id_of(composer_package) }
let(:package_details) { graphql_data_at(:package) }
+ let(:metadata_response) { graphql_data_at(:package, :metadata) }
+ let(:package_files_response) { graphql_data_at(:package, :package_files, :nodes) }
+ let(:first_file_response) { graphql_data_at(:package, :package_files, :nodes, 0) }
+ let(:first_file_response_metadata) { graphql_data_at(:package, :package_files, :nodes, 0, :file_metadata) }
subject { post_graphql(query, current_user: user) }
@@ -40,15 +54,68 @@ RSpec.describe 'package details' do
it 'matches the JSON schema' do
expect(package_details).to match_schema('graphql/packages/package_details')
end
+ end
+
+ describe 'Packages Metadata' do
+ before do
+ subject
+ end
- it 'includes the fields of the correct package' do
- expect(package_details).to include(
- 'id' => package_global_id,
- 'metadata' => {
+ describe 'Composer' do
+ it 'has the correct metadata' do
+ expect(metadata_response).to include(
'targetSha' => 'foo_sha',
'composerJson' => composer_json.transform_keys(&:to_s).transform_values(&:to_s)
- }
- )
+ )
+ end
+
+ it 'does not have files' do
+ expect(package_files_response).to be_empty
+ end
+ end
+
+ describe 'Conan' do
+ let_it_be(:conan_package) { create(:conan_package, project: project) }
+
+ let(:package_global_id) { global_id_of(conan_package) }
+ let(:metadata) { query_graphql_fragment('ConanMetadata') }
+ let(:first_file) { conan_package.package_files.find { |f| global_id_of(f) == first_file_response['id'] } }
+
+ it 'has the correct metadata' do
+ expect(metadata_response).to include(
+ 'id' => global_id_of(conan_package.conan_metadatum),
+ 'recipe' => conan_package.conan_metadatum.recipe,
+ 'packageChannel' => conan_package.conan_metadatum.package_channel,
+ 'packageUsername' => conan_package.conan_metadatum.package_username,
+ 'recipePath' => conan_package.conan_metadatum.recipe_path
+ )
+ end
+
+ it 'has the right number of files' do
+ expect(package_files_response.length).to be(conan_package.package_files.length)
+ end
+
+ it 'has the basic package files data' do
+ expect(first_file_response).to include(
+ 'id' => global_id_of(first_file),
+ 'fileName' => first_file.file_name,
+ 'size' => first_file.size.to_s,
+ 'downloadPath' => first_file.download_path,
+ 'fileSha1' => first_file.file_sha1,
+ 'fileMd5' => first_file.file_md5,
+ 'fileSha256' => first_file.file_sha256
+ )
+ end
+
+ it 'has the correct file metadata' do
+ expect(first_file_response_metadata).to include(
+ 'id' => global_id_of(first_file.conan_file_metadatum),
+ 'packageRevision' => first_file.conan_file_metadatum.package_revision,
+ 'conanPackageReference' => first_file.conan_file_metadatum.conan_package_reference,
+ 'recipeRevision' => first_file.conan_file_metadatum.recipe_revision,
+ 'conanFileType' => first_file.conan_file_metadatum.conan_file_type.upcase
+ )
+ end
end
end
@@ -56,7 +123,7 @@ RSpec.describe 'package details' do
let(:depth) { 3 }
let(:excluded) { %w[metadata project tags pipelines] } # to limit the query complexity
- let_it_be(:siblings) { create_list(:composer_package, 2, project: project, name: package.name) }
+ let_it_be(:siblings) { create_list(:composer_package, 2, project: project, name: composer_package.name) }
it 'includes the sibling versions' do
subject
@@ -73,8 +140,32 @@ RSpec.describe 'package details' do
subject
expect(graphql_data_at(:package, :versions, :nodes, :version)).to be_present
- expect(graphql_data_at(:package, :versions, :nodes, :versions)).not_to be_present
+ expect(graphql_data_at(:package, :versions, :nodes, :versions, :nodes)).to be_empty
end
end
end
+
+ context 'with a batched query' do
+ let_it_be(:conan_package) { create(:conan_package, project: project) }
+
+ let(:batch_query) do
+ <<~QUERY
+ {
+ a: package(id: "#{global_id_of(composer_package)}") { name }
+ b: package(id: "#{global_id_of(conan_package)}") { name }
+ }
+ QUERY
+ end
+
+ let(:a_packages_names) { graphql_data_at(:a, :packages, :nodes, :name) }
+
+ it 'returns an error for the second package and data for the first' do
+ post_graphql(batch_query, current_user: user)
+
+ expect(graphql_data_at(:a, :name)).to eq(composer_package.name)
+
+ expect_graphql_errors_to_include [/Package details can be requested only for one package at a time/]
+ expect(graphql_data_at(:b)).to be(nil)
+ end
+ end
end
diff --git a/spec/requests/api/graphql/project/alert_management/alert/assignees_spec.rb b/spec/requests/api/graphql/project/alert_management/alert/assignees_spec.rb
index 9ab94f1d749..a59402208ec 100644
--- a/spec/requests/api/graphql/project/alert_management/alert/assignees_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alert/assignees_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe 'getting Alert Management Alert Assignees' do
end
let(:alerts) { graphql_data.dig('project', 'alertManagementAlerts', 'nodes') }
- let(:assignees) { alerts.map { |alert| [alert['iid'], alert['assignees']['nodes']] }.to_h }
+ let(:assignees) { alerts.to_h { |alert| [alert['iid'], alert['assignees']['nodes']] } }
let(:first_assignees) { assignees[first_alert.iid.to_s] }
let(:second_assignees) { assignees[second_alert.iid.to_s] }
diff --git a/spec/requests/api/graphql/project/alert_management/alert/notes_spec.rb b/spec/requests/api/graphql/project/alert_management/alert/notes_spec.rb
index 5d46f370756..72d185144ef 100644
--- a/spec/requests/api/graphql/project/alert_management/alert/notes_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alert/notes_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe 'getting Alert Management Alert Notes' do
end
let(:alerts_result) { graphql_data.dig('project', 'alertManagementAlerts', 'nodes') }
- let(:notes_result) { alerts_result.map { |alert| [alert['iid'], alert['notes']['nodes']] }.to_h }
+ let(:notes_result) { alerts_result.to_h { |alert| [alert['iid'], alert['notes']['nodes']] } }
let(:first_notes_result) { notes_result[first_alert.iid.to_s] }
let(:second_notes_result) { notes_result[second_alert.iid.to_s] }
diff --git a/spec/requests/api/graphql/project/alert_management/alert/todos_spec.rb b/spec/requests/api/graphql/project/alert_management/alert/todos_spec.rb
index 3a9077061ad..ca58079fdfe 100644
--- a/spec/requests/api/graphql/project/alert_management/alert/todos_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/alert/todos_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe 'getting Alert Management Alert Assignees' do
end
let(:gql_alerts) { graphql_data.dig('project', 'alertManagementAlerts', 'nodes') }
- let(:gql_todos) { gql_alerts.map { |gql_alert| [gql_alert['iid'], gql_alert['todos']['nodes']] }.to_h }
+ let(:gql_todos) { gql_alerts.to_h { |gql_alert| [gql_alert['iid'], gql_alert['todos']['nodes']] } }
let(:gql_alert_todo) { gql_todos[alert.iid.to_s].first }
let(:gql_other_alert_todo) { gql_todos[other_alert.iid.to_s].first }
diff --git a/spec/requests/api/graphql/project/alert_management/integrations_spec.rb b/spec/requests/api/graphql/project/alert_management/integrations_spec.rb
index b13805a61ce..0e029aee9e8 100644
--- a/spec/requests/api/graphql/project/alert_management/integrations_spec.rb
+++ b/spec/requests/api/graphql/project/alert_management/integrations_spec.rb
@@ -13,6 +13,8 @@ RSpec.describe 'getting Alert Management Integrations' do
let_it_be(:inactive_http_integration) { create(:alert_management_http_integration, :inactive, project: project) }
let_it_be(:other_project_http_integration) { create(:alert_management_http_integration) }
+ let(:params) { {} }
+
let(:fields) do
<<~QUERY
nodes {
@@ -25,7 +27,7 @@ RSpec.describe 'getting Alert Management Integrations' do
graphql_query_for(
'project',
{ 'fullPath' => project.full_path },
- query_graphql_field('alertManagementIntegrations', {}, fields)
+ query_graphql_field('alertManagementIntegrations', params, fields)
)
end
@@ -50,34 +52,78 @@ RSpec.describe 'getting Alert Management Integrations' do
post_graphql(query, current_user: current_user)
end
- let(:http_integration) { integrations.first }
- let(:prometheus_integration) { integrations.second }
+ context 'when no extra params given' do
+ let(:http_integration) { integrations.first }
+ let(:prometheus_integration) { integrations.second }
- it_behaves_like 'a working graphql query'
+ it_behaves_like 'a working graphql query'
+
+ it { expect(integrations.size).to eq(2) }
+
+ it 'returns the correct properties of the integrations' do
+ expect(http_integration).to include(
+ 'id' => global_id_of(active_http_integration),
+ 'type' => 'HTTP',
+ 'name' => active_http_integration.name,
+ 'active' => active_http_integration.active,
+ 'token' => active_http_integration.token,
+ 'url' => active_http_integration.url,
+ 'apiUrl' => nil
+ )
- it { expect(integrations.size).to eq(2) }
-
- it 'returns the correct properties of the integrations' do
- expect(http_integration).to include(
- 'id' => GitlabSchema.id_from_object(active_http_integration).to_s,
- 'type' => 'HTTP',
- 'name' => active_http_integration.name,
- 'active' => active_http_integration.active,
- 'token' => active_http_integration.token,
- 'url' => active_http_integration.url,
- 'apiUrl' => nil
- )
-
- expect(prometheus_integration).to include(
- 'id' => GitlabSchema.id_from_object(prometheus_service).to_s,
- 'type' => 'PROMETHEUS',
- 'name' => 'Prometheus',
- 'active' => prometheus_service.manual_configuration?,
- 'token' => project_alerting_setting.token,
- 'url' => "http://localhost/#{project.full_path}/prometheus/alerts/notify.json",
- 'apiUrl' => prometheus_service.api_url
- )
+ expect(prometheus_integration).to include(
+ 'id' => global_id_of(prometheus_service),
+ 'type' => 'PROMETHEUS',
+ 'name' => 'Prometheus',
+ 'active' => prometheus_service.manual_configuration?,
+ 'token' => project_alerting_setting.token,
+ 'url' => "http://localhost/#{project.full_path}/prometheus/alerts/notify.json",
+ 'apiUrl' => prometheus_service.api_url
+ )
+ end
end
+
+ context 'when HTTP Integration ID is given' do
+ let(:params) { { id: global_id_of(active_http_integration) } }
+
+ it_behaves_like 'a working graphql query'
+
+ it { expect(integrations).to be_one }
+
+ it 'returns the correct properties of the HTTP integration' do
+ expect(integrations.first).to include(
+ 'id' => global_id_of(active_http_integration),
+ 'type' => 'HTTP',
+ 'name' => active_http_integration.name,
+ 'active' => active_http_integration.active,
+ 'token' => active_http_integration.token,
+ 'url' => active_http_integration.url,
+ 'apiUrl' => nil
+ )
+ end
+ end
+
+ context 'when Prometheus Integration ID is given' do
+ let(:params) { { id: global_id_of(prometheus_service) } }
+
+ it_behaves_like 'a working graphql query'
+
+ it { expect(integrations).to be_one }
+
+ it 'returns the correct properties of the Prometheus Integration' do
+ expect(integrations.first).to include(
+ 'id' => global_id_of(prometheus_service),
+ 'type' => 'PROMETHEUS',
+ 'name' => 'Prometheus',
+ 'active' => prometheus_service.manual_configuration?,
+ 'token' => project_alerting_setting.token,
+ 'url' => "http://localhost/#{project.full_path}/prometheus/alerts/notify.json",
+ 'apiUrl' => prometheus_service.api_url
+ )
+ end
+ end
+
+ it_behaves_like 'GraphQL query with several integrations requested', graphql_query_name: 'alertManagementIntegrations'
end
end
end
diff --git a/spec/requests/api/graphql/project/container_repositories_spec.rb b/spec/requests/api/graphql/project/container_repositories_spec.rb
index 5ffd48a7bc4..3ad56223b61 100644
--- a/spec/requests/api/graphql/project/container_repositories_spec.rb
+++ b/spec/requests/api/graphql/project/container_repositories_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe 'getting container repositories in a project' do
<<~GQL
edges {
node {
- #{all_graphql_fields_for('container_repositories'.classify, excluded: ['pipeline'])}
+ #{all_graphql_fields_for('container_repositories'.classify, excluded: %w(pipeline jobs))}
}
}
GQL
diff --git a/spec/requests/api/graphql/project/issues_spec.rb b/spec/requests/api/graphql/project/issues_spec.rb
index 9c915075c42..dd9d44136e5 100644
--- a/spec/requests/api/graphql/project/issues_spec.rb
+++ b/spec/requests/api/graphql/project/issues_spec.rb
@@ -5,14 +5,15 @@ require 'spec_helper'
RSpec.describe 'getting an issue list for a project' do
include GraphqlHelpers
- let(:issues_data) { graphql_data['project']['issues']['edges'] }
-
let_it_be(:project) { create(:project, :repository, :public) }
let_it_be(:current_user) { create(:user) }
let_it_be(:issue_a, reload: true) { create(:issue, project: project, discussion_locked: true) }
let_it_be(:issue_b, reload: true) { create(:issue, :with_alert, project: project) }
let_it_be(:issues, reload: true) { [issue_a, issue_b] }
+ let(:issues_data) { graphql_data['project']['issues']['edges'] }
+ let(:issue_filter_params) { {} }
+
let(:fields) do
<<~QUERY
edges {
@@ -27,7 +28,7 @@ RSpec.describe 'getting an issue list for a project' do
graphql_query_for(
'project',
{ 'fullPath' => project.full_path },
- query_graphql_field('issues', {}, fields)
+ query_graphql_field('issues', issue_filter_params, fields)
)
end
@@ -50,6 +51,16 @@ RSpec.describe 'getting an issue list for a project' do
expect(issues_data[1]['node']['discussionLocked']).to eq(true)
end
+ context 'when both assignee_username filters are provided' do
+ let(:issue_filter_params) { { assignee_username: current_user.username, assignee_usernames: [current_user.username] } }
+
+ it 'returns a mutually exclusive param error' do
+ post_graphql(query, current_user: current_user)
+
+ expect_graphql_errors_to_include('only one of [assigneeUsernames, assigneeUsername] arguments is allowed at the same time.')
+ end
+ end
+
context 'when limiting the number of results' do
let(:query) do
<<~GQL
@@ -76,7 +87,7 @@ RSpec.describe 'getting an issue list for a project' do
end
end
- context 'no limit is provided' do
+ context 'when no limit is provided' do
let(:issue_limit) { nil }
it 'returns all issues' do
@@ -143,13 +154,15 @@ RSpec.describe 'getting an issue list for a project' do
let_it_be(:data_path) { [:project, :issues] }
def pagination_query(params)
- graphql_query_for(:project, { full_path: sort_project.full_path },
+ graphql_query_for(
+ :project,
+ { full_path: sort_project.full_path },
query_graphql_field(:issues, params, "#{page_info} nodes { iid }")
)
end
def pagination_results_data(data)
- data.map { |issue| issue.dig('iid').to_i }
+ data.map { |issue| issue['iid'].to_i }
end
context 'when sorting by due date' do
@@ -189,27 +202,38 @@ RSpec.describe 'getting an issue list for a project' do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { :RELATIVE_POSITION_ASC }
let(:first_param) { 2 }
- let(:expected_results) { [relative_issue5.iid, relative_issue3.iid, relative_issue1.iid, relative_issue4.iid, relative_issue2.iid] }
+ let(:expected_results) do
+ [
+ relative_issue5.iid, relative_issue3.iid, relative_issue1.iid,
+ relative_issue4.iid, relative_issue2.iid
+ ]
+ end
end
end
end
context 'when sorting by priority' do
let_it_be(:sort_project) { create(:project, :public) }
- let_it_be(:early_milestone) { create(:milestone, project: sort_project, due_date: 10.days.from_now) }
- let_it_be(:late_milestone) { create(:milestone, project: sort_project, due_date: 30.days.from_now) }
- let_it_be(:priority_label1) { create(:label, project: sort_project, priority: 1) }
- let_it_be(:priority_label2) { create(:label, project: sort_project, priority: 5) }
- let_it_be(:priority_issue1) { create(:issue, project: sort_project, labels: [priority_label1], milestone: late_milestone) }
- let_it_be(:priority_issue2) { create(:issue, project: sort_project, labels: [priority_label2]) }
- let_it_be(:priority_issue3) { create(:issue, project: sort_project, milestone: early_milestone) }
- let_it_be(:priority_issue4) { create(:issue, project: sort_project) }
+ let_it_be(:on_project) { { project: sort_project } }
+ let_it_be(:early_milestone) { create(:milestone, **on_project, due_date: 10.days.from_now) }
+ let_it_be(:late_milestone) { create(:milestone, **on_project, due_date: 30.days.from_now) }
+ let_it_be(:priority_1) { create(:label, **on_project, priority: 1) }
+ let_it_be(:priority_2) { create(:label, **on_project, priority: 5) }
+ let_it_be(:priority_issue1) { create(:issue, **on_project, labels: [priority_1], milestone: late_milestone) }
+ let_it_be(:priority_issue2) { create(:issue, **on_project, labels: [priority_2]) }
+ let_it_be(:priority_issue3) { create(:issue, **on_project, milestone: early_milestone) }
+ let_it_be(:priority_issue4) { create(:issue, **on_project) }
context 'when ascending' do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { :PRIORITY_ASC }
let(:first_param) { 2 }
- let(:expected_results) { [priority_issue3.iid, priority_issue1.iid, priority_issue2.iid, priority_issue4.iid] }
+ let(:expected_results) do
+ [
+ priority_issue3.iid, priority_issue1.iid,
+ priority_issue2.iid, priority_issue4.iid
+ ]
+ end
end
end
@@ -217,7 +241,9 @@ RSpec.describe 'getting an issue list for a project' do
it_behaves_like 'sorted paginated query' do
let(:sort_param) { :PRIORITY_DESC }
let(:first_param) { 2 }
- let(:expected_results) { [priority_issue1.iid, priority_issue3.iid, priority_issue2.iid, priority_issue4.iid] }
+ let(:expected_results) do
+ [priority_issue1.iid, priority_issue3.iid, priority_issue2.iid, priority_issue4.iid]
+ end
end
end
end
@@ -275,7 +301,7 @@ RSpec.describe 'getting an issue list for a project' do
end
end
- context 'fetching alert management alert' do
+ context 'when fetching alert management alert' do
let(:fields) do
<<~QUERY
edges {
@@ -297,7 +323,7 @@ RSpec.describe 'getting an issue list for a project' do
it 'avoids N+1 queries' do
control = ActiveRecord::QueryRecorder.new { post_graphql(query, current_user: current_user) }
- create(:alert_management_alert, :with_issue, project: project )
+ create(:alert_management_alert, :with_issue, project: project)
expect { post_graphql(query, current_user: current_user) }.not_to exceed_query_limit(control)
end
@@ -312,7 +338,7 @@ RSpec.describe 'getting an issue list for a project' do
end
end
- context 'fetching labels' do
+ context 'when fetching labels' do
let(:fields) do
<<~QUERY
edges {
@@ -362,7 +388,7 @@ RSpec.describe 'getting an issue list for a project' do
end
end
- context 'fetching assignees' do
+ context 'when fetching assignees' do
let(:fields) do
<<~QUERY
edges {
@@ -420,9 +446,10 @@ RSpec.describe 'getting an issue list for a project' do
query = graphql_query_for(
:project,
{ full_path: project.full_path },
- query_graphql_field(:issues, search_params, [
+ query_graphql_field(
+ :issues, search_params,
query_graphql_field(:nodes, nil, requested_fields)
- ])
+ )
)
post_graphql(query, current_user: current_user)
end
@@ -448,5 +475,16 @@ RSpec.describe 'getting an issue list for a project' do
include_examples 'N+1 query check'
end
+
+ context 'when requesting `timelogs`' do
+ let(:requested_fields) { 'timelogs { nodes { timeSpent } }' }
+
+ before do
+ create_list(:issue_timelog, 2, issue: issue_a)
+ create(:issue_timelog, issue: issue_b)
+ end
+
+ include_examples 'N+1 query check'
+ end
end
end
diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb
index e32899c600e..15551005502 100644
--- a/spec/requests/api/graphql/project/merge_request_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request_spec.rb
@@ -5,21 +5,25 @@ require 'spec_helper'
RSpec.describe 'getting merge request information nested in a project' do
include GraphqlHelpers
- let(:project) { create(:project, :repository, :public) }
- let(:current_user) { create(:user) }
- let(:merge_request_graphql_data) { graphql_data['project']['mergeRequest'] }
- let!(:merge_request) { create(:merge_request, source_project: project) }
- let(:mr_fields) { all_graphql_fields_for('MergeRequest', excluded: ['pipeline']) }
+ let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be_with_reload(:merge_request) { create(:merge_request, source_project: project) }
+
+ let(:merge_request_graphql_data) { graphql_data_at(:project, :merge_request) }
+ let(:mr_fields) { all_graphql_fields_for('MergeRequest', max_depth: 1) }
let(:query) do
graphql_query_for(
- 'project',
- { 'fullPath' => project.full_path },
- query_graphql_field('mergeRequest', { iid: merge_request.iid.to_s }, mr_fields)
+ :project,
+ { full_path: project.full_path },
+ query_graphql_field(:merge_request, { iid: merge_request.iid.to_s }, mr_fields)
)
end
it_behaves_like 'a working graphql query' do
+ # we exclude Project.pipeline because it needs arguments
+ let(:mr_fields) { all_graphql_fields_for('MergeRequest', excluded: %w[jobs pipeline]) }
+
before do
post_graphql(query, current_user: current_user)
end
@@ -38,13 +42,17 @@ RSpec.describe 'getting merge request information nested in a project' do
expect(merge_request_graphql_data['webUrl']).to be_present
end
- it 'includes author' do
- post_graphql(query, current_user: current_user)
+ context 'when selecting author' do
+ let(:mr_fields) { 'author { username }' }
- expect(merge_request_graphql_data['author']['username']).to eq(merge_request.author.username)
+ it 'includes author' do
+ post_graphql(query, current_user: current_user)
+
+ expect(merge_request_graphql_data['author']['username']).to eq(merge_request.author.username)
+ end
end
- context 'the merge_request has reviewers' do
+ context 'when the merge_request has reviewers' do
let(:mr_fields) do
<<~SELECT
reviewers { nodes { id username } }
@@ -68,63 +76,76 @@ RSpec.describe 'getting merge request information nested in a project' do
end
end
- it 'includes diff stats' do
- be_natural = an_instance_of(Integer).and(be >= 0)
-
- post_graphql(query, current_user: current_user)
-
- sums = merge_request_graphql_data['diffStats'].reduce([0, 0, 0]) do |(a, d, c), node|
- a_, d_ = node.values_at('additions', 'deletions')
- [a + a_, d + d_, c + a_ + d_]
+ describe 'diffStats' do
+ let(:mr_fields) do
+ <<~FIELDS
+ diffStats { #{all_graphql_fields_for('DiffStats')} }
+ diffStatsSummary { #{all_graphql_fields_for('DiffStatsSummary')} }
+ FIELDS
end
- expect(merge_request_graphql_data).to include(
- 'diffStats' => all(a_hash_including('path' => String, 'additions' => be_natural, 'deletions' => be_natural)),
- 'diffStatsSummary' => a_hash_including(
- 'fileCount' => merge_request.diff_stats.count,
- 'additions' => be_natural,
- 'deletions' => be_natural,
- 'changes' => be_natural
- )
- )
+ it 'includes diff stats' do
+ be_natural = an_instance_of(Integer).and(be >= 0)
- # diff_stats is consistent with summary
- expect(merge_request_graphql_data['diffStatsSummary']
- .values_at('additions', 'deletions', 'changes')).to eq(sums)
-
- # diff_stats_summary is internally consistent
- expect(merge_request_graphql_data['diffStatsSummary']
- .values_at('additions', 'deletions').sum)
- .to eq(merge_request_graphql_data.dig('diffStatsSummary', 'changes'))
- .and be_positive
- end
+ post_graphql(query, current_user: current_user)
- context 'requesting a specific diff stat' do
- let(:diff_stat) { merge_request.diff_stats.first }
+ sums = merge_request_graphql_data['diffStats'].reduce([0, 0, 0]) do |(a, d, c), node|
+ a_, d_ = node.values_at('additions', 'deletions')
+ [a + a_, d + d_, c + a_ + d_]
+ end
- let(:query) do
- graphql_query_for(:project, { full_path: project.full_path },
- query_graphql_field(:merge_request, { iid: merge_request.iid.to_s }, [
- query_graphql_field(:diff_stats, { path: diff_stat.path }, all_graphql_fields_for('DiffStats'))
- ])
+ expect(merge_request_graphql_data).to include(
+ 'diffStats' => all(a_hash_including('path' => String, 'additions' => be_natural, 'deletions' => be_natural)),
+ 'diffStatsSummary' => a_hash_including(
+ 'fileCount' => merge_request.diff_stats.count,
+ 'additions' => be_natural,
+ 'deletions' => be_natural,
+ 'changes' => be_natural
+ )
)
+
+ # diff_stats is consistent with summary
+ expect(merge_request_graphql_data['diffStatsSummary']
+ .values_at('additions', 'deletions', 'changes')).to eq(sums)
+
+ # diff_stats_summary is internally consistent
+ expect(merge_request_graphql_data['diffStatsSummary']
+ .values_at('additions', 'deletions').sum)
+ .to eq(merge_request_graphql_data.dig('diffStatsSummary', 'changes'))
+ .and be_positive
end
- it 'includes only the requested stats' do
- post_graphql(query, current_user: current_user)
+ context 'when requesting a specific diff stat' do
+ let(:diff_stat) { merge_request.diff_stats.first }
- expect(merge_request_graphql_data).to include(
- 'diffStats' => contain_exactly(
- a_hash_including('path' => diff_stat.path, 'additions' => diff_stat.additions, 'deletions' => diff_stat.deletions)
+ let(:mr_fields) do
+ query_graphql_field(
+ :diff_stats,
+ { path: diff_stat.path },
+ all_graphql_fields_for('DiffStats')
)
- )
+ end
+
+ it 'includes only the requested stats' do
+ post_graphql(query, current_user: current_user)
+
+ expect(merge_request_graphql_data).to include(
+ 'diffStats' => contain_exactly(
+ a_hash_including(
+ 'path' => diff_stat.path,
+ 'additions' => diff_stat.additions,
+ 'deletions' => diff_stat.deletions
+ )
+ )
+ )
+ end
end
end
it 'includes correct mergedAt value when merged' do
time = 1.week.ago
merge_request.mark_as_merged
- merge_request.metrics.update_columns(merged_at: time)
+ merge_request.metrics.update!(merged_at: time)
post_graphql(query, current_user: current_user)
retrieved = merge_request_graphql_data['mergedAt']
@@ -139,7 +160,11 @@ RSpec.describe 'getting merge request information nested in a project' do
expect(retrieved).to be_nil
end
- context 'permissions on the merge request' do
+ describe 'permissions on the merge request' do
+ let(:mr_fields) do
+ "userPermissions { #{all_graphql_fields_for('MergeRequestPermissions')} }"
+ end
+
it 'includes the permissions for the current user on a public project' do
expected_permissions = {
'readMergeRequest' => true,
@@ -162,8 +187,6 @@ RSpec.describe 'getting merge request information nested in a project' do
end
context 'when the user does not have access to the merge request' do
- let(:project) { create(:project, :public, :repository) }
-
it 'returns nil' do
project.project_feature.update!(merge_requests_access_level: ProjectFeature::PRIVATE)
@@ -174,13 +197,23 @@ RSpec.describe 'getting merge request information nested in a project' do
end
context 'when there are pipelines' do
- before do
+ let_it_be(:pipeline) do
create(
:ci_pipeline,
project: merge_request.source_project,
ref: merge_request.source_branch,
sha: merge_request.diff_head_sha
)
+ end
+
+ let(:mr_fields) do
+ <<~FIELDS
+ headPipeline { id }
+ pipelines { nodes { id } }
+ FIELDS
+ end
+
+ before do
merge_request.update_head_pipeline
end
@@ -193,20 +226,12 @@ RSpec.describe 'getting merge request information nested in a project' do
it 'has pipeline connections' do
post_graphql(query, current_user: current_user)
- expect(merge_request_graphql_data['pipelines']['edges'].size).to eq(1)
+ expect(merge_request_graphql_data['pipelines']['nodes']).to be_one
end
end
context 'when limiting the number of results' do
- let(:merge_requests_graphql_data) { graphql_data['project']['mergeRequests']['edges'] }
-
- let!(:merge_requests) do
- [
- create(:merge_request, source_project: project, source_branch: 'branch-1'),
- create(:merge_request, source_project: project, source_branch: 'branch-2'),
- create(:merge_request, source_project: project, source_branch: 'branch-3')
- ]
- end
+ let(:merge_requests_graphql_data) { graphql_data_at(:project, :merge_requests, :edges) }
let(:fields) do
<<~QUERY
@@ -228,6 +253,10 @@ RSpec.describe 'getting merge request information nested in a project' do
end
it 'returns the correct number of results' do
+ create(:merge_request, source_project: project, source_branch: 'branch-1')
+ create(:merge_request, source_project: project, source_branch: 'branch-2')
+ create(:merge_request, source_project: project, source_branch: 'branch-3')
+
post_graphql(query, current_user: current_user)
expect(merge_requests_graphql_data.size).to eq 2
@@ -281,4 +310,129 @@ RSpec.describe 'getting merge request information nested in a project' do
)
end
end
+
+ context 'when requesting information about MR interactions' do
+ let_it_be(:user) { create(:user) }
+
+ let(:selected_fields) { all_graphql_fields_for('UserMergeRequestInteraction') }
+
+ let(:mr_fields) do
+ query_nodes(
+ :reviewers,
+ query_graphql_field(:merge_request_interaction, nil, selected_fields)
+ )
+ end
+
+ def interaction_data
+ graphql_data_at(:project, :merge_request, :reviewers, :nodes, :merge_request_interaction)
+ end
+
+ context 'when the user does not have interactions' do
+ it 'returns null data' do
+ post_graphql(query)
+
+ expect(interaction_data).to be_empty
+ end
+ end
+
+ context 'when the user is a reviewer, but has not reviewed' do
+ before do
+ project.add_guest(user)
+ merge_request.merge_request_reviewers.create!(reviewer: user)
+ end
+
+ it 'returns falsey values' do
+ post_graphql(query)
+
+ expect(interaction_data).to contain_exactly a_hash_including(
+ 'canMerge' => false,
+ 'canUpdate' => false,
+ 'reviewState' => 'UNREVIEWED',
+ 'reviewed' => false,
+ 'approved' => false
+ )
+ end
+ end
+
+ context 'when the user has interacted' do
+ before do
+ project.add_maintainer(user)
+ merge_request.merge_request_reviewers.create!(reviewer: user, state: 'reviewed')
+ merge_request.approved_by_users << user
+ end
+
+ it 'returns appropriate data' do
+ post_graphql(query)
+ enum = ::Types::MergeRequestReviewStateEnum.values['REVIEWED']
+
+ expect(interaction_data).to contain_exactly a_hash_including(
+ 'canMerge' => true,
+ 'canUpdate' => true,
+ 'reviewState' => enum.graphql_name,
+ 'reviewed' => true,
+ 'approved' => true
+ )
+ end
+ end
+
+ describe 'scalability' do
+ let_it_be(:other_users) { create_list(:user, 3) }
+
+ let(:unreviewed) do
+ { 'reviewState' => 'UNREVIEWED' }
+ end
+
+ let(:reviewed) do
+ { 'reviewState' => 'REVIEWED' }
+ end
+
+ shared_examples 'scalable query for interaction fields' do
+ before do
+ ([user] + other_users).each { project.add_guest(_1) }
+ end
+
+ it 'does not suffer from N+1' do
+ merge_request.merge_request_reviewers.create!(reviewer: user, state: 'reviewed')
+
+ baseline = ActiveRecord::QueryRecorder.new do
+ post_graphql(query)
+ end
+
+ expect(interaction_data).to contain_exactly(include(reviewed))
+
+ other_users.each do |user|
+ merge_request.merge_request_reviewers.create!(reviewer: user)
+ end
+
+ expect { post_graphql(query) }.not_to exceed_query_limit(baseline)
+
+ expect(interaction_data).to contain_exactly(
+ include(unreviewed),
+ include(unreviewed),
+ include(unreviewed),
+ include(reviewed)
+ )
+ end
+ end
+
+ context 'when selecting only known scalable fields' do
+ let(:not_scalable) { %w[canUpdate canMerge] }
+ let(:selected_fields) do
+ all_graphql_fields_for('UserMergeRequestInteraction', excluded: not_scalable)
+ end
+
+ it_behaves_like 'scalable query for interaction fields'
+ end
+
+ context 'when selecting all fields' do
+ before do
+ pending "See: https://gitlab.com/gitlab-org/gitlab/-/issues/322549"
+ end
+
+ let(:selected_fields) { all_graphql_fields_for('UserMergeRequestInteraction') }
+
+ it_behaves_like 'scalable query for interaction fields'
+ end
+ end
+ end
end
diff --git a/spec/requests/api/graphql/project/merge_requests_spec.rb b/spec/requests/api/graphql/project/merge_requests_spec.rb
index d97a0ed9399..7fc1ef05fa7 100644
--- a/spec/requests/api/graphql/project/merge_requests_spec.rb
+++ b/spec/requests/api/graphql/project/merge_requests_spec.rb
@@ -47,10 +47,10 @@ RSpec.describe 'getting merge request listings nested in a project' do
end
before do
- # We cannot call the whitelist here, since the transaction does not
+ # We cannot disable SQL query limiting here, since the transaction does not
# begin until we enter the controller.
headers = {
- 'X-GITLAB-QUERY-WHITELIST-ISSUE' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/322979'
+ 'X-GITLAB-DISABLE-SQL-QUERY-LIMIT' => 'https://gitlab.com/gitlab-org/gitlab/-/issues/322979'
}
post_graphql(query, current_user: current_user, headers: headers)
@@ -299,6 +299,7 @@ RSpec.describe 'getting merge request listings nested in a project' do
reviewers { nodes { username } }
participants { nodes { username } }
headPipeline { status }
+ timelogs { nodes { timeSpent } }
SELECT
end
@@ -307,7 +308,7 @@ RSpec.describe 'getting merge request listings nested in a project' do
query($first: Int) {
project(fullPath: "#{project.full_path}") {
mergeRequests(first: $first) {
- nodes { #{mr_fields} }
+ nodes { iid #{mr_fields} }
}
}
}
@@ -324,6 +325,7 @@ RSpec.describe 'getting merge request listings nested in a project' do
mr.assignees << current_user
mr.reviewers << create(:user)
mr.reviewers << current_user
+ mr.timelogs << create(:merge_request_timelog, merge_request: mr)
end
end
@@ -345,7 +347,7 @@ RSpec.describe 'getting merge request listings nested in a project' do
end
def user_collection
- { 'nodes' => all(match(a_hash_including('username' => be_present))) }
+ { 'nodes' => be_present.and(all(match(a_hash_including('username' => be_present)))) }
end
it 'returns appropriate results' do
@@ -358,7 +360,8 @@ RSpec.describe 'getting merge request listings nested in a project' do
'assignees' => user_collection,
'reviewers' => user_collection,
'participants' => user_collection,
- 'headPipeline' => { 'status' => be_present }
+ 'headPipeline' => { 'status' => be_present },
+ 'timelogs' => { 'nodes' => be_one }
)))
end
diff --git a/spec/requests/api/graphql/project/pipeline_spec.rb b/spec/requests/api/graphql/project/pipeline_spec.rb
index cc028ff2ff9..0a5bcc7a965 100644
--- a/spec/requests/api/graphql/project/pipeline_spec.rb
+++ b/spec/requests/api/graphql/project/pipeline_spec.rb
@@ -5,24 +5,28 @@ require 'spec_helper'
RSpec.describe 'getting pipeline information nested in a project' do
include GraphqlHelpers
- let!(:project) { create(:project, :repository, :public) }
- let!(:pipeline) { create(:ci_pipeline, project: project) }
- let!(:current_user) { create(:user) }
- let(:pipeline_graphql_data) { graphql_data['project']['pipeline'] }
-
- let!(:query) do
- %(
- query {
- project(fullPath: "#{project.full_path}") {
- pipeline(iid: "#{pipeline.iid}") {
- configSource
- }
- }
- }
+ let_it_be(:project) { create(:project, :repository, :public) }
+ let_it_be(:pipeline) { create(:ci_pipeline, project: project) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:build_job) { create(:ci_build, :trace_with_sections, name: 'build-a', pipeline: pipeline) }
+ let_it_be(:failed_build) { create(:ci_build, :failed, name: 'failed-build', pipeline: pipeline) }
+ let_it_be(:bridge) { create(:ci_bridge, name: 'ci-bridge-example', pipeline: pipeline) }
+
+ let(:path) { %i[project pipeline] }
+ let(:pipeline_graphql_data) { graphql_data_at(*path) }
+ let(:depth) { 3 }
+ let(:excluded) { %w[job project] } # Project is very expensive, due to the number of fields
+ let(:fields) { all_graphql_fields_for('Pipeline', excluded: excluded, max_depth: depth) }
+
+ let(:query) do
+ graphql_query_for(
+ :project,
+ { full_path: project.full_path },
+ query_graphql_field(:pipeline, { iid: pipeline.iid.to_s }, fields)
)
end
- it_behaves_like 'a working graphql query' do
+ it_behaves_like 'a working graphql query', :use_clean_rails_memory_store_caching, :request_store do
before do
post_graphql(query, current_user: current_user)
end
@@ -37,14 +41,18 @@ RSpec.describe 'getting pipeline information nested in a project' do
it 'contains configSource' do
post_graphql(query, current_user: current_user)
- expect(pipeline_graphql_data.dig('configSource')).to eq('UNKNOWN_SOURCE')
+ expect(pipeline_graphql_data['configSource']).to eq('UNKNOWN_SOURCE')
end
- context 'batching' do
- let!(:pipeline2) { create(:ci_pipeline, project: project, user: current_user, builds: [create(:ci_build, :success)]) }
- let!(:pipeline3) { create(:ci_pipeline, project: project, user: current_user, builds: [create(:ci_build, :success)]) }
+ context 'when batching' do
+ let!(:pipeline2) { successful_pipeline }
+ let!(:pipeline3) { successful_pipeline }
let!(:query) { build_query_to_find_pipeline_shas(pipeline, pipeline2, pipeline3) }
+ def successful_pipeline
+ create(:ci_pipeline, project: project, user: current_user, builds: [create(:ci_build, :success)])
+ end
+
it 'executes the finder once' do
mock = double(Ci::PipelinesFinder)
opts = { iids: [pipeline.iid, pipeline2.iid, pipeline3.iid].map(&:to_s) }
@@ -80,4 +88,198 @@ RSpec.describe 'getting pipeline information nested in a project' do
graphql_query_for('project', { 'fullPath' => project.full_path }, pipeline_fields)
end
+
+ context 'when enough data is requested' do
+ let(:fields) do
+ query_graphql_field(:jobs, nil,
+ query_graphql_field(:nodes, {}, all_graphql_fields_for('CiJob', max_depth: 3)))
+ end
+
+ it 'contains jobs' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data_at(*path, :jobs, :nodes)).to contain_exactly(
+ a_hash_including(
+ 'name' => build_job.name,
+ 'status' => build_job.status.upcase,
+ 'duration' => build_job.duration
+ ),
+ a_hash_including(
+ 'id' => global_id_of(failed_build),
+ 'status' => failed_build.status.upcase
+ ),
+ a_hash_including(
+ 'id' => global_id_of(bridge),
+ 'status' => bridge.status.upcase
+ )
+ )
+ end
+ end
+
+ context 'when requesting only builds with certain statuses' do
+ let(:variables) do
+ {
+ path: project.full_path,
+ pipelineIID: pipeline.iid.to_s,
+ status: :FAILED
+ }
+ end
+
+ let(:query) do
+ <<~GQL
+ query($path: ID!, $pipelineIID: ID!, $status: CiJobStatus!) {
+ project(fullPath: $path) {
+ pipeline(iid: $pipelineIID) {
+ jobs(statuses: [$status]) {
+ nodes {
+ #{all_graphql_fields_for('CiJob', max_depth: 1)}
+ }
+ }
+ }
+ }
+ }
+ GQL
+ end
+
+ it 'can filter build jobs by status' do
+ post_graphql(query, current_user: current_user, variables: variables)
+
+ expect(graphql_data_at(*path, :jobs, :nodes))
+ .to contain_exactly(a_hash_including('id' => global_id_of(failed_build)))
+ end
+ end
+
+ context 'when requesting a specific job' do
+ let(:variables) do
+ {
+ path: project.full_path,
+ pipelineIID: pipeline.iid.to_s
+ }
+ end
+
+ let(:build_fields) do
+ all_graphql_fields_for('CiJob', max_depth: 1)
+ end
+
+ let(:query) do
+ <<~GQL
+ query($path: ID!, $pipelineIID: ID!, $jobName: String, $jobID: JobID) {
+ project(fullPath: $path) {
+ pipeline(iid: $pipelineIID) {
+ job(id: $jobID, name: $jobName) {
+ #{build_fields}
+ }
+ }
+ }
+ }
+ GQL
+ end
+
+ let(:the_job) do
+ a_hash_including('name' => build_job.name, 'id' => global_id_of(build_job))
+ end
+
+ it 'can request a build by name' do
+ vars = variables.merge(jobName: build_job.name)
+
+ post_graphql(query, current_user: current_user, variables: vars)
+
+ expect(graphql_data_at(*path, :job)).to match(the_job)
+ end
+
+ it 'can request a build by ID' do
+ vars = variables.merge(jobID: global_id_of(build_job))
+
+ post_graphql(query, current_user: current_user, variables: vars)
+
+ expect(graphql_data_at(*path, :job)).to match(the_job)
+ end
+
+ context 'when we request nested fields of the build' do
+ let_it_be(:needy) { create(:ci_build, :dependent, pipeline: pipeline) }
+
+ let(:build_fields) { 'needs { nodes { name } }' }
+ let(:vars) { variables.merge(jobID: global_id_of(needy)) }
+
+ it 'returns the nested data' do
+ post_graphql(query, current_user: current_user, variables: vars)
+
+ expect(graphql_data_at(*path, :job, :needs, :nodes)).to contain_exactly(
+ a_hash_including('name' => needy.needs.first.name)
+ )
+ end
+
+ it 'requires a constant number of queries' do
+ fst_user = create(:user)
+ snd_user = create(:user)
+ path = %i[project pipeline job needs nodes name]
+
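+          # Record the query count for a job with one `needs` entry, then add more needs and assert the count does not grow.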
+ baseline = ActiveRecord::QueryRecorder.new do
+ post_graphql(query, current_user: fst_user, variables: vars)
+ end
+
+ expect(baseline.count).to be > 0
+ dep_names = graphql_dig_at(graphql_data(fresh_response_data), *path)
+
+ deps = create_list(:ci_build, 3, :unique_name, pipeline: pipeline)
+ deps.each { |d| create(:ci_build_need, build: needy, name: d.name) }
+
+ expect do
+ post_graphql(query, current_user: snd_user, variables: vars)
+ end.not_to exceed_query_limit(baseline)
+
+ more_names = graphql_dig_at(graphql_data(fresh_response_data), *path)
+
+ expect(more_names).to include(*dep_names)
+ expect(more_names.count).to be > dep_names.count
+ end
+ end
+ end
+
+ context 'when requesting a specific test suite' do
+ let_it_be(:pipeline) { create(:ci_pipeline, :with_test_reports, project: project) }
+ let(:suite_name) { 'test' }
+ let_it_be(:build_ids) { pipeline.latest_builds.pluck(:id) }
+
+ let(:variables) do
+ {
+ path: project.full_path,
+ pipelineIID: pipeline.iid.to_s
+ }
+ end
+
+ let(:query) do
+ <<~GQL
+ query($path: ID!, $pipelineIID: ID!, $buildIds: [ID!]!) {
+ project(fullPath: $path) {
+ pipeline(iid: $pipelineIID) {
+ testSuite(buildIds: $buildIds) {
+ name
+ }
+ }
+ }
+ }
+ GQL
+ end
+
+ it 'can request a test suite by an array of build_ids' do
+ vars = variables.merge(buildIds: build_ids)
+
+ post_graphql(query, current_user: current_user, variables: vars)
+
+ expect(graphql_data_at(:project, :pipeline, :testSuite, :name)).to eq(suite_name)
+ end
+
+    context 'when pipeline has no builds that match the given build_ids' do
+ let_it_be(:build_ids) { [non_existing_record_id] }
+
+ it 'returns nil' do
+ vars = variables.merge(buildIds: build_ids)
+
+ post_graphql(query, current_user: current_user, variables: vars)
+
+ expect(graphql_data_at(*path, :test_suite)).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/requests/api/graphql/project/repository/blobs_spec.rb b/spec/requests/api/graphql/project/repository/blobs_spec.rb
new file mode 100644
index 00000000000..12f6fbd793e
--- /dev/null
+++ b/spec/requests/api/graphql/project/repository/blobs_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe 'getting blobs in a project repository' do
+ include GraphqlHelpers
+
+ let(:project) { create(:project, :repository) }
+ let(:current_user) { project.owner }
+ let(:paths) { ["CONTRIBUTING.md", "README.md"] }
+ let(:ref) { project.default_branch }
+ let(:fields) do
+ <<~QUERY
+ blobs(paths:#{paths.inspect}, ref:#{ref.inspect}) {
+ nodes {
+ #{all_graphql_fields_for('repository_blob'.classify)}
+ }
+ }
+ QUERY
+ end
+
+ let(:query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_graphql_field('repository', {}, fields)
+ )
+ end
+
+ subject(:blobs) { graphql_data_at(:project, :repository, :blobs, :nodes) }
+
+  it 'returns the blobs' do
+ post_graphql(query, current_user: current_user)
+
+ expect(blobs).to match_array(paths.map { |path| a_hash_including('path' => path) })
+ end
+end
diff --git a/spec/requests/api/graphql/user_spec.rb b/spec/requests/api/graphql/user_spec.rb
index d2d6b1fca66..a3b2b750bc3 100644
--- a/spec/requests/api/graphql/user_spec.rb
+++ b/spec/requests/api/graphql/user_spec.rb
@@ -42,7 +42,13 @@ RSpec.describe 'User' do
end
context 'when username and id parameter are used' do
- let_it_be(:query) { graphql_query_for(:user, { id: current_user.to_global_id.to_s, username: current_user.username }, 'id') }
+ let_it_be(:query) do
+ graphql_query_for(
+ :user,
+ { id: current_user.to_global_id.to_s, username: current_user.username },
+ 'id'
+ )
+ end
it 'displays an error' do
post_graphql(query)
diff --git a/spec/requests/api/graphql_spec.rb b/spec/requests/api/graphql_spec.rb
index 4eaf57a7d35..3a1bcfc69b8 100644
--- a/spec/requests/api/graphql_spec.rb
+++ b/spec/requests/api/graphql_spec.rb
@@ -3,16 +3,18 @@ require 'spec_helper'
RSpec.describe 'GraphQL' do
include GraphqlHelpers
+ include AfterNextHelpers
- let(:query) { graphql_query_for('echo', text: 'Hello world' ) }
+ let(:query) { graphql_query_for('echo', text: 'Hello world') }
- context 'logging' do
+ describe 'logging' do
shared_examples 'logging a graphql query' do
let(:expected_params) do
{
query_string: query,
variables: variables.to_s,
duration_s: anything,
+ operation_name: nil,
depth: 1,
complexity: 1,
used_fields: ['Query.echo'],
@@ -50,19 +52,25 @@ RSpec.describe 'GraphQL' do
context 'when there is an error in the logger' do
before do
- allow_any_instance_of(Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer).to receive(:process_variables).and_raise(StandardError.new("oh noes!"))
+ logger_analyzer = GitlabSchema.query_analyzers.find do |qa|
+ qa.is_a? Gitlab::Graphql::QueryAnalyzers::LoggerAnalyzer
+ end
+ allow(logger_analyzer).to receive(:process_variables)
+ .and_raise(StandardError.new("oh noes!"))
end
it 'logs the exception in Sentry and continues with the request' do
- expect(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception).at_least(:once)
- expect(Gitlab::GraphqlLogger).to receive(:info)
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_and_raise_for_dev_exception).at_least(:once)
+ expect(Gitlab::GraphqlLogger)
+ .to receive(:info)
post_graphql(query, variables: {})
end
end
end
- context 'invalid variables' do
+ context 'with invalid variables' do
it 'returns an error' do
post_graphql(query, variables: "This is not JSON")
@@ -71,7 +79,7 @@ RSpec.describe 'GraphQL' do
end
end
- context 'authentication', :allow_forgery_protection do
+ describe 'authentication', :allow_forgery_protection do
let(:user) { create(:user) }
it 'allows access to public data without authentication' do
@@ -98,7 +106,7 @@ RSpec.describe 'GraphQL' do
expect(graphql_data['echo']).to eq("\"#{user.username}\" says: Hello world")
end
- context 'token authentication' do
+ context 'with token authentication' do
let(:token) { create(:personal_access_token) }
before do
@@ -118,7 +126,7 @@ RSpec.describe 'GraphQL' do
context 'when the personal access token has no api scope' do
it 'does not log the user in' do
- token.update(scopes: [:read_user])
+ token.update!(scopes: [:read_user])
post_graphql(query, headers: { 'PRIVATE-TOKEN' => token.token })
@@ -135,7 +143,11 @@ RSpec.describe 'GraphQL' do
let(:user) { create(:user) }
let(:query) do
- graphql_query_for('project', { 'fullPath' => project.full_path }, %w(id))
+ graphql_query_for(
+ :project,
+ { full_path: project.full_path },
+ 'id'
+ )
end
before do
@@ -196,13 +208,56 @@ RSpec.describe 'GraphQL' do
end
end
+ describe 'complexity limits' do
+ let_it_be(:project) { create(:project, :public) }
+ let!(:user) { create(:user) }
+
+ let(:query_fields) do
+ <<~QUERY
+ id
+ QUERY
+ end
+
+ let(:query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ query_fields
+ )
+ end
+
+ before do
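+      # Lower the anonymous complexity budget to 1 so even this trivial query exceeds it when unauthenticated.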
+ stub_const('GitlabSchema::DEFAULT_MAX_COMPLEXITY', 1)
+ end
+
+ context 'unauthenticated user' do
+ subject { post_graphql(query) }
+
+ it 'raises a complexity error' do
+ subject
+
+ expect_graphql_errors_to_include(/which exceeds max complexity/)
+ end
+ end
+
+ context 'authenticated user' do
+ subject { post_graphql(query, current_user: user) }
+
+ it 'does not raise an error as it uses the `AUTHENTICATED_COMPLEXITY`' do
+ subject
+
+ expect(graphql_errors).to be_nil
+ end
+ end
+ end
+
describe 'keyset pagination' do
let_it_be(:project) { create(:project, :public) }
let_it_be(:issues) { create_list(:issue, 10, project: project, created_at: Time.now.change(usec: 200)) }
let(:page_size) { 6 }
- let(:issues_edges) { %w(data project issues edges) }
- let(:end_cursor) { %w(data project issues pageInfo endCursor) }
+ let(:issues_edges) { %w[project issues edges] }
+ let(:end_cursor) { %w[project issues pageInfo endCursor] }
let(:query) do
<<~GRAPHQL
query project($fullPath: ID!, $first: Int, $after: String) {
@@ -216,16 +271,10 @@ RSpec.describe 'GraphQL' do
GRAPHQL
end
- # TODO: Switch this to use `post_graphql`
- # This is not performing an actual GraphQL request because the
- # variables end up being strings when passed through the `post_graphql`
- # helper.
- #
- # https://gitlab.com/gitlab-org/gitlab/-/issues/222432
def execute_query(after: nil)
- GitlabSchema.execute(
+ post_graphql(
query,
- context: { current_user: nil },
+ current_user: nil,
variables: {
fullPath: project.full_path,
first: page_size,
@@ -239,14 +288,16 @@ RSpec.describe 'GraphQL' do
expect(Gitlab::Graphql::Pagination::Keyset::QueryBuilder)
.to receive(:new).with(anything, anything, hash_including('created_at'), anything).and_call_original
- first_page = execute_query
+ execute_query
+ first_page = graphql_data
edges = first_page.dig(*issues_edges)
cursor = first_page.dig(*end_cursor)
expect(edges.count).to eq(6)
expect(edges.last['node']['iid']).to eq(issues[4].iid.to_s)
- second_page = execute_query(after: cursor)
+ execute_query(after: cursor)
+ second_page = graphql_data
edges = second_page.dig(*issues_edges)
expect(edges.count).to eq(4)
diff --git a/spec/requests/api/group_import_spec.rb b/spec/requests/api/group_import_spec.rb
index bb7436502ed..f632e49bf3a 100644
--- a/spec/requests/api/group_import_spec.rb
+++ b/spec/requests/api/group_import_spec.rb
@@ -218,12 +218,14 @@ RSpec.describe API::GroupImport do
stub_uploads_object_storage(ImportExportUploader, direct_upload: true)
end
+ # rubocop:disable Rails/SaveBang
let(:tmp_object) do
fog_connection.directories.new(key: 'uploads').files.create(
key: "tmp/uploads/#{file_name}",
body: file_upload
)
end
+ # rubocop:enable Rails/SaveBang
let(:fog_file) { fog_to_uploaded_file(tmp_object) }
let(:params) do
diff --git a/spec/requests/api/group_milestones_spec.rb b/spec/requests/api/group_milestones_spec.rb
index 7ed6e1a295f..e3e0164e5a7 100644
--- a/spec/requests/api/group_milestones_spec.rb
+++ b/spec/requests/api/group_milestones_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe API::GroupMilestones do
let_it_be(:params) { { include_parent_milestones: true } }
before_all do
- group.update(parent: ancestor_group)
+ group.update!(parent: ancestor_group)
end
shared_examples 'listing all milestones' do
@@ -64,10 +64,28 @@ RSpec.describe API::GroupMilestones do
end
end
+ describe 'GET /groups/:id/milestones/:milestone_id/issues' do
+ let!(:issue) { create(:issue, project: project, milestone: milestone) }
+
+ def perform_request
+ get api("/groups/#{group.id}/milestones/#{milestone.id}/issues", user)
+ end
+
+ it 'returns multiple issues without performing N + 1' do
+ perform_request
+
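+      # The initial request above warms caches; record the control query count from a second request.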
+ control_count = ActiveRecord::QueryRecorder.new { perform_request }.count
+
+ create(:issue, project: project, milestone: milestone)
+
+ expect { perform_request }.not_to exceed_query_limit(control_count)
+ end
+ end
+
def setup_for_group
- context_group.update(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ context_group.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
context_group.add_developer(user)
- public_project.update(namespace: context_group)
+ public_project.update!(namespace: context_group)
context_group.reload
end
end
diff --git a/spec/requests/api/group_variables_spec.rb b/spec/requests/api/group_variables_spec.rb
index 0b6bf65ca44..6d5676bbe35 100644
--- a/spec/requests/api/group_variables_spec.rb
+++ b/spec/requests/api/group_variables_spec.rb
@@ -55,6 +55,7 @@ RSpec.describe API::GroupVariables do
expect(json_response['value']).to eq(variable.value)
expect(json_response['protected']).to eq(variable.protected?)
expect(json_response['variable_type']).to eq(variable.variable_type)
+ expect(json_response['environment_scope']).to eq(variable.environment_scope)
end
it 'responds with 404 Not Found if requesting non-existing variable' do
@@ -98,6 +99,7 @@ RSpec.describe API::GroupVariables do
expect(json_response['protected']).to be_truthy
expect(json_response['masked']).to be_truthy
expect(json_response['variable_type']).to eq('env_var')
+ expect(json_response['environment_scope']).to eq('*')
end
it 'creates variable with optional attributes' do
@@ -111,6 +113,7 @@ RSpec.describe API::GroupVariables do
expect(json_response['protected']).to be_falsey
expect(json_response['masked']).to be_falsey
expect(json_response['variable_type']).to eq('file')
+ expect(json_response['environment_scope']).to eq('*')
end
it 'does not allow to duplicate variable key' do
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index 86999c4adaa..6bedd43e5c4 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -644,7 +644,7 @@ RSpec.describe API::Internal::Base do
context 'with Project' do
it_behaves_like 'storing arguments in the application context' do
- let(:expected_params) { { user: key.user.username, project: project.full_path } }
+ let(:expected_params) { { user: key.user.username, project: project.full_path, caller_id: "POST /api/:version/internal/allowed" } }
subject { push(key, project) }
end
@@ -652,7 +652,7 @@ RSpec.describe API::Internal::Base do
context 'with PersonalSnippet' do
it_behaves_like 'storing arguments in the application context' do
- let(:expected_params) { { user: key.user.username } }
+ let(:expected_params) { { user: key.user.username, caller_id: "POST /api/:version/internal/allowed" } }
subject { push(key, personal_snippet) }
end
@@ -660,7 +660,7 @@ RSpec.describe API::Internal::Base do
context 'with ProjectSnippet' do
it_behaves_like 'storing arguments in the application context' do
- let(:expected_params) { { user: key.user.username, project: project_snippet.project.full_path } }
+ let(:expected_params) { { user: key.user.username, project: project_snippet.project.full_path, caller_id: "POST /api/:version/internal/allowed" } }
subject { push(key, project_snippet) }
end
@@ -887,7 +887,7 @@ RSpec.describe API::Internal::Base do
context 'project does not exist' do
context 'git pull' do
it 'returns a 200 response with status: false' do
- project.destroy
+ project.destroy!
pull(key, project)
@@ -1115,7 +1115,7 @@ RSpec.describe API::Internal::Base do
end
end
- context 'feature flag :user_mode_in_session is enabled' do
+ context 'application setting :admin_mode is enabled' do
context 'with an admin user' do
let(:user) { create(:admin) }
@@ -1147,9 +1147,9 @@ RSpec.describe API::Internal::Base do
end
end
- context 'feature flag :user_mode_in_session is disabled' do
+ context 'application setting :admin_mode is disabled' do
before do
- stub_feature_flags(user_mode_in_session: false)
+ stub_application_setting(admin_mode: false)
end
context 'with an admin user' do
@@ -1413,6 +1413,29 @@ RSpec.describe API::Internal::Base do
end
end
+ describe 'GET /internal/geo_proxy' do
+ subject { get api('/internal/geo_proxy'), params: { secret_token: secret_token } }
+
+ context 'with valid auth' do
+ it 'returns empty data' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response).to be_empty
+ end
+ end
+
+ context 'with invalid auth' do
+ let(:secret_token) { 'invalid_token' }
+
+ it 'returns unauthorized' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+ end
+
def lfs_auth_project(project)
post(
api("/internal/lfs_authenticate"),
diff --git a/spec/requests/api/internal/kubernetes_spec.rb b/spec/requests/api/internal/kubernetes_spec.rb
index 2e13016a0a6..47d0c872eb6 100644
--- a/spec/requests/api/internal/kubernetes_spec.rb
+++ b/spec/requests/api/internal/kubernetes_spec.rb
@@ -38,16 +38,22 @@ RSpec.describe API::Internal::Kubernetes do
end
shared_examples 'agent authentication' do
- it 'returns 403 if Authorization header not sent' do
+ it 'returns 401 if Authorization header not sent' do
send_request
- expect(response).to have_gitlab_http_status(:forbidden)
+ expect(response).to have_gitlab_http_status(:unauthorized)
end
- it 'returns 403 if Authorization is for non-existent agent' do
+ it 'returns 401 if Authorization is for non-existent agent' do
send_request(headers: { 'Authorization' => 'Bearer NONEXISTENT' })
- expect(response).to have_gitlab_http_status(:forbidden)
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ shared_examples 'agent token tracking' do
+ it 'tracks token usage' do
+      expect { send_request(headers: { 'Authorization' => "Bearer #{agent_token.token}" }) }.to change { agent_token.reload.read_attribute(:last_used_at) }
end
end
@@ -101,6 +107,8 @@ RSpec.describe API::Internal::Kubernetes do
let(:agent) { agent_token.agent }
let(:project) { agent.project }
+      it_behaves_like 'agent token tracking'
+
it 'returns expected data', :aggregate_failures do
send_request(headers: { 'Authorization' => "Bearer #{agent_token.token}" })
@@ -169,6 +177,8 @@ RSpec.describe API::Internal::Kubernetes do
context 'an agent is found' do
let_it_be(:agent_token) { create(:cluster_agent_token) }
+      it_behaves_like 'agent token tracking'
+
context 'project is public' do
let(:project) { create(:project, :public) }
diff --git a/spec/requests/api/invitations_spec.rb b/spec/requests/api/invitations_spec.rb
index 98a7aa63b16..b0e54055854 100644
--- a/spec/requests/api/invitations_spec.rb
+++ b/spec/requests/api/invitations_spec.rb
@@ -102,7 +102,8 @@ RSpec.describe API::Invitations do
params: { email: stranger.email, access_level: Member::REPORTER }
expect(response).to have_gitlab_http_status(:created)
- expect(json_response['message'][stranger.email]).to eq("Access level should be greater than or equal to Developer inherited membership from group #{parent.name}")
+ expect(json_response['message'][stranger.email])
+ .to eq("Access level should be greater than or equal to Developer inherited membership from group #{parent.name}")
end
it 'creates the member if group level is lower' do
@@ -153,10 +154,10 @@ RSpec.describe API::Invitations do
it "returns a message if member already exists" do
post api("/#{source_type.pluralize}/#{source.id}/invitations", maintainer),
- params: { email: maintainer.email, access_level: Member::MAINTAINER }
+ params: { email: developer.email, access_level: Member::MAINTAINER }
expect(response).to have_gitlab_http_status(:created)
- expect(json_response['message'][maintainer.email]).to eq("Already a member of #{source.name}")
+ expect(json_response['message'][developer.email]).to eq("User already exists in source")
end
it 'returns 404 when the email is not valid' do
@@ -164,7 +165,7 @@ RSpec.describe API::Invitations do
params: { email: '', access_level: Member::MAINTAINER }
expect(response).to have_gitlab_http_status(:created)
- expect(json_response['message']).to eq('Email cannot be blank')
+ expect(json_response['message']).to eq('Emails cannot be blank')
end
it 'returns 404 when the email list is not a valid format' do
diff --git a/spec/requests/api/issue_links_spec.rb b/spec/requests/api/issue_links_spec.rb
index a4243766111..45583f5c7dc 100644
--- a/spec/requests/api/issue_links_spec.rb
+++ b/spec/requests/api/issue_links_spec.rb
@@ -12,26 +12,40 @@ RSpec.describe API::IssueLinks do
end
describe 'GET /links' do
+ def perform_request(user = nil, params = {})
+ get api("/projects/#{project.id}/issues/#{issue.iid}/links", user), params: params
+ end
+
context 'when unauthenticated' do
it 'returns 401' do
- get api("/projects/#{project.id}/issues/#{issue.iid}/links")
+ perform_request
expect(response).to have_gitlab_http_status(:unauthorized)
end
end
context 'when authenticated' do
- it 'returns related issues' do
- target_issue = create(:issue, project: project)
- create(:issue_link, source: issue, target: target_issue)
+ let_it_be(:issue_link1) { create(:issue_link, source: issue, target: create(:issue, project: project)) }
+ let_it_be(:issue_link2) { create(:issue_link, source: issue, target: create(:issue, project: project)) }
- get api("/projects/#{project.id}/issues/#{issue.iid}/links", user)
+ it 'returns related issues' do
+ perform_request(user)
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Array
- expect(json_response.length).to eq(1)
+ expect(json_response.length).to eq(2)
expect(response).to match_response_schema('public_api/v4/issue_links')
end
+
+ it 'returns multiple links without N + 1' do
+ perform_request(user)
+
+ control_count = ActiveRecord::QueryRecorder.new { perform_request(user) }.count
+
+ create(:issue_link, source: issue, target: create(:issue, project: project))
+
+ expect { perform_request(user) }.not_to exceed_query_limit(control_count)
+ end
end
end
@@ -82,7 +96,7 @@ RSpec.describe API::IssueLinks do
params: { target_project_id: unauthorized_project.id, target_issue_iid: target_issue.iid }
expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response['message']).to eq('No Issue found for given params')
+ expect(json_response['message']).to eq('No matching issue found. Make sure that you are adding a valid issue URL.')
end
end
diff --git a/spec/requests/api/issues/get_group_issues_spec.rb b/spec/requests/api/issues/get_group_issues_spec.rb
index 3870c78deee..cebde747210 100644
--- a/spec/requests/api/issues/get_group_issues_spec.rb
+++ b/spec/requests/api/issues/get_group_issues_spec.rb
@@ -754,7 +754,7 @@ RSpec.describe API::Issues do
let(:parent_group) { create(:group) }
before do
- group.update(parent_id: parent_group.id)
+ group.update!(parent_id: parent_group.id)
group_closed_issue.reload
end
diff --git a/spec/requests/api/issues/post_projects_issues_spec.rb b/spec/requests/api/issues/post_projects_issues_spec.rb
index 5b3e2363669..7f1db620d4f 100644
--- a/spec/requests/api/issues/post_projects_issues_spec.rb
+++ b/spec/requests/api/issues/post_projects_issues_spec.rb
@@ -111,7 +111,7 @@ RSpec.describe API::Issues do
let(:not_member) { create(:user) }
before do
- project.project_feature.update(issues_access_level: ProjectFeature::PRIVATE)
+ project.project_feature.update!(issues_access_level: ProjectFeature::PRIVATE)
end
it 'renders 403' do
diff --git a/spec/requests/api/jobs_spec.rb b/spec/requests/api/jobs_spec.rb
index fe00b654d3b..cff006bed94 100644
--- a/spec/requests/api/jobs_spec.rb
+++ b/spec/requests/api/jobs_spec.rb
@@ -100,6 +100,18 @@ RSpec.describe API::Jobs do
end
end
+ context 'when token is valid but not CI_JOB_TOKEN' do
+ let(:token) { create(:personal_access_token, user: user) }
+
+ include_context 'with auth headers' do
+ let(:header) { { 'Private-Token' => token.token } }
+ end
+
+ it 'returns not found' do
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
context 'with job token authentication header' do
include_context 'with auth headers' do
let(:header) { { API::Helpers::Runner::JOB_TOKEN_HEADER => running_job.token } }
@@ -215,7 +227,7 @@ RSpec.describe API::Jobs do
first_build = create(:ci_build, :trace_artifact, :artifacts, :test_reports, pipeline: pipeline)
first_build.runner = create(:ci_runner)
first_build.user = create(:user)
- first_build.save
+ first_build.save!
control_count = ActiveRecord::QueryRecorder.new { go }.count
@@ -223,7 +235,7 @@ RSpec.describe API::Jobs do
second_build = create(:ci_build, :trace_artifact, :artifacts, :test_reports, pipeline: second_pipeline)
second_build.runner = create(:ci_runner)
second_build.user = create(:user)
- second_build.save
+ second_build.save!
expect { go }.not_to exceed_query_limit(control_count)
end
@@ -684,7 +696,7 @@ RSpec.describe API::Jobs do
context 'with regular branch' do
before do
pipeline.reload
- pipeline.update(ref: 'master',
+ pipeline.update!(ref: 'master',
sha: project.commit('master').sha)
get_for_ref('master')
@@ -696,7 +708,7 @@ RSpec.describe API::Jobs do
context 'with branch name containing slash' do
before do
pipeline.reload
- pipeline.update(ref: 'improve/awesome',
+ pipeline.update!(ref: 'improve/awesome',
sha: project.commit('improve/awesome').sha)
end
@@ -732,7 +744,7 @@ RSpec.describe API::Jobs do
stub_artifacts_object_storage
job.success
- project.update(visibility_level: visibility_level,
+ project.update!(visibility_level: visibility_level,
public_builds: public_builds)
get_artifact_file(artifact)
@@ -826,7 +838,7 @@ RSpec.describe API::Jobs do
context 'with branch name containing slash' do
before do
pipeline.reload
- pipeline.update(ref: 'improve/awesome',
+ pipeline.update!(ref: 'improve/awesome',
sha: project.commit('improve/awesome').sha)
end
diff --git a/spec/requests/api/labels_spec.rb b/spec/requests/api/labels_spec.rb
index e3fffd3e3fd..26377c40b73 100644
--- a/spec/requests/api/labels_spec.rb
+++ b/spec/requests/api/labels_spec.rb
@@ -119,7 +119,7 @@ RSpec.describe API::Labels do
expect(label).not_to be_nil
- label.priorities.create(project: label.project, priority: 1)
+ label.priorities.create!(project: label.project, priority: 1)
label.save!
request_params = {
@@ -139,7 +139,7 @@ RSpec.describe API::Labels do
expect(label).not_to be_nil
label_id = spec_params[:name] || spec_params[:label_id]
- label.priorities.create(project: label.project, priority: 1)
+ label.priorities.create!(project: label.project, priority: 1)
label.save!
request_params = {
@@ -383,7 +383,7 @@ RSpec.describe API::Labels do
it 'returns 409 if label already exists in group' do
group = create(:group)
group_label = create(:group_label, group: group)
- project.update(group: group)
+ project.update!(group: group)
post api("/projects/#{project.id}/labels", user),
params: {
diff --git a/spec/requests/api/lint_spec.rb b/spec/requests/api/lint_spec.rb
index cf8cac773f5..f26236e0253 100644
--- a/spec/requests/api/lint_spec.rb
+++ b/spec/requests/api/lint_spec.rb
@@ -166,7 +166,7 @@ RSpec.describe API::Lint do
included_config = YAML.safe_load(included_content, [Symbol])
root_config = YAML.safe_load(yaml_content, [Symbol])
- expected_yaml = included_config.merge(root_config).except(:include).to_yaml
+ expected_yaml = included_config.merge(root_config).except(:include).deep_stringify_keys.to_yaml
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Hash
@@ -246,7 +246,7 @@ RSpec.describe API::Lint do
let(:dry_run) { false }
let(:included_content) do
- { another_test: { stage: 'test', script: 'echo 1' } }.to_yaml
+ { another_test: { stage: 'test', script: 'echo 1' } }.deep_stringify_keys.to_yaml
end
before do
@@ -299,7 +299,7 @@ RSpec.describe API::Lint do
end
let(:included_content) do
- { another_test: { stage: 'test', script: 'echo 1' } }.to_yaml
+ { another_test: { stage: 'test', script: 'echo 1' } }.deep_stringify_keys.to_yaml
end
before do
@@ -341,7 +341,7 @@ RSpec.describe API::Lint do
context 'with invalid .gitlab-ci.yml content' do
let(:yaml_content) do
- { image: 'ruby:2.7', services: ['postgres'] }.to_yaml
+ { image: 'ruby:2.7', services: ['postgres'] }.deep_stringify_keys.to_yaml
end
before do
@@ -385,7 +385,7 @@ RSpec.describe API::Lint do
included_config = YAML.safe_load(included_content, [Symbol])
root_config = YAML.safe_load(yaml_content, [Symbol])
- expected_yaml = included_config.merge(root_config).except(:include).to_yaml
+ expected_yaml = included_config.merge(root_config).except(:include).deep_stringify_keys.to_yaml
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an Hash
@@ -539,7 +539,7 @@ RSpec.describe API::Lint do
context 'with invalid .gitlab-ci.yml content' do
let(:yaml_content) do
- { image: 'ruby:2.7', services: ['postgres'] }.to_yaml
+ { image: 'ruby:2.7', services: ['postgres'] }.deep_stringify_keys.to_yaml
end
context 'when running as dry run' do
diff --git a/spec/requests/api/maven_packages_spec.rb b/spec/requests/api/maven_packages_spec.rb
index 7f0e4f18e3b..3a015e98fb1 100644
--- a/spec/requests/api/maven_packages_spec.rb
+++ b/spec/requests/api/maven_packages_spec.rb
@@ -47,7 +47,21 @@ RSpec.describe API::MavenPackages do
end
end
- shared_examples 'processing HEAD requests' do
+ shared_examples 'rejecting the request for non existing maven path' do |expected_status: :not_found|
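+    # When the path is checked first, the request must be rejected before the package finder is ever invoked.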
+ before do
+ if Feature.enabled?(:check_maven_path_first)
+ expect(::Packages::Maven::PackageFinder).not_to receive(:new)
+ end
+ end
+
+ it 'rejects the request' do
+ subject
+
+ expect(response).to have_gitlab_http_status(expected_status)
+ end
+ end
+
+ shared_examples 'processing HEAD requests' do |instance_level: false|
subject { head api(url) }
before do
@@ -92,6 +106,12 @@ RSpec.describe API::MavenPackages do
subject
end
+
+ context 'with a non existing maven path' do
+ let(:path) { 'foo/bar/1.2.3' }
+
+ it_behaves_like 'rejecting the request for non existing maven path', expected_status: instance_level ? :forbidden : :not_found
+ end
end
end
@@ -99,9 +119,8 @@ RSpec.describe API::MavenPackages do
context 'successful download' do
subject do
download_file(
- package_file.file_name,
- {},
- Gitlab::Auth::AuthFinders::DEPLOY_TOKEN_HEADER => deploy_token.token
+ file_name: package_file.file_name,
+ request_headers: { Gitlab::Auth::AuthFinders::DEPLOY_TOKEN_HEADER => deploy_token.token }
)
end
@@ -126,7 +145,7 @@ RSpec.describe API::MavenPackages do
shared_examples 'downloads with a job token' do
context 'with a running job' do
it 'allows download with job token' do
- download_file(package_file.file_name, job_token: job.token)
+ download_file(file_name: package_file.file_name, params: { job_token: job.token })
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq('application/octet-stream')
@@ -139,7 +158,7 @@ RSpec.describe API::MavenPackages do
end
it 'returns unauthorized error' do
- download_file(package_file.file_name, job_token: job.token)
+ download_file(file_name: package_file.file_name, params: { job_token: job.token })
expect(response).to have_gitlab_http_status(:unauthorized)
end
@@ -147,133 +166,217 @@ RSpec.describe API::MavenPackages do
end
describe 'GET /api/v4/packages/maven/*path/:file_name' do
- context 'a public project' do
- subject { download_file(package_file.file_name) }
+ shared_examples 'handling all conditions' do
+ context 'a public project' do
+ subject { download_file(file_name: package_file.file_name) }
- it_behaves_like 'tracking the file download event'
+ it_behaves_like 'tracking the file download event'
- it 'returns the file' do
- subject
+ it 'returns the file' do
+ subject
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('application/octet-stream')
+ end
- it 'returns sha1 of the file' do
- download_file(package_file.file_name + '.sha1')
+ it 'returns sha1 of the file' do
+ download_file(file_name: package_file.file_name + '.sha1')
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('text/plain')
- expect(response.body).to eq(package_file.file_sha1)
- end
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('text/plain')
+ expect(response.body).to eq(package_file.file_sha1)
+ end
- context 'internal project' do
- before do
- project.team.truncate
- project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ context 'with a non existing maven path' do
+ subject { download_file(file_name: package_file.file_name, path: 'foo/bar/1.2.3') }
+
+ it_behaves_like 'rejecting the request for non existing maven path', expected_status: :forbidden
+ end
end
- subject { download_file_with_token(package_file.file_name) }
+ context 'internal project' do
+ before do
+ project.team.truncate
+ project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ end
- it_behaves_like 'tracking the file download event'
+ subject { download_file_with_token(file_name: package_file.file_name) }
- it 'returns the file' do
- subject
+ it_behaves_like 'tracking the file download event'
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ it 'returns the file' do
+ subject
- it 'denies download when no private token' do
- download_file(package_file.file_name)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('application/octet-stream')
+ end
- expect(response).to have_gitlab_http_status(:forbidden)
- end
+ it 'denies download when no private token' do
+ download_file(file_name: package_file.file_name)
- it_behaves_like 'downloads with a job token'
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
- it_behaves_like 'downloads with a deploy token'
- end
+ it_behaves_like 'downloads with a job token'
- context 'private project' do
- subject { download_file_with_token(package_file.file_name) }
+ it_behaves_like 'downloads with a deploy token'
- before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ context 'with a non existing maven path' do
+ subject { download_file_with_token(file_name: package_file.file_name, path: 'foo/bar/1.2.3') }
+
+ it_behaves_like 'rejecting the request for non existing maven path', expected_status: :forbidden
+ end
end
- it_behaves_like 'tracking the file download event'
+ context 'private project' do
+ subject { download_file_with_token(file_name: package_file.file_name) }
- it 'returns the file' do
- subject
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ it_behaves_like 'tracking the file download event'
- it 'denies download when not enough permissions' do
- project.add_guest(user)
+ it 'returns the file' do
+ subject
- subject
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('application/octet-stream')
+ end
- expect(response).to have_gitlab_http_status(:forbidden)
- end
+ it 'denies download when not enough permissions' do
+ project.add_guest(user)
+
+ subject
- it 'denies download when no private token' do
- download_file(package_file.file_name)
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
- expect(response).to have_gitlab_http_status(:forbidden)
- end
+ it 'denies download when no private token' do
+ download_file(file_name: package_file.file_name)
- it_behaves_like 'downloads with a job token'
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
- it_behaves_like 'downloads with a deploy token'
+ it_behaves_like 'downloads with a job token'
- it 'does not allow download by a unauthorized deploy token with same id as a user with access' do
- unauthorized_deploy_token = create(:deploy_token, read_package_registry: true, write_package_registry: true)
+ it_behaves_like 'downloads with a deploy token'
- another_user = create(:user)
- project.add_developer(another_user)
+        it 'does not allow download by an unauthorized deploy token with same id as a user with access' do
+ unauthorized_deploy_token = create(:deploy_token, read_package_registry: true, write_package_registry: true)
- # We force the id of the deploy token and the user to be the same
- unauthorized_deploy_token.update!(id: another_user.id)
+ another_user = create(:user)
+ project.add_developer(another_user)
- download_file(
- package_file.file_name,
- {},
- Gitlab::Auth::AuthFinders::DEPLOY_TOKEN_HEADER => unauthorized_deploy_token.token
- )
+ # We force the id of the deploy token and the user to be the same
+ unauthorized_deploy_token.update!(id: another_user.id)
- expect(response).to have_gitlab_http_status(:forbidden)
+ download_file(
+ file_name: package_file.file_name,
+ request_headers: { Gitlab::Auth::AuthFinders::DEPLOY_TOKEN_HEADER => unauthorized_deploy_token.token }
+ )
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+
+ context 'with a non existing maven path' do
+ subject { download_file_with_token(file_name: package_file.file_name, path: 'foo/bar/1.2.3') }
+
+ it_behaves_like 'rejecting the request for non existing maven path', expected_status: :forbidden
+ end
+ end
+
+ context 'project name is different from a package name' do
+ before do
+ maven_metadatum.update!(path: "wrong_name/#{package.version}")
+ end
+
+ it 'rejects request' do
+ download_file(file_name: package_file.file_name)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
end
end
- context 'project name is different from a package name' do
+ context 'with maven_packages_group_level_improvements enabled' do
before do
- maven_metadatum.update!(path: "wrong_name/#{package.version}")
+ stub_feature_flags(maven_packages_group_level_improvements: true)
end
- it 'rejects request' do
- download_file(package_file.file_name)
+ it_behaves_like 'handling all conditions'
+ end
- expect(response).to have_gitlab_http_status(:forbidden)
+ context 'with maven_packages_group_level_improvements disabled' do
+ before do
+ stub_feature_flags(maven_packages_group_level_improvements: false)
end
+
+ it_behaves_like 'handling all conditions'
end
- def download_file(file_name, params = {}, request_headers = headers)
- get api("/packages/maven/#{maven_metadatum.path}/#{file_name}"), params: params, headers: request_headers
+ context 'with check_maven_path_first enabled' do
+ before do
+ stub_feature_flags(check_maven_path_first: true)
+ end
+
+ it_behaves_like 'handling all conditions'
+ end
+
+ context 'with check_maven_path_first disabled' do
+ before do
+ stub_feature_flags(check_maven_path_first: false)
+ end
+
+ it_behaves_like 'handling all conditions'
end
- def download_file_with_token(file_name, params = {}, request_headers = headers_with_token)
- download_file(file_name, params, request_headers)
+ def download_file(file_name:, params: {}, request_headers: headers, path: maven_metadatum.path)
+ get api("/packages/maven/#{path}/#{file_name}"), params: params, headers: request_headers
+ end
+
+ def download_file_with_token(file_name:, params: {}, request_headers: headers_with_token, path: maven_metadatum.path)
+ download_file(file_name: file_name, params: params, request_headers: request_headers, path: path)
end
end
describe 'HEAD /api/v4/packages/maven/*path/:file_name' do
- let(:url) { "/packages/maven/#{package.maven_metadatum.path}/#{package_file.file_name}" }
+ let(:path) { package.maven_metadatum.path }
+ let(:url) { "/packages/maven/#{path}/#{package_file.file_name}" }
+
+ it_behaves_like 'processing HEAD requests', instance_level: true
- it_behaves_like 'processing HEAD requests'
+ context 'with maven_packages_group_level_improvements enabled' do
+ before do
+ stub_feature_flags(maven_packages_group_level_improvements: true)
+ end
+
+ it_behaves_like 'processing HEAD requests', instance_level: true
+ end
+
+ context 'with maven_packages_group_level_improvements disabled' do
+ before do
+ stub_feature_flags(maven_packages_group_level_improvements: false)
+ end
+
+ it_behaves_like 'processing HEAD requests', instance_level: true
+ end
+
+ context 'with check_maven_path_first enabled' do
+ before do
+ stub_feature_flags(check_maven_path_first: true)
+ end
+
+ it_behaves_like 'processing HEAD requests', instance_level: true
+ end
+
+ context 'with check_maven_path_first disabled' do
+ before do
+ stub_feature_flags(check_maven_path_first: false)
+ end
+
+ it_behaves_like 'processing HEAD requests', instance_level: true
+ end
end
describe 'GET /api/v4/groups/:id/-/packages/maven/*path/:file_name' do
@@ -282,91 +385,299 @@ RSpec.describe API::MavenPackages do
group.add_developer(user)
end
- context 'a public project' do
- subject { download_file(package_file.file_name) }
+ shared_examples 'handling all conditions' do
+ context 'a public project' do
+ subject { download_file(file_name: package_file.file_name) }
- it_behaves_like 'tracking the file download event'
+ it_behaves_like 'tracking the file download event'
- it 'returns the file' do
- subject
+ it 'returns the file' do
+ subject
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('application/octet-stream')
+ end
+
+ it 'returns sha1 of the file' do
+ download_file(file_name: package_file.file_name + '.sha1')
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('text/plain')
+ expect(response.body).to eq(package_file.file_sha1)
+ end
+
+ context 'with a non existing maven path' do
+ subject { download_file(file_name: package_file.file_name, path: 'foo/bar/1.2.3') }
+
+ it_behaves_like 'rejecting the request for non existing maven path'
+ end
end
- it 'returns sha1 of the file' do
- download_file(package_file.file_name + '.sha1')
+ context 'internal project' do
+ before do
+ group.group_member(user).destroy!
+ project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ end
+
+ subject { download_file_with_token(file_name: package_file.file_name) }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('text/plain')
- expect(response.body).to eq(package_file.file_sha1)
+ it_behaves_like 'tracking the file download event'
+
+ it 'returns the file' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('application/octet-stream')
+ end
+
+ it 'denies download when no private token' do
+ download_file(file_name: package_file.file_name)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it_behaves_like 'downloads with a job token'
+
+ it_behaves_like 'downloads with a deploy token'
+
+ context 'with a non existing maven path' do
+ subject { download_file_with_token(file_name: package_file.file_name, path: 'foo/bar/1.2.3') }
+
+ it_behaves_like 'rejecting the request for non existing maven path'
+ end
end
- end
- context 'internal project' do
- before do
- group.group_member(user).destroy!
- project.update!(visibility_level: Gitlab::VisibilityLevel::INTERNAL)
+ context 'private project' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ end
+
+ subject { download_file_with_token(file_name: package_file.file_name) }
+
+ it_behaves_like 'tracking the file download event'
+
+ it 'returns the file' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('application/octet-stream')
+ end
+
+ it 'denies download when not enough permissions' do
+ group.add_guest(user)
+
+ subject
+
+ status = Feature.enabled?(:maven_packages_group_level_improvements, default_enabled: :yaml) ? :not_found : :forbidden
+ expect(response).to have_gitlab_http_status(status)
+ end
+
+ it 'denies download when no private token' do
+ download_file(file_name: package_file.file_name)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it_behaves_like 'downloads with a job token'
+
+ it_behaves_like 'downloads with a deploy token'
+
+ context 'with a non existing maven path' do
+ subject { download_file_with_token(file_name: package_file.file_name, path: 'foo/bar/1.2.3') }
+
+ it_behaves_like 'rejecting the request for non existing maven path'
+ end
+
+ context 'with group deploy token' do
+ subject { download_file_with_token(file_name: package_file.file_name, request_headers: group_deploy_token_headers) }
+
+ it 'returns the file' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('application/octet-stream')
+ end
+
+ it 'returns the file with only write_package_registry scope' do
+ deploy_token_for_group.update!(read_package_registry: false)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('application/octet-stream')
+ end
+
+ context 'with a non existing maven path' do
+ subject { download_file_with_token(file_name: package_file.file_name, path: 'foo/bar/1.2.3', request_headers: group_deploy_token_headers) }
+
+ it_behaves_like 'rejecting the request for non existing maven path'
+ end
+ end
+
+ context 'with a reporter from a subgroup accessing the root group' do
+ let_it_be(:root_group) { create(:group, :private) }
+ let_it_be(:group) { create(:group, :private, parent: root_group) }
+
+ subject { download_file_with_token(file_name: package_file.file_name, request_headers: headers_with_token, group_id: root_group.id) }
+
+ before do
+ project.update!(namespace: group)
+ group.add_reporter(user)
+ end
+
+ it 'returns the file' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('application/octet-stream')
+ end
+
+ context 'with a non existing maven path' do
+ subject { download_file_with_token(file_name: package_file.file_name, path: 'foo/bar/1.2.3', request_headers: headers_with_token, group_id: root_group.id) }
+
+ it_behaves_like 'rejecting the request for non existing maven path'
+ end
+ end
end
- subject { download_file_with_token(package_file.file_name) }
+ context 'maven metadata file' do
+ let_it_be(:sub_group1) { create(:group, parent: group) }
+ let_it_be(:sub_group2) { create(:group, parent: group) }
+ let_it_be(:project1) { create(:project, :private, group: sub_group1) }
+ let_it_be(:project2) { create(:project, :private, group: sub_group2) }
+ let_it_be(:project3) { create(:project, :private, group: sub_group1) }
+ let_it_be(:package_name) { 'foo' }
+ let_it_be(:package1) { create(:maven_package, project: project1, name: package_name, version: nil) }
+ let_it_be(:package_file1) { create(:package_file, :xml, package: package1, file_name: 'maven-metadata.xml') }
+ let_it_be(:package2) { create(:maven_package, project: project2, name: package_name, version: nil) }
+ let_it_be(:package_file2) { create(:package_file, :xml, package: package2, file_name: 'maven-metadata.xml') }
+ let_it_be(:package3) { create(:maven_package, project: project3, name: package_name, version: nil) }
+ let_it_be(:package_file3) { create(:package_file, :xml, package: package3, file_name: 'maven-metadata.xml') }
- it_behaves_like 'tracking the file download event'
+ let(:maven_metadatum) { package3.maven_metadatum }
- it 'returns the file' do
- subject
+ subject { download_file_with_token(file_name: package_file3.file_name) }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
+ before do
+ sub_group1.add_developer(user)
+ sub_group2.add_developer(user)
+ # the package with the most recently published file should be returned
+ create(:package_file, :xml, package: package2)
+ end
+
+ context 'in multiple versionless packages' do
+ it 'downloads the file' do
+ expect(::Packages::PackageFileFinder)
+ .to receive(:new).with(package2, 'maven-metadata.xml').and_call_original
+
+ subject
+ end
+ end
+
+ context 'in multiple snapshot packages' do
+ before do
+ version = '1.0.0-SNAPSHOT'
+ [package1, package2, package3].each do |pkg|
+ pkg.update!(version: version)
+
+ pkg.maven_metadatum.update!(path: "#{pkg.name}/#{pkg.version}")
+ end
+ end
+
+ it 'downloads the file' do
+ expect(::Packages::PackageFileFinder)
+ .to receive(:new).with(package3, 'maven-metadata.xml').and_call_original
+
+ subject
+ end
+ end
+ end
+ end
+
+ context 'with maven_packages_group_level_improvements enabled' do
+ before do
+ stub_feature_flags(maven_packages_group_level_improvements: true)
end
- it 'denies download when no private token' do
- download_file(package_file.file_name)
+ it_behaves_like 'handling all conditions'
+ end
- expect(response).to have_gitlab_http_status(:not_found)
+ context 'with maven_packages_group_level_improvements disabled' do
+ before do
+ stub_feature_flags(maven_packages_group_level_improvements: false)
end
- it_behaves_like 'downloads with a job token'
+ it_behaves_like 'handling all conditions'
+ end
+
+ context 'with check_maven_path_first enabled' do
+ before do
+ stub_feature_flags(check_maven_path_first: true)
+ end
- it_behaves_like 'downloads with a deploy token'
+ it_behaves_like 'handling all conditions'
end
- context 'private project' do
+ context 'with check_maven_path_first disabled' do
before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ stub_feature_flags(check_maven_path_first: false)
end
- subject { download_file_with_token(package_file.file_name) }
+ it_behaves_like 'handling all conditions'
+ end
- it_behaves_like 'tracking the file download event'
+ def download_file(file_name:, params: {}, request_headers: headers, path: maven_metadatum.path, group_id: group.id)
+ get api("/groups/#{group_id}/-/packages/maven/#{path}/#{file_name}"), params: params, headers: request_headers
+ end
- it 'returns the file' do
- subject
+ def download_file_with_token(file_name:, params: {}, request_headers: headers_with_token, path: maven_metadatum.path, group_id: group.id)
+ download_file(file_name: file_name, params: params, request_headers: request_headers, path: path, group_id: group_id)
+ end
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
+ describe 'HEAD /api/v4/groups/:id/-/packages/maven/*path/:file_name' do
+ let(:path) { package.maven_metadatum.path }
+ let(:url) { "/groups/#{group.id}/-/packages/maven/#{path}/#{package_file.file_name}" }
+
+ context 'with maven_packages_group_level_improvements enabled' do
+ before do
+ stub_feature_flags(maven_packages_group_level_improvements: true)
end
- it 'denies download when not enough permissions' do
- group.add_guest(user)
+ it_behaves_like 'processing HEAD requests'
+ end
- subject
+ context 'with maven_packages_group_level_improvements disabled' do
+ before do
+ stub_feature_flags(maven_packages_group_level_improvements: false)
+ end
- expect(response).to have_gitlab_http_status(:forbidden)
+ it_behaves_like 'processing HEAD requests'
+ end
+
+ context 'with check_maven_path_first enabled' do
+ before do
+ stub_feature_flags(check_maven_path_first: true)
end
- it 'denies download when no private token' do
- download_file(package_file.file_name)
+ it_behaves_like 'processing HEAD requests'
+ end
- expect(response).to have_gitlab_http_status(:not_found)
+ context 'with check_maven_path_first disabled' do
+ before do
+ stub_feature_flags(check_maven_path_first: false)
end
- it_behaves_like 'downloads with a job token'
+ it_behaves_like 'processing HEAD requests'
+ end
+ end
- it_behaves_like 'downloads with a deploy token'
+ describe 'GET /api/v4/projects/:id/packages/maven/*path/:file_name' do
+ shared_examples 'handling all conditions' do
+ context 'a public project' do
+ subject { download_file(file_name: package_file.file_name) }
- context 'with group deploy token' do
- subject { download_file_with_token(package_file.file_name, {}, group_deploy_token_headers) }
+ it_behaves_like 'tracking the file download event'
it 'returns the file' do
subject
@@ -375,108 +686,145 @@ RSpec.describe API::MavenPackages do
expect(response.media_type).to eq('application/octet-stream')
end
- it 'returns the file with only write_package_registry scope' do
- deploy_token_for_group.update!(read_package_registry: false)
+ it 'returns sha1 of the file' do
+ download_file(file_name: package_file.file_name + '.sha1')
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('text/plain')
+ expect(response.body).to eq(package_file.file_sha1)
+ end
+
+ context 'with a non existing maven path' do
+ subject { download_file(file_name: package_file.file_name, path: 'foo/bar/1.2.3') }
+
+ it_behaves_like 'rejecting the request for non existing maven path'
+ end
+ end
+
+ context 'private project' do
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ end
+
+ subject { download_file_with_token(file_name: package_file.file_name) }
+
+ it_behaves_like 'tracking the file download event'
+ it 'returns the file' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(response.media_type).to eq('application/octet-stream')
end
- end
- end
- def download_file(file_name, params = {}, request_headers = headers)
- get api("/groups/#{group.id}/-/packages/maven/#{maven_metadatum.path}/#{file_name}"), params: params, headers: request_headers
- end
+ it 'denies download when not enough permissions' do
+ project.add_guest(user)
- def download_file_with_token(file_name, params = {}, request_headers = headers_with_token)
- download_file(file_name, params, request_headers)
- end
- end
-
- describe 'HEAD /api/v4/groups/:id/-/packages/maven/*path/:file_name' do
- let(:url) { "/groups/#{group.id}/-/packages/maven/#{package.maven_metadatum.path}/#{package_file.file_name}" }
+ subject
- it_behaves_like 'processing HEAD requests'
- end
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
- describe 'GET /api/v4/projects/:id/packages/maven/*path/:file_name' do
- context 'a public project' do
- subject { download_file(package_file.file_name) }
+ it 'denies download when no private token' do
+ download_file(file_name: package_file.file_name)
- it_behaves_like 'tracking the file download event'
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
- it 'returns the file' do
- subject
+ it_behaves_like 'downloads with a job token'
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ it_behaves_like 'downloads with a deploy token'
- it 'returns sha1 of the file' do
- download_file(package_file.file_name + '.sha1')
+ context 'with a non existing maven path' do
+ subject { download_file_with_token(file_name: package_file.file_name, path: 'foo/bar/1.2.3') }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('text/plain')
- expect(response.body).to eq(package_file.file_sha1)
+ it_behaves_like 'rejecting the request for non existing maven path'
+ end
end
end
- context 'private project' do
+ context 'with maven_packages_group_level_improvements enabled' do
before do
- project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ stub_feature_flags(maven_packages_group_level_improvements: true)
end
- subject { download_file_with_token(package_file.file_name) }
-
- it_behaves_like 'tracking the file download event'
-
- it 'returns the file' do
- subject
+ it_behaves_like 'handling all conditions'
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
+ context 'with maven_packages_group_level_improvements disabled' do
+ before do
+ stub_feature_flags(maven_packages_group_level_improvements: false)
end
- it 'denies download when not enough permissions' do
- project.add_guest(user)
-
- subject
+ it_behaves_like 'handling all conditions'
+ end
- expect(response).to have_gitlab_http_status(:forbidden)
+ context 'with check_maven_path_first enabled' do
+ before do
+ stub_feature_flags(check_maven_path_first: true)
end
- it 'denies download when no private token' do
- download_file(package_file.file_name)
+ it_behaves_like 'handling all conditions'
+ end
- expect(response).to have_gitlab_http_status(:not_found)
+ context 'with check_maven_path_first disabled' do
+ before do
+ stub_feature_flags(check_maven_path_first: false)
end
- it_behaves_like 'downloads with a job token'
-
- it_behaves_like 'downloads with a deploy token'
+ it_behaves_like 'handling all conditions'
end
- def download_file(file_name, params = {}, request_headers = headers)
+ def download_file(file_name:, params: {}, request_headers: headers, path: maven_metadatum.path)
get api("/projects/#{project.id}/packages/maven/" \
- "#{maven_metadatum.path}/#{file_name}"), params: params, headers: request_headers
+ "#{path}/#{file_name}"), params: params, headers: request_headers
end
- def download_file_with_token(file_name, params = {}, request_headers = headers_with_token)
- download_file(file_name, params, request_headers)
+ def download_file_with_token(file_name:, params: {}, request_headers: headers_with_token, path: maven_metadatum.path)
+ download_file(file_name: file_name, params: params, request_headers: request_headers, path: path)
end
end
describe 'HEAD /api/v4/projects/:id/packages/maven/*path/:file_name' do
- let(:url) { "/projects/#{project.id}/packages/maven/#{package.maven_metadatum.path}/#{package_file.file_name}" }
+ let(:path) { package.maven_metadatum.path }
+ let(:url) { "/projects/#{project.id}/packages/maven/#{path}/#{package_file.file_name}" }
+
+ context 'with maven_packages_group_level_improvements enabled' do
+ before do
+ stub_feature_flags(maven_packages_group_level_improvements: true)
+ end
+
+ it_behaves_like 'processing HEAD requests'
+ end
+
+ context 'with maven_packages_group_level_improvements disabled' do
+ before do
+ stub_feature_flags(maven_packages_group_level_improvements: false)
+ end
+
+ it_behaves_like 'processing HEAD requests'
+ end
+
+ context 'with check_maven_path_first enabled' do
+ before do
+ stub_feature_flags(check_maven_path_first: true)
+ end
+
+ it_behaves_like 'processing HEAD requests'
+ end
- it_behaves_like 'processing HEAD requests'
+ context 'with check_maven_path_first disabled' do
+ before do
+ stub_feature_flags(check_maven_path_first: false)
+ end
+
+ it_behaves_like 'processing HEAD requests'
+ end
end
describe 'PUT /api/v4/projects/:id/packages/maven/*path/:file_name/authorize' do
it 'rejects a malicious request' do
- put api("/projects/#{project.id}/packages/maven/com/example/my-app/#{version}/%2e%2e%2F.ssh%2Fauthorized_keys/authorize"), params: {}, headers: headers_with_token
+ put api("/projects/#{project.id}/packages/maven/com/example/my-app/#{version}/%2e%2e%2F.ssh%2Fauthorized_keys/authorize"), headers: headers_with_token
expect(response).to have_gitlab_http_status(:bad_request)
end
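
The hunks above run one shared example group ('handling all conditions', 'processing HEAD requests') under every state of the maven_packages_group_level_improvements and check_maven_path_first flags. A minimal, self-contained sketch of that shared-examples-times-flag-states pattern in plain RSpec; the Downloader class and the local flag toggle are illustrative stand-ins for the real API and GitLab's stub_feature_flags helper:

    # flag_matrix_spec.rb: run with `rspec flag_matrix_spec.rb`.
    require 'rspec'

    # Toy downloader whose lookup path is guarded by a feature flag, used only
    # to illustrate the "shared examples under every flag state" matrix.
    class Downloader
      def initialize(flag_enabled:)
        @flag_enabled = flag_enabled
      end

      # Both code paths must return the file; the flag only changes the lookup order.
      def fetch(file_name)
        @flag_enabled ? "new-path/#{file_name}" : "old-path/#{file_name}"
      end
    end

    RSpec.describe Downloader do
      subject(:download) { described_class.new(flag_enabled: flag_enabled).fetch('maven-metadata.xml') }

      # The behaviour every flag combination must satisfy.
      shared_examples 'handling all conditions' do
        it 'returns the requested file' do
          expect(download).to end_with('maven-metadata.xml')
        end
      end

      context 'with the flag enabled' do
        let(:flag_enabled) { true }

        it_behaves_like 'handling all conditions'
      end

      context 'with the flag disabled' do
        let(:flag_enabled) { false }

        it_behaves_like 'handling all conditions'
      end
    end
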
diff --git a/spec/requests/api/members_spec.rb b/spec/requests/api/members_spec.rb
index 919c8d29406..d488aee0c10 100644
--- a/spec/requests/api/members_spec.rb
+++ b/spec/requests/api/members_spec.rb
@@ -273,7 +273,7 @@ RSpec.describe API::Members do
user_ids = [stranger.id, access_requester.id].join(',')
allow_next_instance_of(::Members::CreateService) do |service|
- expect(service).to receive(:execute).with(source).and_return({ status: :error, message: error_message })
+ expect(service).to receive(:execute).and_return({ status: :error, message: error_message })
end
expect do
@@ -555,6 +555,34 @@ RSpec.describe API::Members do
end
end
+ describe 'DELETE /groups/:id/members/:user_id' do
+ let(:other_user) { create(:user) }
+ let(:nested_group) { create(:group, parent: group) }
+
+ before do
+ nested_group.add_developer(developer)
+ nested_group.add_developer(other_user)
+ end
+
+ it 'deletes only the member with skip_subresources=true' do
+ expect do
+ delete api("/groups/#{group.id}/members/#{developer.id}", maintainer), params: { skip_subresources: true }
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end.to change { group.members.count }.by(-1)
+ .and change { nested_group.members.count }.by(0)
+ end
+
+ it 'deletes member and its sub memberships with skip_subresources=false' do
+ expect do
+ delete api("/groups/#{group.id}/members/#{developer.id}", maintainer), params: { skip_subresources: false }
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ end.to change { group.members.count }.by(-1)
+ .and change { nested_group.members.count }.by(-1)
+ end
+ end
+
[false, true].each do |all|
it_behaves_like 'GET /:source_type/:id/members/(all)', 'project', all do
let(:source) { project }
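
The new DELETE /groups/:id/members/:user_id examples chain two change expectations with .and, asserting the direct and nested membership counts in a single block. A self-contained sketch of that compound-matcher composition, with plain arrays standing in for the group and subgroup member relations:

    # compound_change_spec.rb: run with `rspec compound_change_spec.rb`.
    require 'rspec'

    RSpec.describe 'compound change matchers' do
      let(:group_members)  { %w[maintainer developer] }
      let(:nested_members) { %w[developer other_user] }

      # Stand-in for the API call: removes the member from the top-level group
      # and, unless skip_subresources is set, from the nested group too.
      def remove_member(name, skip_subresources:)
        group_members.delete(name)
        nested_members.delete(name) unless skip_subresources
      end

      it 'deletes only the direct membership with skip_subresources: true' do
        expect { remove_member('developer', skip_subresources: true) }
          .to change { group_members.count }.by(-1)
          .and change { nested_members.count }.by(0)
      end

      it 'deletes the nested membership too with skip_subresources: false' do
        expect { remove_member('developer', skip_subresources: false) }
          .to change { group_members.count }.by(-1)
          .and change { nested_members.count }.by(-1)
      end
    end
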
diff --git a/spec/requests/api/merge_request_diffs_spec.rb b/spec/requests/api/merge_request_diffs_spec.rb
index 971fb5e991c..caef946273a 100644
--- a/spec/requests/api/merge_request_diffs_spec.rb
+++ b/spec/requests/api/merge_request_diffs_spec.rb
@@ -75,5 +75,13 @@ RSpec.describe API::MergeRequestDiffs, 'MergeRequestDiffs' do
let(:url) { "/projects/#{project.id}/merge_requests/#{merge_request.iid}/versions/#{merge_request_diff.id}" }
end
end
+
+ context 'caching merge request diffs', :use_clean_rails_redis_caching do
+ it 'is performed' do
+ get api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/versions/#{merge_request_diff.id}", user)
+
+ expect(Rails.cache.fetch(merge_request_diff.cache_key)).to be_present
+ end
+ end
end
end
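
The new :use_clean_rails_redis_caching example asserts that serving a diff version leaves an entry under the record's cache_key. The same populate-then-inspect assertion can be sketched against ActiveSupport's in-memory cache store, without a Rails app or Redis; DiffPresenter and its cache key are illustrative stand-ins:

    # cache_population_spec.rb: run with `rspec cache_population_spec.rb` (needs the activesupport gem).
    require 'rspec'
    require 'active_support' # provides ActiveSupport::Cache::MemoryStore via autoload

    # Illustrative presenter: caches its rendered payload under a cache key,
    # mirroring the "request populates the cache" assertion in the diff.
    class DiffPresenter
      CACHE_KEY = 'merge_request_diff/1-20210420'

      def initialize(cache)
        @cache = cache
      end

      def render
        @cache.fetch(CACHE_KEY) { { 'diffs' => ['+added line'] } }
      end
    end

    RSpec.describe DiffPresenter do
      let(:cache) { ActiveSupport::Cache::MemoryStore.new }

      it 'populates the cache when rendering' do
        described_class.new(cache).render

        # The real spec uses be_present; not_to be_nil keeps this sketch dependency-light.
        expect(cache.fetch(DiffPresenter::CACHE_KEY)).not_to be_nil
      end
    end
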
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 09177dd1710..37cb8fb7ee5 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -2151,6 +2151,23 @@ RSpec.describe API::MergeRequests do
let(:entity) { merge_request }
end
+ context 'when only assignee_ids are provided' do
+ let(:params) do
+ {
+ assignee_ids: [user2.id]
+ }
+ end
+
+ it 'sets the assignees' do
+ put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}", user), params: params
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['assignees']).to contain_exactly(
+ a_hash_including('name' => user2.name)
+ )
+ end
+ end
+
context 'accepts reviewer_ids' do
let(:params) do
{
@@ -2533,7 +2550,7 @@ RSpec.describe API::MergeRequests do
it "results in a default squash commit message when not set" do
put api("/projects/#{project.id}/merge_requests/#{merge_request.iid}/merge", user), params: { squash: true }
- expect(squash_commit.message).to eq(merge_request.default_squash_commit_message)
+ expect(squash_commit.message.chomp).to eq(merge_request.default_squash_commit_message.chomp)
end
end
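
Comparing both sides of the squash-commit assertion through chomp makes it insensitive to a trailing newline on either the generated message or the default template. A tiny illustration of the difference:

    # chomp_comparison_spec.rb: run with `rspec chomp_comparison_spec.rb`.
    require 'rspec'

    RSpec.describe 'newline-insensitive message comparison' do
      let(:squash_commit_message) { "Squash of feature branch\n" } # Git output often carries a trailing newline
      let(:default_template)      { 'Squash of feature branch' }

      it 'fails when compared verbatim' do
        expect(squash_commit_message).not_to eq(default_template)
      end

      it 'passes once both sides are chomped' do
        expect(squash_commit_message.chomp).to eq(default_template.chomp)
      end
    end
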
diff --git a/spec/requests/api/namespaces_spec.rb b/spec/requests/api/namespaces_spec.rb
index 2ac76d469d5..1ed06a40f16 100644
--- a/spec/requests/api/namespaces_spec.rb
+++ b/spec/requests/api/namespaces_spec.rb
@@ -216,4 +216,77 @@ RSpec.describe API::Namespaces do
end
end
end
+
+ describe 'GET /namespaces/:namespace/exists' do
+ let!(:namespace1) { create(:group, name: 'Namespace 1', path: 'namespace-1') }
+ let!(:namespace2) { create(:group, name: 'Namespace 2', path: 'namespace-2') }
+ let!(:namespace1sub) { create(:group, name: 'Sub Namespace 1', path: 'sub-namespace-1', parent: namespace1) }
+ let!(:namespace2sub) { create(:group, name: 'Sub Namespace 2', path: 'sub-namespace-2', parent: namespace2) }
+
+ context 'when unauthenticated' do
+ it 'returns authentication error' do
+ get api("/namespaces/#{namespace1.path}/exists")
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'when authenticated' do
+ it 'returns JSON indicating the namespace exists and a suggestion' do
+ get api("/namespaces/#{namespace1.path}/exists", user)
+
+ expected_json = { exists: true, suggests: ["#{namespace1.path}1"] }.to_json
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(expected_json)
+ end
+
+ it 'returns JSON indicating the namespace does not exist without a suggestion' do
+ get api("/namespaces/non-existing-namespace/exists", user)
+
+ expected_json = { exists: false, suggests: [] }.to_json
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(expected_json)
+ end
+
+ it 'checks the existence of a namespace in case-insensitive manner' do
+ get api("/namespaces/#{namespace1.path.upcase}/exists", user)
+
+ expected_json = { exists: true, suggests: ["#{namespace1.path.upcase}1"] }.to_json
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(expected_json)
+ end
+
+ it 'checks the existence within the parent namespace only' do
+ get api("/namespaces/#{namespace1sub.path}/exists", user), params: { parent_id: namespace1.id }
+
+ expected_json = { exists: true, suggests: ["#{namespace1sub.path}1"] }.to_json
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(expected_json)
+ end
+
+ it 'ignores nested namespaces when checking for top-level namespace' do
+ get api("/namespaces/#{namespace1sub.path}/exists", user)
+
+ expected_json = { exists: false, suggests: [] }.to_json
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(expected_json)
+ end
+
+ it 'ignores top-level namespaces when checking with parent_id' do
+ get api("/namespaces/#{namespace1.path}/exists", user), params: { parent_id: namespace1.id }
+
+ expected_json = { exists: false, suggests: [] }.to_json
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(expected_json)
+ end
+
+ it 'ignores namespaces of other parent namespaces when checking with parent_id' do
+ get api("/namespaces/#{namespace2sub.path}/exists", user), params: { parent_id: namespace1.id }
+
+ expected_json = { exists: false, suggests: [] }.to_json
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.body).to eq(expected_json)
+ end
+ end
+ end
end
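
For reference, the GET /namespaces/:namespace/exists endpoint exercised above can be called roughly as follows. Only the path, the parent_id parameter, the authentication requirement, and the { exists, suggests } payload come from the spec; the host, the token source, and the PRIVATE-TOKEN header shown (GitLab's usual personal-access-token header) are illustrative assumptions:

    # namespace_exists.rb: hypothetical client call for the endpoint tested above.
    require 'json'
    require 'net/http'
    require 'uri'

    GITLAB_HOST = 'https://gitlab.example.com'  # assumption: your instance URL
    TOKEN       = ENV.fetch('GITLAB_TOKEN')     # assumption: a personal access token

    def namespace_exists?(path, parent_id: nil)
      uri = URI("#{GITLAB_HOST}/api/v4/namespaces/#{URI.encode_www_form_component(path)}/exists")
      uri.query = URI.encode_www_form(parent_id: parent_id) if parent_id

      request = Net::HTTP::Get.new(uri)
      request['PRIVATE-TOKEN'] = TOKEN # unauthenticated calls return 401 per the spec

      response = Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
        http.request(request)
      end

      JSON.parse(response.body) # e.g. { "exists" => true, "suggests" => ["namespace-11"] }
    end

    puts namespace_exists?('namespace-1')
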
diff --git a/spec/requests/api/notes_spec.rb b/spec/requests/api/notes_spec.rb
index baab72d106f..d4f8b841c96 100644
--- a/spec/requests/api/notes_spec.rb
+++ b/spec/requests/api/notes_spec.rb
@@ -251,7 +251,7 @@ RSpec.describe API::Notes do
expect { subject }.not_to change { Note.where(system: false).count }
end
- it 'does however create a system note about the change' do
+ it 'does however create a system note about the change', :sidekiq_inline do
expect { subject }.to change { Note.system.count }.by(1)
end
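
The :sidekiq_inline tag added above runs background jobs synchronously inside the example, so a system note created from a worker is visible to the change { Note.system.count } expectation. Outside GitLab's tag the same effect comes from Sidekiq's inline testing mode; a minimal sketch with a made-up worker:

    # sidekiq_inline_spec.rb: run with `rspec sidekiq_inline_spec.rb` (needs the sidekiq gem).
    require 'rspec'
    require 'sidekiq/testing'

    NOTES = [] # stand-in for the Note.system relation

    # Hypothetical worker that records a system note, used only for illustration.
    class SystemNoteWorker
      include Sidekiq::Worker

      def perform(text)
        NOTES << text
      end
    end

    RSpec.describe SystemNoteWorker do
      it 'creates the note when jobs run inline' do
        Sidekiq::Testing.inline! do
          expect { described_class.perform_async('changed the title') }
            .to change { NOTES.count }.by(1)
        end
      end
    end
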
diff --git a/spec/requests/api/npm_project_packages_spec.rb b/spec/requests/api/npm_project_packages_spec.rb
index e64b5ddc374..10271719a15 100644
--- a/spec/requests/api/npm_project_packages_spec.rb
+++ b/spec/requests/api/npm_project_packages_spec.rb
@@ -41,6 +41,15 @@ RSpec.describe API::NpmProjectPackages do
project.add_developer(user)
end
+ shared_examples 'successfully downloads the file' do
+ it 'returns the file' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('application/octet-stream')
+ end
+ end
+
shared_examples 'a package file that requires auth' do
it 'denies download with no token' do
subject
@@ -51,35 +60,28 @@ RSpec.describe API::NpmProjectPackages do
context 'with access token' do
let(:headers) { build_token_auth_header(token.token) }
- it 'returns the file' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ it_behaves_like 'successfully downloads the file'
end
context 'with job token' do
let(:headers) { build_token_auth_header(job.token) }
- it 'returns the file' do
- subject
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ it_behaves_like 'successfully downloads the file'
end
end
context 'a public project' do
- it 'returns the file with no token needed' do
- subject
+ it_behaves_like 'successfully downloads the file'
+ it_behaves_like 'a package tracking event', 'API::NpmPackages', 'pull_package'
- expect(response).to have_gitlab_http_status(:ok)
- expect(response.media_type).to eq('application/octet-stream')
- end
+ context 'with a job token for a different user' do
+ let_it_be(:other_user) { create(:user) }
+ let_it_be_with_reload(:other_job) { create(:ci_build, :running, user: other_user) }
- it_behaves_like 'a package tracking event', 'API::NpmPackages', 'pull_package'
+ let(:headers) { build_token_auth_header(other_job.token) }
+
+ it_behaves_like 'successfully downloads the file'
+ end
end
context 'private project' do
diff --git a/spec/requests/api/nuget_group_packages_spec.rb b/spec/requests/api/nuget_group_packages_spec.rb
index f7e81494660..aefbc89dc3b 100644
--- a/spec/requests/api/nuget_group_packages_spec.rb
+++ b/spec/requests/api/nuget_group_packages_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe API::NugetGroupPackages do
let(:take) { 26 }
let(:skip) { 0 }
let(:include_prereleases) { true }
- let(:query_parameters) { { q: search_term, take: take, skip: skip, prerelease: include_prereleases } }
+ let(:query_parameters) { { q: search_term, take: take, skip: skip, prerelease: include_prereleases }.compact }
subject { get api(url), headers: {}}
@@ -113,6 +113,45 @@ RSpec.describe API::NugetGroupPackages do
end
end
+ context 'with a reporter of subgroup' do
+ let_it_be(:package_name) { 'Dummy.Package' }
+ let_it_be(:package) { create(:nuget_package, :with_metadatum, name: package_name, project: project) }
+
+ let(:headers) { basic_auth_header(user.username, personal_access_token.token) }
+
+ subject { get api(url), headers: headers }
+
+ before do
+ subgroup.add_reporter(user)
+ project.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value('private'))
+ subgroup.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value('private'))
+ group.update_column(:visibility_level, Gitlab::VisibilityLevel.level_value('private'))
+ end
+
+ describe 'GET /api/v4/groups/:id/-/packages/nuget/metadata/*package_name/index' do
+ let(:url) { "/groups/#{group.id}/-/packages/nuget/metadata/#{package_name}/index.json" }
+
+ it_behaves_like 'returning response status', :forbidden
+ end
+
+ describe 'GET /api/v4/groups/:id/-/packages/nuget/metadata/*package_name/*package_version' do
+ let(:url) { "/groups/#{group.id}/-/packages/nuget/metadata/#{package_name}/#{package.version}.json" }
+
+ it_behaves_like 'returning response status', :forbidden
+ end
+
+ describe 'GET /api/v4/groups/:id/-/packages/nuget/query' do
+ let(:search_term) { 'uMmy' }
+ let(:take) { 26 }
+ let(:skip) { 0 }
+ let(:include_prereleases) { false }
+ let(:query_parameters) { { q: search_term, take: take, skip: skip, prerelease: include_prereleases }.compact }
+ let(:url) { "/groups/#{group.id}/-/packages/nuget/query?#{query_parameters.to_query}" }
+
+ it_behaves_like 'returning response status', :forbidden
+ end
+ end
+
def update_visibility_to(visibility)
project.update!(visibility_level: visibility)
subgroup.update!(visibility_level: visibility)
diff --git a/spec/requests/api/nuget_project_packages_spec.rb b/spec/requests/api/nuget_project_packages_spec.rb
index 0277aa73220..54fe0b985df 100644
--- a/spec/requests/api/nuget_project_packages_spec.rb
+++ b/spec/requests/api/nuget_project_packages_spec.rb
@@ -188,6 +188,10 @@ RSpec.describe API::NugetProjectPackages do
it_behaves_like 'deploy token for package uploads'
+ it_behaves_like 'job token for package uploads', authorize_endpoint: true do
+ let_it_be(:job) { create(:ci_build, :running, user: user) }
+ end
+
it_behaves_like 'rejects nuget access with unknown target id'
it_behaves_like 'rejects nuget access with invalid target id'
@@ -251,6 +255,10 @@ RSpec.describe API::NugetProjectPackages do
it_behaves_like 'deploy token for package uploads'
+ it_behaves_like 'job token for package uploads' do
+ let_it_be(:job) { create(:ci_build, :running, user: user) }
+ end
+
it_behaves_like 'rejects nuget access with unknown target id'
it_behaves_like 'rejects nuget access with invalid target id'
diff --git a/spec/requests/api/project_attributes.yml b/spec/requests/api/project_attributes.yml
index 6c9a845b217..f9eb9de94db 100644
--- a/spec/requests/api/project_attributes.yml
+++ b/spec/requests/api/project_attributes.yml
@@ -140,6 +140,7 @@ project_setting:
- squash_option
- updated_at
- cve_id_request_enabled
+ - mr_default_target_self
build_service_desk_setting: # service_desk_setting
unexposed_attributes:
diff --git a/spec/requests/api/project_import_spec.rb b/spec/requests/api/project_import_spec.rb
index a049d7d7515..f6cdf370e5c 100644
--- a/spec/requests/api/project_import_spec.rb
+++ b/spec/requests/api/project_import_spec.rb
@@ -235,12 +235,14 @@ RSpec.describe API::ProjectImport do
stub_uploads_object_storage(ImportExportUploader, direct_upload: true)
end
+ # rubocop:disable Rails/SaveBang
let(:tmp_object) do
fog_connection.directories.new(key: 'uploads').files.create(
key: "tmp/uploads/#{file_name}",
body: fixture_file_upload(file)
)
end
+ # rubocop:enable Rails/SaveBang
let(:file_upload) { fog_to_uploaded_file(tmp_object) }
@@ -285,7 +287,7 @@ RSpec.describe API::ProjectImport do
it 'returns the import status and the error if failed' do
project = create(:project, :import_failed)
project.add_maintainer(user)
- project.import_state.update(last_error: 'error')
+ project.import_state.update!(last_error: 'error')
get api("/projects/#{project.id}/import", user)
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index d2a33e32b30..b0ecb711283 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.shared_examples 'languages and percentages JSON response' do
- let(:expected_languages) { project.repository.languages.map { |language| language.values_at(:label, :value)}.to_h }
+ let(:expected_languages) { project.repository.languages.to_h { |language| language.values_at(:label, :value) } }
before do
allow(project.repository).to receive(:languages).and_return(
@@ -810,6 +810,54 @@ RSpec.describe API::Projects do
end
end
end
+
+ context 'with forked projects', :use_clean_rails_memory_store_caching do
+ include ProjectForksHelper
+
+ let_it_be(:admin) { create(:admin) }
+
+ it 'avoids N+1 queries' do
+ get api('/projects', admin)
+
+ base_project = create(:project, :public, namespace: admin.namespace)
+
+ fork_project1 = fork_project(base_project, admin, namespace: create(:user).namespace)
+ fork_project2 = fork_project(fork_project1, admin, namespace: create(:user).namespace)
+
+ control = ActiveRecord::QueryRecorder.new do
+ get api('/projects', admin)
+ end
+
+ fork_project(fork_project2, admin, namespace: create(:user).namespace)
+
+ expect do
+ get api('/projects', admin)
+ end.not_to exceed_query_limit(control.count)
+ end
+ end
+
+ context 'when service desk is enabled', :use_clean_rails_memory_store_caching do
+ let_it_be(:admin) { create(:admin) }
+
+ it 'avoids N+1 queries' do
+ allow(Gitlab::ServiceDeskEmail).to receive(:enabled?).and_return(true)
+ allow(Gitlab::IncomingEmail).to receive(:enabled?).and_return(true)
+
+ get api('/projects', admin)
+
+ create(:project, :public, :service_desk_enabled, namespace: admin.namespace)
+
+ control = ActiveRecord::QueryRecorder.new do
+ get api('/projects', admin)
+ end
+
+ create_list(:project, 2, :public, :service_desk_enabled, namespace: admin.namespace)
+
+ expect do
+ get api('/projects', admin)
+ end.not_to exceed_query_limit(control.count)
+ end
+ end
end
describe 'POST /projects' do
@@ -1461,21 +1509,139 @@ RSpec.describe API::Projects do
end
end
+ describe "POST /projects/:id/uploads/authorize" do
+ include WorkhorseHelpers
+
+ let(:headers) { workhorse_internal_api_request_header.merge({ 'HTTP_GITLAB_WORKHORSE' => 1 }) }
+
+ context 'with authorized user' do
+ it "returns 200" do
+ post api("/projects/#{project.id}/uploads/authorize", user), headers: headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['MaximumSize']).to eq(project.max_attachment_size)
+ end
+ end
+
+ context 'with unauthorized user' do
+ it "returns 404" do
+ post api("/projects/#{project.id}/uploads/authorize", user2), headers: headers
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'with exempted project' do
+ before do
+ stub_env('GITLAB_UPLOAD_API_ALLOWLIST', project.id)
+ end
+
+ it "returns 200" do
+ post api("/projects/#{project.id}/uploads/authorize", user), headers: headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['MaximumSize']).to eq(1.gigabyte)
+ end
+ end
+
+ context 'with upload size enforcement disabled' do
+ before do
+ stub_feature_flags(enforce_max_attachment_size_upload_api: false)
+ end
+
+ it "returns 200" do
+ post api("/projects/#{project.id}/uploads/authorize", user), headers: headers
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['MaximumSize']).to eq(1.gigabyte)
+ end
+ end
+
+ context 'with no Workhorse headers' do
+ it "returns 403" do
+ post api("/projects/#{project.id}/uploads/authorize", user)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
describe "POST /projects/:id/uploads" do
+ let(:file) { fixture_file_upload("spec/fixtures/dk.png", "image/png") }
+
before do
project
end
it "uploads the file and returns its info" do
- post api("/projects/#{project.id}/uploads", user), params: { file: fixture_file_upload("spec/fixtures/dk.png", "image/png") }
+ expect_next_instance_of(UploadService) do |instance|
+ expect(instance).to receive(:override_max_attachment_size=).with(project.max_attachment_size).and_call_original
+ end
+
+ post api("/projects/#{project.id}/uploads", user), params: { file: file }
expect(response).to have_gitlab_http_status(:created)
expect(json_response['alt']).to eq("dk")
expect(json_response['url']).to start_with("/uploads/")
expect(json_response['url']).to end_with("/dk.png")
-
expect(json_response['full_path']).to start_with("/#{project.namespace.path}/#{project.path}/uploads")
end
+
+ it "does not leave the temporary file in place after uploading, even when the tempfile reaper does not run" do
+ stub_env('GITLAB_TEMPFILE_IMMEDIATE_UNLINK', '1')
+ tempfile = Tempfile.new('foo')
+ path = tempfile.path
+
+ allow_any_instance_of(Rack::TempfileReaper).to receive(:call) do |instance, env|
+ instance.instance_variable_get(:@app).call(env)
+ end
+
+ expect(path).not_to be(nil)
+ expect(Rack::Multipart::Parser::TEMPFILE_FACTORY).to receive(:call).and_return(tempfile)
+
+ post api("/projects/#{project.id}/uploads", user), params: { file: fixture_file_upload("spec/fixtures/dk.png", "image/png") }
+
+ expect(tempfile.path).to be(nil)
+ expect(File.exist?(path)).to be(false)
+ end
+
+ shared_examples 'capped upload attachments' do |upload_allowed|
+ it "limits the upload to 1 GB" do
+ expect_next_instance_of(UploadService) do |instance|
+ expect(instance).to receive(:override_max_attachment_size=).with(1.gigabyte).and_call_original
+ end
+
+ post api("/projects/#{project.id}/uploads", user), params: { file: file }
+
+ expect(response).to have_gitlab_http_status(:created)
+ end
+
+ it "logs a warning if file exceeds attachment size" do
+ allow(Gitlab::CurrentSettings).to receive(:max_attachment_size).and_return(0)
+
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ hash_including(message: 'File exceeds maximum size', upload_allowed: upload_allowed))
+ .and_call_original
+
+ post api("/projects/#{project.id}/uploads", user), params: { file: file }
+ end
+ end
+
+ context 'with exempted project' do
+ before do
+ stub_env('GITLAB_UPLOAD_API_ALLOWLIST', project.id)
+ end
+
+ it_behaves_like 'capped upload attachments', true
+ end
+
+ context 'with upload size enforcement disabled' do
+ before do
+ stub_feature_flags(enforce_max_attachment_size_upload_api: false)
+ end
+
+ it_behaves_like 'capped upload attachments', false
+ end
end
describe "GET /projects/:id/groups" do
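
The new forked-projects and service-desk examples follow the usual N+1 guard: warm the endpoint, record a baseline with ActiveRecord::QueryRecorder, create more records, then assert the query count does not grow via not_to exceed_query_limit. Those helpers live in GitLab's spec support; the baseline-versus-after technique itself can be sketched with a toy query counter:

    # n_plus_one_guard_spec.rb: run with `rspec n_plus_one_guard_spec.rb`.
    require 'rspec'

    # Toy data source that counts how many "queries" it performs.
    class ProjectStore
      attr_reader :query_count

      def initialize
        @projects = []
        @query_count = 0
      end

      def add(name)
        @projects << name
      end

      # One query for the whole list, regardless of how many projects exist:
      # the property the real specs protect with exceed_query_limit.
      def list
        @query_count += 1
        @projects.dup
      end
    end

    RSpec.describe ProjectStore do
      it 'avoids N+1 queries' do
        store = described_class.new
        store.add('base_project')

        store.list # warm-up, mirroring the first `get api('/projects', admin)`

        control = store.query_count
        store.list
        baseline_cost = store.query_count - control

        store.add('fork_1')
        store.add('fork_2')

        before = store.query_count
        store.list

        expect(store.query_count - before).to be <= baseline_cost
      end
    end
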
diff --git a/spec/requests/api/pypi_packages_spec.rb b/spec/requests/api/pypi_packages_spec.rb
index ae5b132f409..718004a0087 100644
--- a/spec/requests/api/pypi_packages_spec.rb
+++ b/spec/requests/api/pypi_packages_spec.rb
@@ -118,7 +118,7 @@ RSpec.describe API::PypiPackages do
it_behaves_like 'deploy token for package uploads'
- it_behaves_like 'job token for package uploads'
+ it_behaves_like 'job token for package uploads', authorize_endpoint: true
it_behaves_like 'rejects PyPI access with unknown project id'
end
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index 31f0d7cec2a..a12b4dc9848 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -6,6 +6,7 @@ require 'mime/types'
RSpec.describe API::Repositories do
include RepoHelpers
include WorkhorseHelpers
+ include ProjectForksHelper
let(:user) { create(:user) }
let(:guest) { create(:user).tap { |u| create(:project_member, :guest, user: u, project: project) } }
@@ -392,6 +393,28 @@ RSpec.describe API::Repositories do
expect(json_response['diffs']).to be_present
end
+ it "compare commits between different projects with non-forked relation" do
+ public_project = create(:project, :repository, :public)
+
+ get api(route, current_user), params: { from: sample_commit.parent_id, to: sample_commit.id, from_project_id: public_project.id }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+
+ it "compare commits between different projects" do
+ group = create(:group)
+ group.add_owner(current_user)
+
+ forked_project = fork_project(project, current_user, repository: true, namespace: group)
+ forked_project.repository.create_ref('refs/heads/improve/awesome', 'refs/heads/improve/more-awesome')
+
+ get api(route, current_user), params: { from: 'improve/awesome', to: 'feature', from_project_id: forked_project.id }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['commits']).to be_present
+ expect(json_response['diffs']).to be_present
+ end
+
it "compares same refs" do
get api(route, current_user), params: { from: 'master', to: 'master' }
diff --git a/spec/requests/api/resource_access_tokens_spec.rb b/spec/requests/api/resource_access_tokens_spec.rb
index 79549bfc5e0..1a3c805fe9f 100644
--- a/spec/requests/api/resource_access_tokens_spec.rb
+++ b/spec/requests/api/resource_access_tokens_spec.rb
@@ -151,6 +151,23 @@ RSpec.describe API::ResourceAccessTokens do
expect(User.exists?(project_bot.id)).to be_falsy
end
+ context "when using project access token to DELETE other project access token" do
+ let_it_be(:other_project_bot) { create(:user, :project_bot) }
+ let_it_be(:other_token) { create(:personal_access_token, user: other_project_bot) }
+ let_it_be(:token_id) { other_token.id }
+
+ before do
+ project.add_maintainer(other_project_bot)
+ end
+
+ it "deletes the project access token from the project" do
+ delete_token
+
+ expect(response).to have_gitlab_http_status(:no_content)
+ expect(User.exists?(other_project_bot.id)).to be_falsy
+ end
+ end
+
context "when attempting to delete a non-existent project access token" do
let_it_be(:token_id) { non_existing_record_id }
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index 3b84c812010..48f5bd114a1 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Settings, 'Settings' do
+RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting do
let(:user) { create(:user) }
let_it_be(:admin) { create(:admin) }
@@ -44,6 +44,7 @@ RSpec.describe API::Settings, 'Settings' do
expect(json_response['wiki_page_max_content_bytes']).to be_a(Integer)
expect(json_response['require_admin_approval_after_user_signup']).to eq(true)
expect(json_response['personal_access_token_prefix']).to be_nil
+ expect(json_response['admin_mode']).to be(false)
end
end
@@ -124,7 +125,8 @@ RSpec.describe API::Settings, 'Settings' do
disabled_oauth_sign_in_sources: 'unknown',
import_sources: 'github,bitbucket',
wiki_page_max_content_bytes: 12345,
- personal_access_token_prefix: "GL-"
+ personal_access_token_prefix: "GL-",
+ admin_mode: true
}
expect(response).to have_gitlab_http_status(:ok)
@@ -169,6 +171,7 @@ RSpec.describe API::Settings, 'Settings' do
expect(json_response['import_sources']).to match_array(%w(github bitbucket))
expect(json_response['wiki_page_max_content_bytes']).to eq(12345)
expect(json_response['personal_access_token_prefix']).to eq("GL-")
+ expect(json_response['admin_mode']).to be(true)
end
end
diff --git a/spec/requests/api/tags_spec.rb b/spec/requests/api/tags_spec.rb
index b029c0f5793..3c698cf577e 100644
--- a/spec/requests/api/tags_spec.rb
+++ b/spec/requests/api/tags_spec.rb
@@ -17,131 +17,197 @@ RSpec.describe API::Tags do
end
describe 'GET /projects/:id/repository/tags' do
- let(:route) { "/projects/#{project_id}/repository/tags" }
+ shared_examples "get repository tags" do
+ let(:route) { "/projects/#{project_id}/repository/tags" }
- context 'sorting' do
- let(:current_user) { user }
+ context 'sorting' do
+ let(:current_user) { user }
- it 'sorts by descending order by default' do
- get api(route, current_user)
+ it 'sorts by descending order by default' do
+ get api(route, current_user)
- desc_order_tags = project.repository.tags.sort_by { |tag| tag.dereferenced_target.committed_date }
- desc_order_tags.reverse!.map! { |tag| tag.dereferenced_target.id }
+ desc_order_tags = project.repository.tags.sort_by { |tag| tag.dereferenced_target.committed_date }
+ desc_order_tags.reverse!.map! { |tag| tag.dereferenced_target.id }
- expect(json_response.map { |tag| tag['commit']['id'] }).to eq(desc_order_tags)
- end
+ expect(json_response.map { |tag| tag['commit']['id'] }).to eq(desc_order_tags)
+ end
- it 'sorts by ascending order if specified' do
- get api("#{route}?sort=asc", current_user)
+ it 'sorts by ascending order if specified' do
+ get api("#{route}?sort=asc", current_user)
- asc_order_tags = project.repository.tags.sort_by { |tag| tag.dereferenced_target.committed_date }
- asc_order_tags.map! { |tag| tag.dereferenced_target.id }
+ asc_order_tags = project.repository.tags.sort_by { |tag| tag.dereferenced_target.committed_date }
+ asc_order_tags.map! { |tag| tag.dereferenced_target.id }
- expect(json_response.map { |tag| tag['commit']['id'] }).to eq(asc_order_tags)
- end
+ expect(json_response.map { |tag| tag['commit']['id'] }).to eq(asc_order_tags)
+ end
- it 'sorts by name in descending order when requested' do
- get api("#{route}?order_by=name", current_user)
+ it 'sorts by name in descending order when requested' do
+ get api("#{route}?order_by=name", current_user)
- ordered_by_name = project.repository.tags.map { |tag| tag.name }.sort.reverse
+ ordered_by_name = project.repository.tags.map { |tag| tag.name }.sort.reverse
- expect(json_response.map { |tag| tag['name'] }).to eq(ordered_by_name)
- end
+ expect(json_response.map { |tag| tag['name'] }).to eq(ordered_by_name)
+ end
- it 'sorts by name in ascending order when requested' do
- get api("#{route}?order_by=name&sort=asc", current_user)
+ it 'sorts by name in ascending order when requested' do
+ get api("#{route}?order_by=name&sort=asc", current_user)
- ordered_by_name = project.repository.tags.map { |tag| tag.name }.sort
+ ordered_by_name = project.repository.tags.map { |tag| tag.name }.sort
- expect(json_response.map { |tag| tag['name'] }).to eq(ordered_by_name)
+ expect(json_response.map { |tag| tag['name'] }).to eq(ordered_by_name)
+ end
end
- end
- context 'searching' do
- it 'only returns searched tags' do
- get api("#{route}", user), params: { search: 'v1.1.0' }
+ context 'searching' do
+ it 'only returns searched tags' do
+ get api("#{route}", user), params: { search: 'v1.1.0' }
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to include_pagination_headers
- expect(json_response).to be_an Array
- expect(json_response.size).to eq(1)
- expect(json_response[0]['name']).to eq('v1.1.0')
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.size).to eq(1)
+ expect(json_response[0]['name']).to eq('v1.1.0')
+ end
end
- end
- shared_examples_for 'repository tags' do
- it 'returns the repository tags' do
- get api(route, current_user)
+ shared_examples_for 'repository tags' do
+ it 'returns the repository tags' do
+ get api(route, current_user)
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('public_api/v4/tags')
- expect(response).to include_pagination_headers
- expect(json_response.map { |r| r['name'] }).to include(tag_name)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/tags')
+ expect(response).to include_pagination_headers
+ expect(json_response.map { |r| r['name'] }).to include(tag_name)
+ end
+
+ context 'when repository is disabled' do
+ include_context 'disabled repository'
+
+ it_behaves_like '403 response' do
+ let(:request) { get api(route, current_user) }
+ end
+ end
end
- context 'when repository is disabled' do
- include_context 'disabled repository'
+ context 'when unauthenticated', 'and project is public' do
+ let(:project) { create(:project, :public, :repository) }
- it_behaves_like '403 response' do
- let(:request) { get api(route, current_user) }
+ it_behaves_like 'repository tags'
+ end
+
+ context 'when unauthenticated', 'and project is private' do
+ it_behaves_like '404 response' do
+ let(:request) { get api(route) }
+ let(:message) { '404 Project Not Found' }
end
end
- end
- context 'when unauthenticated', 'and project is public' do
- let(:project) { create(:project, :public, :repository) }
+ context 'when authenticated', 'as a maintainer' do
+ let(:current_user) { user }
- it_behaves_like 'repository tags'
- end
+ it_behaves_like 'repository tags'
- context 'when unauthenticated', 'and project is private' do
- it_behaves_like '404 response' do
- let(:request) { get api(route) }
- let(:message) { '404 Project Not Found' }
+ context 'requesting with the escaped project full path' do
+ let(:project_id) { CGI.escape(project.full_path) }
+
+ it_behaves_like 'repository tags'
+ end
end
- end
- context 'when authenticated', 'as a maintainer' do
- let(:current_user) { user }
+ context 'when authenticated', 'as a guest' do
+ it_behaves_like '403 response' do
+ let(:request) { get api(route, guest) }
+ end
+ end
- it_behaves_like 'repository tags'
+ context 'with releases' do
+ let(:description) { 'Awesome release!' }
- context 'requesting with the escaped project full path' do
- let(:project_id) { CGI.escape(project.full_path) }
+ let!(:release) do
+ create(:release,
+ :legacy,
+ project: project,
+ tag: tag_name,
+ description: description)
+ end
- it_behaves_like 'repository tags'
+ it 'returns an array of project tags with release info' do
+ get api(route, user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to match_response_schema('public_api/v4/tags')
+ expect(response).to include_pagination_headers
+
+ expected_tag = json_response.find { |r| r['name'] == tag_name }
+ expect(expected_tag['message']).to eq(tag_message)
+ expect(expected_tag['release']['description']).to eq(description)
+ end
end
end
- context 'when authenticated', 'as a guest' do
- it_behaves_like '403 response' do
- let(:request) { get api(route, guest) }
+ context ":api_caching_tags flag enabled", :use_clean_rails_memory_store_caching do
+ before do
+ stub_feature_flags(api_caching_tags: true)
end
- end
- context 'with releases' do
- let(:description) { 'Awesome release!' }
+ it_behaves_like "get repository tags"
- let!(:release) do
- create(:release,
- :legacy,
- project: project,
- tag: tag_name,
- description: description)
- end
+ describe "cache expiry" do
+ let(:route) { "/projects/#{project_id}/repository/tags" }
+ let(:current_user) { user }
- it 'returns an array of project tags with release info' do
- get api(route, user)
+ before do
+ # Set the cache
+ get api(route, current_user)
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(response).to match_response_schema('public_api/v4/tags')
- expect(response).to include_pagination_headers
+ it "is cached" do
+ expect(API::Entities::Tag).not_to receive(:represent)
+
+ get api(route, current_user)
+ end
+
+ shared_examples "cache expired" do
+ it "isn't cached" do
+ expect(API::Entities::Tag).to receive(:represent).exactly(3).times
+
+ get api(route, current_user)
+ end
+ end
+
+ context "when protected tag is changed" do
+ before do
+ create(:protected_tag, name: tag_name, project: project)
+ end
+
+ it_behaves_like "cache expired"
+ end
- expected_tag = json_response.find { |r| r['name'] == tag_name }
- expect(expected_tag['message']).to eq(tag_message)
- expect(expected_tag['release']['description']).to eq(description)
+ context "when release is changed" do
+ before do
+ create(:release, :legacy, project: project, tag: tag_name)
+ end
+
+ it_behaves_like "cache expired"
+ end
+
+ context "when project is changed" do
+ before do
+ project.touch
+ end
+
+ it_behaves_like "cache expired"
+ end
end
end
+
+ context ":api_caching_tags flag disabled" do
+ before do
+ stub_feature_flags(api_caching_tags: false)
+ end
+
+ it_behaves_like "get repository tags"
+ end
end
describe 'GET /projects/:id/repository/tags/:tag_name' do
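
The cache-expiry block above proves a cache hit by asserting API::Entities::Tag never receives represent on a repeated request, and proves expiry by expecting the call again once the cache key's inputs change. A self-contained sketch of that message-expectation technique with an illustrative serializer and a hash cache:

    # cached_serializer_spec.rb: run with `rspec cached_serializer_spec.rb`.
    require 'rspec'

    # Illustrative serializer guarded by a simple hash cache.
    class TagSerializer
      def self.represent(tags)
        tags.map { |tag| { name: tag } }
      end
    end

    class TagsEndpoint
      def initialize(cache)
        @cache = cache
      end

      def call(tags, cache_key:)
        @cache[cache_key] ||= TagSerializer.represent(tags)
      end
    end

    RSpec.describe TagsEndpoint do
      let(:cache)    { {} }
      let(:endpoint) { described_class.new(cache) }
      let(:tags)     { %w[v1.0.0 v1.1.0] }

      before do
        endpoint.call(tags, cache_key: 'tags/v1') # set the cache, as in the diff's before block
      end

      it 'is cached' do
        expect(TagSerializer).not_to receive(:represent)

        endpoint.call(tags, cache_key: 'tags/v1')
      end

      it "isn't cached when the key changes" do
        expect(TagSerializer).to receive(:represent).once.and_call_original

        endpoint.call(tags, cache_key: 'tags/v2')
      end
    end
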
diff --git a/spec/requests/api/triggers_spec.rb b/spec/requests/api/triggers_spec.rb
index 55d17fabc9a..4318f106996 100644
--- a/spec/requests/api/triggers_spec.rb
+++ b/spec/requests/api/triggers_spec.rb
@@ -49,8 +49,6 @@ RSpec.describe API::Triggers do
expect(response).to have_gitlab_http_status(:created)
expect(json_response).to include('id' => pipeline.id)
- pipeline.builds.reload
- expect(pipeline.builds.pending.size).to eq(2)
expect(pipeline.builds.size).to eq(5)
end
@@ -126,6 +124,39 @@ RSpec.describe API::Triggers do
end
end
+ describe 'adding arguments to the application context' do
+ subject { subject_proc.call }
+
+ let(:expected_params) { { client_id: "user/#{user.id}", project: project.full_path } }
+ let(:subject_proc) { proc { post api("/projects/#{project.id}/ref/master/trigger/pipeline?token=#{trigger_token}"), params: { ref: 'refs/heads/other-branch' } } }
+
+ context 'when triggering a pipeline from a trigger token' do
+ it_behaves_like 'storing arguments in the application context'
+ it_behaves_like 'not executing any extra queries for the application context'
+ end
+
+ context 'when triggered from another running job' do
+ let!(:trigger) { }
+ let!(:trigger_request) { }
+
+ context 'when other job is triggered by a user' do
+ let(:trigger_token) { create(:ci_build, :running, project: project, user: user).token }
+
+ it_behaves_like 'storing arguments in the application context'
+ it_behaves_like 'not executing any extra queries for the application context'
+ end
+
+ context 'when other job is triggered by a runner' do
+ let(:trigger_token) { create(:ci_build, :running, project: project, runner: runner).token }
+ let(:runner) { create(:ci_runner) }
+ let(:expected_params) { { client_id: "runner/#{runner.id}", project: project.full_path } }
+
+ it_behaves_like 'storing arguments in the application context'
+ it_behaves_like 'not executing any extra queries for the application context', 1
+ end
+ end
+ end
+
context 'when is triggered by a pipeline hook' do
it 'does not create a new pipeline' do
expect do
diff --git a/spec/requests/api/usage_data_non_sql_metrics_spec.rb b/spec/requests/api/usage_data_non_sql_metrics_spec.rb
new file mode 100644
index 00000000000..225af57a267
--- /dev/null
+++ b/spec/requests/api/usage_data_non_sql_metrics_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::UsageDataNonSqlMetrics do
+ include UsageDataHelpers
+
+ let_it_be(:admin) { create(:user, admin: true) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ stub_usage_data_connections
+ end
+
+ describe 'GET /usage_data/non_sql_metrics' do
+ let(:endpoint) { '/usage_data/non_sql_metrics' }
+
+ context 'with authentication' do
+ before do
+ stub_feature_flags(usage_data_non_sql_metrics: true)
+ end
+
+ it 'returns non sql metrics if user is admin' do
+ get api(endpoint, admin)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['counts']).to be_a(Hash)
+ end
+
+ it 'returns forbidden if user is not admin' do
+ get api(endpoint, user)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'without authentication' do
+ before do
+ stub_feature_flags(usage_data_non_sql_metrics: true)
+ end
+
+ it 'returns unauthorized' do
+ get api(endpoint)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'when feature_flag is disabled' do
+ before do
+ stub_feature_flags(usage_data_non_sql_metrics: false)
+ end
+
+ it 'returns not_found for admin' do
+ get api(endpoint, admin)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns forbidden for non-admin' do
+ get api(endpoint, user)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/usage_data_queries_spec.rb b/spec/requests/api/usage_data_queries_spec.rb
new file mode 100644
index 00000000000..0ba4a37bc9b
--- /dev/null
+++ b/spec/requests/api/usage_data_queries_spec.rb
@@ -0,0 +1,67 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::UsageDataQueries do
+ include UsageDataHelpers
+
+ let_it_be(:admin) { create(:user, admin: true) }
+ let_it_be(:user) { create(:user) }
+
+ before do
+ stub_usage_data_connections
+ end
+
+ describe 'GET /usage_data/usage_data_queries' do
+ let(:endpoint) { '/usage_data/queries' }
+
+ context 'with authentication' do
+ before do
+ stub_feature_flags(usage_data_queries_api: true)
+ end
+
+ it 'returns queries if user is admin' do
+ get api(endpoint, admin)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['active_user_count']).to start_with('SELECT COUNT("users"."id") FROM "users"')
+ end
+
+ it 'returns forbidden if user is not admin' do
+ get api(endpoint, user)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'without authentication' do
+ before do
+ stub_feature_flags(usage_data_queries_api: true)
+ end
+
+ it 'returns unauthorized' do
+ get api(endpoint)
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'when feature_flag is disabled' do
+ before do
+ stub_feature_flags(usage_data_queries_api: false)
+ end
+
+ it 'returns not_found for admin' do
+ get api(endpoint, admin)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'returns forbidden for non-admin' do
+ get api(endpoint, user)
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/usage_data_spec.rb b/spec/requests/api/usage_data_spec.rb
index d44f179eed8..bacaf960e6a 100644
--- a/spec/requests/api/usage_data_spec.rb
+++ b/spec/requests/api/usage_data_spec.rb
@@ -161,4 +161,23 @@ RSpec.describe API::UsageData do
end
end
end
+
+ describe 'GET /usage_data/metric_definitions' do
+ let(:endpoint) { '/usage_data/metric_definitions' }
+ let(:metric_yaml) do
+ { 'key_path' => 'counter.category.event', 'description' => 'Metric description' }.to_yaml
+ end
+
+ context 'without authentication' do
+ it 'returns a YAML file', :aggregate_failures do
+ allow(Gitlab::Usage::MetricDefinition).to receive(:dump_metrics_yaml).and_return(metric_yaml)
+
+ get api(endpoint)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response.media_type).to eq('application/yaml')
+ expect(response.body).to eq(metric_yaml)
+ end
+ end
+ end
end
diff --git a/spec/requests/api/users_preferences_spec.rb b/spec/requests/api/users_preferences_spec.rb
new file mode 100644
index 00000000000..db03786ed2a
--- /dev/null
+++ b/spec/requests/api/users_preferences_spec.rb
@@ -0,0 +1,65 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Users do
+ let_it_be(:user) { create(:user) }
+
+ describe 'PUT /user/preferences/' do
+ context "with correct attributes and a logged in user" do
+ it 'returns a success status and the value has been changed' do
+ put api("/user/preferences", user), params: { view_diffs_file_by_file: true }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['view_diffs_file_by_file']).to eq(true)
+ expect(user.reload.view_diffs_file_by_file).to be_truthy
+ end
+ end
+
+ context "missing a preference" do
+ it 'returns a bad request status' do
+ put api("/user/preferences", user), params: {}
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context "without a logged in user" do
+ it 'returns an unauthorized status' do
+ put api("/user/preferences"), params: { view_diffs_file_by_file: true }
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context "with an unsupported preference" do
+ it 'returns a bad parameter' do
+ put api("/user/preferences", user), params: { jawn: true }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context "with an unsupported value" do
+ it 'returns a bad parameter' do
+ put api("/user/preferences", user), params: { view_diffs_file_by_file: 3 }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+
+ context "with an update service failure" do
+ it 'returns a bad request' do
+ bad_service = double("Failed Service", success?: false)
+
+ allow_next_instance_of(::UserPreferences::UpdateService) do |instance|
+ allow(instance).to receive(:execute).and_return(bad_service)
+ end
+
+ put api("/user/preferences", user), params: { view_diffs_file_by_file: true }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+end
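
The update-service-failure example stubs the service so it returns an object answering success? with false. allow_next_instance_of is a GitLab spec helper; with plain rspec-mocks the same shape looks roughly like this (all class names are illustrative):

    # service_failure_spec.rb: run with `rspec service_failure_spec.rb`.
    require 'rspec'

    # Illustrative service plus caller, standing in for UserPreferences::UpdateService
    # and the API endpoint that wraps it.
    class UpdateService
      class Result
        def success?
          true
        end
      end

      def execute
        Result.new # real work elided
      end
    end

    class PreferencesEndpoint
      def update
        UpdateService.new.execute.success? ? :ok : :bad_request
      end
    end

    RSpec.describe PreferencesEndpoint do
      it 'returns ok when the service succeeds' do
        expect(described_class.new.update).to eq(:ok)
      end

      it 'returns a bad request when the service fails' do
        bad_service = double('Failed Service', success?: false)

        # Stub the next constructed service instance, mirroring allow_next_instance_of.
        allow(UpdateService).to receive(:new)
          .and_return(instance_double(UpdateService, execute: bad_service))

        expect(described_class.new.update).to eq(:bad_request)
      end
    end
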
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index 2a7689eaddf..01a24be9f20 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -928,7 +928,8 @@ RSpec.describe API::Users do
end
it "creates user with random password" do
- params = attributes_for(:user, force_random_password: true, reset_password: true)
+ params = attributes_for(:user, force_random_password: true)
+ params.delete(:password)
post api('/users', admin), params: params
expect(response).to have_gitlab_http_status(:created)
@@ -936,8 +937,7 @@ RSpec.describe API::Users do
user_id = json_response['id']
new_user = User.find(user_id)
- expect(new_user.valid_password?(params[:password])).to eq(false)
- expect(new_user.recently_sent_password_reset?).to eq(true)
+ expect(new_user.encrypted_password).to be_present
end
it "creates user with private profile" do
@@ -1795,8 +1795,7 @@ RSpec.describe API::Users do
post api("/users/#{user.id}/emails", admin), params: email_attrs
end.to change { user.emails.count }.by(1)
- email = Email.find_by(user_id: user.id, email: email_attrs[:email])
- expect(email).not_to be_confirmed
+ expect(json_response['confirmed_at']).to be_nil
end
it "returns a 400 for invalid ID" do
@@ -1813,8 +1812,7 @@ RSpec.describe API::Users do
expect(response).to have_gitlab_http_status(:created)
- email = Email.find_by(user_id: user.id, email: email_attrs[:email])
- expect(email).to be_confirmed
+ expect(json_response['confirmed_at']).not_to be_nil
end
end
diff --git a/spec/requests/api/v3/github_spec.rb b/spec/requests/api/v3/github_spec.rb
index 197c6cbb0eb..4100b246218 100644
--- a/spec/requests/api/v3/github_spec.rb
+++ b/spec/requests/api/v3/github_spec.rb
@@ -3,10 +3,10 @@
require 'spec_helper'
RSpec.describe API::V3::Github do
- let(:user) { create(:user) }
- let(:unauthorized_user) { create(:user) }
- let(:admin) { create(:user, :admin) }
- let(:project) { create(:project, :repository, creator: user) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:unauthorized_user) { create(:user) }
+ let_it_be(:admin) { create(:user, :admin) }
+ let_it_be(:project) { create(:project, :repository, creator: user) }
before do
project.add_maintainer(user)
@@ -210,14 +210,14 @@ RSpec.describe API::V3::Github do
end
describe 'repo pulls' do
- let(:project2) { create(:project, :repository, creator: user) }
- let(:assignee) { create(:user) }
- let(:assignee2) { create(:user) }
- let!(:merge_request) do
+ let_it_be(:project2) { create(:project, :repository, creator: user) }
+ let_it_be(:assignee) { create(:user) }
+ let_it_be(:assignee2) { create(:user) }
+ let_it_be(:merge_request) do
create(:merge_request, source_project: project, target_project: project, author: user, assignees: [assignee])
end
- let!(:merge_request_2) do
+ let_it_be(:merge_request_2) do
create(:merge_request, source_project: project2, target_project: project2, author: user, assignees: [assignee, assignee2])
end
@@ -225,26 +225,57 @@ RSpec.describe API::V3::Github do
project2.add_maintainer(user)
end
+ def perform_request
+ jira_get v3_api(route, user)
+ end
+
describe 'GET /-/jira/pulls' do
+ let(:route) { '/repos/-/jira/pulls' }
+
it 'returns an array of merge requests with github format' do
- jira_get v3_api('/repos/-/jira/pulls', user)
+ perform_request
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an(Array)
expect(json_response.size).to eq(2)
expect(response).to match_response_schema('entities/github/pull_requests')
end
+
+ it 'returns multiple merge requests without N + 1' do
+ perform_request
+
+ control_count = ActiveRecord::QueryRecorder.new { perform_request }.count
+
+ project3 = create(:project, :repository, creator: user)
+ project3.add_maintainer(user)
+ assignee3 = create(:user)
+ create(:merge_request, source_project: project3, target_project: project3, author: user, assignees: [assignee3])
+
+ expect { perform_request }.not_to exceed_query_limit(control_count)
+ end
end
describe 'GET /repos/:namespace/:project/pulls' do
+ let(:route) { "/repos/#{project.namespace.path}/#{project.path}/pulls" }
+
it 'returns an array of merge requests for the proper project in github format' do
- jira_get v3_api("/repos/#{project.namespace.path}/#{project.path}/pulls", user)
+ perform_request
expect(response).to have_gitlab_http_status(:ok)
expect(json_response).to be_an(Array)
expect(json_response.size).to eq(1)
expect(response).to match_response_schema('entities/github/pull_requests')
end
+
+ it 'returns multiple merge requests without N + 1' do
+ perform_request
+
+ control_count = ActiveRecord::QueryRecorder.new { perform_request }.count
+
+ create(:merge_request, source_project: project, source_branch: 'fix')
+
+ expect { perform_request }.not_to exceed_query_limit(control_count)
+ end
end
describe 'GET /repos/:namespace/:project/pulls/:id' do
diff --git a/spec/requests/customers_dot/proxy_controller_spec.rb b/spec/requests/customers_dot/proxy_controller_spec.rb
new file mode 100644
index 00000000000..4938c67e0c3
--- /dev/null
+++ b/spec/requests/customers_dot/proxy_controller_spec.rb
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe CustomersDot::ProxyController, type: :request do
+ describe 'POST graphql' do
+ let_it_be(:customers_dot) { "#{Gitlab::SubscriptionPortal::SUBSCRIPTIONS_URL}/graphql" }
+
+ it 'forwards request body to customers dot' do
+ request_params = '{ "foo" => "bar" }'
+
+ stub_request(:post, customers_dot)
+
+ post customers_dot_proxy_graphql_path, params: request_params
+
+ expect(WebMock).to have_requested(:post, customers_dot).with(body: request_params)
+ end
+
+ it 'responds with customers dot status' do
+ stub_request(:post, customers_dot).to_return(status: 500)
+
+ post customers_dot_proxy_graphql_path
+
+ expect(response).to have_gitlab_http_status(:internal_server_error)
+ end
+
+ it 'responds with customers dot response body' do
+ customers_dot_response = 'foo'
+
+ stub_request(:post, customers_dot).to_return(body: customers_dot_response)
+
+ post customers_dot_proxy_graphql_path
+
+ expect(response.body).to eq(customers_dot_response)
+ end
+ end
+end
diff --git a/spec/requests/groups/clusters/integrations_controller_spec.rb b/spec/requests/groups/clusters/integrations_controller_spec.rb
new file mode 100644
index 00000000000..29e37e2e48c
--- /dev/null
+++ b/spec/requests/groups/clusters/integrations_controller_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::Clusters::IntegrationsController do
+ include AccessMatchersForController
+
+ shared_examples 'a secure endpoint' do
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { subject }.to be_allowed_for(:admin)
+ end
+
+ it 'is denied for admin when admin mode disabled' do
+ expect { subject }.to be_denied_for(:admin)
+ end
+
+ context 'it is allowed for group maintainers' do
+ it { expect { subject }.to be_allowed_for(:owner).of(group) }
+ it { expect { subject }.to be_allowed_for(:maintainer).of(group) }
+ it { expect { subject }.to be_denied_for(:developer).of(group) }
+ it { expect { subject }.to be_denied_for(:reporter).of(group) }
+ it { expect { subject }.to be_denied_for(:guest).of(group) }
+ it { expect { subject }.to be_denied_for(:user) }
+ it { expect { subject }.to be_denied_for(:external) }
+ end
+ end
+
+ describe 'POST create_or_update' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:member) { create(:group_member, user: user, group: group) }
+
+ let(:cluster) { create(:cluster, :group, :provided_by_gcp, groups: [group]) }
+
+ it_behaves_like '#create_or_update action' do
+ let(:path) { create_or_update_group_cluster_integration_path(group, cluster) }
+ let(:redirect_path) { group_cluster_path(group, cluster, params: { tab: 'integrations' }) }
+ end
+ end
+end
diff --git a/spec/requests/groups/email_campaigns_controller_spec.rb b/spec/requests/groups/email_campaigns_controller_spec.rb
index 930e645f6c0..a77f600ea1e 100644
--- a/spec/requests/groups/email_campaigns_controller_spec.rb
+++ b/spec/requests/groups/email_campaigns_controller_spec.rb
@@ -13,19 +13,19 @@ RSpec.describe Groups::EmailCampaignsController do
let(:track) { 'create' }
let(:series) { '0' }
let(:schema) { described_class::EMAIL_CAMPAIGNS_SCHEMA_URL }
+ let(:subject_line_text) { subject_line(track.to_sym, series.to_i) }
let(:data) do
{
namespace_id: group.id,
track: track.to_sym,
series: series.to_i,
- subject_line: subject_line(track.to_sym, series.to_i)
+ subject_line: subject_line_text
}
end
before do
sign_in(user)
group.add_developer(user)
- allow(Gitlab::Tracking).to receive(:self_describing_event)
end
subject do
@@ -34,16 +34,59 @@ RSpec.describe Groups::EmailCampaignsController do
end
shared_examples 'track and redirect' do
- it do
- is_expected.to track_self_describing_event(schema, data)
- is_expected.to have_gitlab_http_status(:redirect)
+ it 'redirects' do
+ expect(subject).to have_gitlab_http_status(:redirect)
+ end
+
+ context 'on .com' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ it 'emits a snowplow event', :snowplow do
+ subject
+
+ expect_snowplow_event(
+ category: described_class.name,
+ action: 'click',
+ context: [{
+ schema: described_class::EMAIL_CAMPAIGNS_SCHEMA_URL,
+ data: { namespace_id: group.id, series: series.to_i, subject_line: subject_line_text, track: track.to_s }
+ }]
+ )
+ end
+
+ it 'does not save the cta_click' do
+ expect(Users::InProductMarketingEmail).not_to receive(:save_cta_click)
+
+ subject
+ end
+ end
+
+ context 'when not on .com' do
+ it 'saves the cta_click' do
+ expect(Users::InProductMarketingEmail).to receive(:save_cta_click)
+
+ subject
+ end
+
+ it 'does not track snowplow events' do
+ subject
+
+ expect_no_snowplow_event
+ end
end
end
shared_examples 'no track and 404' do
- it do
- is_expected.not_to track_self_describing_event
- is_expected.to have_gitlab_http_status(:not_found)
+ it 'returns 404' do
+ expect(subject).to have_gitlab_http_status(:not_found)
+ end
+
+ it 'does not emit a snowplow event', :snowplow do
+ subject
+
+ expect_no_snowplow_event
end
end
diff --git a/spec/requests/groups/milestones_controller_spec.rb b/spec/requests/groups/milestones_controller_spec.rb
index 4afdde8be04..43f0fc714b3 100644
--- a/spec/requests/groups/milestones_controller_spec.rb
+++ b/spec/requests/groups/milestones_controller_spec.rb
@@ -4,8 +4,8 @@ require 'spec_helper'
RSpec.describe Groups::MilestonesController do
context 'N+1 DB queries' do
- let(:user) { create(:user) }
- let!(:public_group) { create(:group, :public) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:public_group) { create(:group, :public) }
let!(:public_project_with_private_issues_and_mrs) do
create(:project, :public, :issues_private, :merge_requests_private, group: public_group)
@@ -53,5 +53,25 @@ RSpec.describe Groups::MilestonesController do
expect { get show_path }.not_to exceed_all_query_limit(control)
end
end
+
+ describe 'GET #merge_requests' do
+ let(:milestone) { create(:milestone, group: public_group) }
+ let(:project) { create(:project, :public, :merge_requests_enabled, :issues_enabled, group: public_group) }
+ let!(:merge_request) { create(:merge_request, milestone: milestone, source_project: project) }
+
+ def perform_request
+ get merge_requests_group_milestone_path(public_group, milestone, format: :json)
+ end
+
+ it 'avoids N+1 database queries' do
+ perform_request # warm up the cache
+
+ control_count = ActiveRecord::QueryRecorder.new { perform_request }.count
+
+ create(:merge_request, milestone: milestone, source_project: project, source_branch: 'fix')
+
+ expect { perform_request }.not_to exceed_query_limit(control_count)
+ end
+ end
end
end
diff --git a/spec/requests/ide_controller_spec.rb b/spec/requests/ide_controller_spec.rb
index 4f127e07b6b..9b0d8dcd828 100644
--- a/spec/requests/ide_controller_spec.rb
+++ b/spec/requests/ide_controller_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe IdeController do
let_it_be(:other_user) { create(:user) }
let(:user) { creator }
+ let(:branch) { '' }
before do
sign_in(user)
@@ -28,24 +29,33 @@ RSpec.describe IdeController do
let(:user) { other_user }
context 'when user does not have fork' do
- it 'does not instantiate forked_project instance var and return 200' do
+ it 'instantiates fork_info instance var with fork_path and return 200' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:project)).to eq project
- expect(assigns(:forked_project)).to be_nil
+ expect(assigns(:fork_info)).to eq({ fork_path: controller.helpers.ide_fork_and_edit_path(project, branch, '', with_notice: false) })
+ end
+
+ it 'has nil fork_info if user cannot fork' do
+ project.project_feature.update!(forking_access_level: ProjectFeature::DISABLED)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(assigns(:fork_info)).to be_nil
end
end
- context 'when user has have fork' do
- let!(:fork) { fork_project(project, user, repository: true) }
+ context 'when user has fork' do
+ let!(:fork) { fork_project(project, user, repository: true, namespace: user.namespace) }
- it 'instantiates forked_project instance var and return 200' do
+ it 'instantiates fork_info instance var with ide_path and return 200' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:project)).to eq project
- expect(assigns(:forked_project)).to eq fork
+ expect(assigns(:fork_info)).to eq({ ide_path: controller.helpers.ide_edit_path(fork, branch, '') })
end
end
end
@@ -61,7 +71,7 @@ RSpec.describe IdeController do
expect(assigns(:branch)).to be_nil
expect(assigns(:path)).to be_nil
expect(assigns(:merge_request)).to be_nil
- expect(assigns(:forked_project)).to be_nil
+ expect(assigns(:fork_info)).to be_nil
end
end
@@ -76,7 +86,7 @@ RSpec.describe IdeController do
expect(assigns(:branch)).to be_nil
expect(assigns(:path)).to be_nil
expect(assigns(:merge_request)).to be_nil
- expect(assigns(:forked_project)).to be_nil
+ expect(assigns(:fork_info)).to be_nil
end
end
@@ -91,7 +101,7 @@ RSpec.describe IdeController do
expect(assigns(:branch)).to be_nil
expect(assigns(:path)).to be_nil
expect(assigns(:merge_request)).to be_nil
- expect(assigns(:forked_project)).to be_nil
+ expect(assigns(:fork_info)).to be_nil
end
it_behaves_like 'user cannot push code'
@@ -108,55 +118,58 @@ RSpec.describe IdeController do
expect(assigns(:branch)).to be_nil
expect(assigns(:path)).to be_nil
expect(assigns(:merge_request)).to be_nil
- expect(assigns(:forked_project)).to be_nil
+ expect(assigns(:fork_info)).to be_nil
end
it_behaves_like 'user cannot push code'
context "/-/ide/project/:project/#{action}/:branch" do
- let(:route) { "/-/ide/project/#{project.full_path}/#{action}/master" }
+ let(:branch) { 'master' }
+ let(:route) { "/-/ide/project/#{project.full_path}/#{action}/#{branch}" }
it 'instantiates project and branch instance vars and return 200' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:project)).to eq project
- expect(assigns(:branch)).to eq 'master'
+ expect(assigns(:branch)).to eq branch
expect(assigns(:path)).to be_nil
expect(assigns(:merge_request)).to be_nil
- expect(assigns(:forked_project)).to be_nil
+ expect(assigns(:fork_info)).to be_nil
end
it_behaves_like 'user cannot push code'
context "/-/ide/project/:project/#{action}/:branch/-" do
- let(:route) { "/-/ide/project/#{project.full_path}/#{action}/branch/slash/-" }
+ let(:branch) { 'branch/slash' }
+ let(:route) { "/-/ide/project/#{project.full_path}/#{action}/#{branch}/-" }
it 'instantiates project and branch instance vars and return 200' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:project)).to eq project
- expect(assigns(:branch)).to eq 'branch/slash'
+ expect(assigns(:branch)).to eq branch
expect(assigns(:path)).to be_nil
expect(assigns(:merge_request)).to be_nil
- expect(assigns(:forked_project)).to be_nil
+ expect(assigns(:fork_info)).to be_nil
end
it_behaves_like 'user cannot push code'
context "/-/ide/project/:project/#{action}/:branch/-/:path" do
- let(:route) { "/-/ide/project/#{project.full_path}/#{action}/master/-/foo/.bar" }
+ let(:branch) { 'master' }
+ let(:route) { "/-/ide/project/#{project.full_path}/#{action}/#{branch}/-/foo/.bar" }
it 'instantiates project, branch, and path instance vars and return 200' do
subject
expect(response).to have_gitlab_http_status(:ok)
expect(assigns(:project)).to eq project
- expect(assigns(:branch)).to eq 'master'
+ expect(assigns(:branch)).to eq branch
expect(assigns(:path)).to eq 'foo/.bar'
expect(assigns(:merge_request)).to be_nil
- expect(assigns(:forked_project)).to be_nil
+ expect(assigns(:fork_info)).to be_nil
end
it_behaves_like 'user cannot push code'
@@ -179,7 +192,7 @@ RSpec.describe IdeController do
expect(assigns(:branch)).to be_nil
expect(assigns(:path)).to be_nil
expect(assigns(:merge_request)).to eq merge_request.id.to_s
- expect(assigns(:forked_project)).to be_nil
+ expect(assigns(:fork_info)).to be_nil
end
it_behaves_like 'user cannot push code'
diff --git a/spec/requests/jwt_controller_spec.rb b/spec/requests/jwt_controller_spec.rb
index e154e691d5f..8be26784a3d 100644
--- a/spec/requests/jwt_controller_spec.rb
+++ b/spec/requests/jwt_controller_spec.rb
@@ -180,10 +180,11 @@ RSpec.describe JwtController do
end
context 'when internal auth is disabled' do
+ before do
+ stub_application_setting(password_authentication_enabled_for_git: false)
+ end
+
it 'rejects the authorization attempt with personal access token message' do
- allow_next_instance_of(ApplicationSetting) do |instance|
- allow(instance).to receive(:password_authentication_enabled_for_git?) { false }
- end
get '/jwt/auth', params: parameters, headers: headers
expect(response).to have_gitlab_http_status(:unauthorized)
diff --git a/spec/requests/projects/clusters/integrations_controller_spec.rb b/spec/requests/projects/clusters/integrations_controller_spec.rb
new file mode 100644
index 00000000000..323c61b9af3
--- /dev/null
+++ b/spec/requests/projects/clusters/integrations_controller_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::Clusters::IntegrationsController do
+ include AccessMatchersForController
+
+ shared_examples 'a secure endpoint' do
+ it 'is allowed for admin when admin mode enabled', :enable_admin_mode do
+ expect { subject }.to be_allowed_for(:admin)
+ end
+
+ it 'is denied for admin when admin mode disabled' do
+ expect { subject }.to be_denied_for(:admin)
+ end
+
+ context 'it is allowed for project maintainers' do
+ it { expect { subject }.to be_allowed_for(:owner).of(project) }
+ it { expect { subject }.to be_allowed_for(:maintainer).of(project) }
+ it { expect { subject }.to be_denied_for(:developer).of(project) }
+ it { expect { subject }.to be_denied_for(:reporter).of(project) }
+ it { expect { subject }.to be_denied_for(:guest).of(project) }
+ it { expect { subject }.to be_denied_for(:user) }
+ it { expect { subject }.to be_denied_for(:external) }
+ end
+ end
+
+ describe 'POST create_or_update' do
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:project) { cluster.project }
+ let(:user) { project.owner }
+
+ it_behaves_like '#create_or_update action' do
+ let(:path) { create_or_update_project_cluster_integration_path(project, cluster) }
+ let(:redirect_path) { project_cluster_path(project, cluster, params: { tab: 'integrations' }) }
+ end
+ end
+end
diff --git a/spec/requests/projects/cycle_analytics_events_spec.rb b/spec/requests/projects/cycle_analytics_events_spec.rb
index 565576f3091..7921fdcb0de 100644
--- a/spec/requests/projects/cycle_analytics_events_spec.rb
+++ b/spec/requests/projects/cycle_analytics_events_spec.rb
@@ -75,7 +75,7 @@ RSpec.describe 'value stream analytics events' do
context 'with private project and builds' do
before do
- project.members.last.update(access_level: Gitlab::Access::GUEST)
+ project.members.last.update!(access_level: Gitlab::Access::GUEST)
end
it 'does not list the test events' do
@@ -100,7 +100,7 @@ RSpec.describe 'value stream analytics events' do
def create_cycle
milestone = create(:milestone, project: project)
- issue.update(milestone: milestone)
+ issue.update!(milestone: milestone)
mr = create_merge_request_closing_issue(user, project, issue, commit_message: "References #{issue.to_reference}")
pipeline = create(:ci_empty_pipeline, status: 'created', project: project, ref: mr.source_branch, sha: mr.source_branch_sha, head_pipeline_of: mr)
diff --git a/spec/requests/projects/issue_links_controller_spec.rb b/spec/requests/projects/issue_links_controller_spec.rb
index a21c676f000..d22955718f8 100644
--- a/spec/requests/projects/issue_links_controller_spec.rb
+++ b/spec/requests/projects/issue_links_controller_spec.rb
@@ -71,7 +71,7 @@ RSpec.describe Projects::IssueLinksController do
list_service_response = IssueLinks::ListService.new(issue, user).execute
expect(response).to have_gitlab_http_status(:not_found)
- expect(json_response).to eq('message' => 'No Issue found for given params', 'issuables' => list_service_response.as_json)
+ expect(json_response).to eq('message' => 'No matching issue found. Make sure that you are adding a valid issue URL.', 'issuables' => list_service_response.as_json)
end
end
end
diff --git a/spec/requests/projects/merge_requests_discussions_spec.rb b/spec/requests/projects/merge_requests_discussions_spec.rb
index 6ec586ed22c..eb8cf9f797d 100644
--- a/spec/requests/projects/merge_requests_discussions_spec.rb
+++ b/spec/requests/projects/merge_requests_discussions_spec.rb
@@ -26,6 +26,10 @@ RSpec.describe 'merge requests discussions' do
# https://docs.gitlab.com/ee/development/query_recorder.html#use-request-specs-instead-of-controller-specs
it 'avoids N+1 DB queries', :request_store do
+ send_request # warm up
+
+ create(:diff_note_on_merge_request, noteable: merge_request,
+ project: merge_request.project)
control = ActiveRecord::QueryRecorder.new { send_request }
create(:diff_note_on_merge_request, noteable: merge_request,
diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb
index 1bb260b5ea1..972caec6eb3 100644
--- a/spec/requests/rack_attack_global_spec.rb
+++ b/spec/requests/rack_attack_global_spec.rb
@@ -143,6 +143,31 @@ RSpec.describe 'Rack Attack global throttles', :use_clean_rails_memory_store_cac
end
end
+ context 'when the request is to a container registry notification endpoint' do
+ let(:secret_token) { 'secret_token' }
+ let(:events) { [{ action: 'push' }] }
+ let(:registry_endpoint) { '/api/v4/container_registry_event/events' }
+ let(:registry_headers) { { 'Content-Type' => ::API::ContainerRegistryEvent::DOCKER_DISTRIBUTION_EVENTS_V1_JSON } }
+
+ before do
+ allow(Gitlab.config.registry).to receive(:notification_secret) { secret_token }
+
+ event = spy(:event)
+ allow(::ContainerRegistry::Event).to receive(:new).and_return(event)
+ allow(event).to receive(:supported?).and_return(true)
+ end
+
+ it 'does not throttle the requests' do
+ (1 + requests_per_period).times do
+ post registry_endpoint,
+ params: { events: events }.to_json,
+ headers: registry_headers.merge('Authorization' => secret_token)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+ end
+
it 'logs RackAttack info into structured logs' do
requests_per_period.times do
get url_that_does_not_require_authentication
diff --git a/spec/requests/users_controller_spec.rb b/spec/requests/users_controller_spec.rb
index f3ddcbef1c2..f092cbf26a4 100644
--- a/spec/requests/users_controller_spec.rb
+++ b/spec/requests/users_controller_spec.rb
@@ -663,7 +663,7 @@ RSpec.describe UsersController do
end
context 'when a user changed their username' do
- let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-username') }
+ let(:redirect_route) { user.namespace.redirect_routes.create!(path: 'old-username') }
it 'returns JSON indicating a user by that username does not exist' do
get user_exists_url 'old-username'
@@ -705,7 +705,7 @@ RSpec.describe UsersController do
end
context 'when a user changed their username' do
- let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-username') }
+ let(:redirect_route) { user.namespace.redirect_routes.create!(path: 'old-username') }
it 'returns JSON indicating a user by that username does not exist' do
get user_suggests_url 'old-username'
@@ -755,19 +755,19 @@ RSpec.describe UsersController do
end
context 'when requesting a redirected path' do
- let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-path') }
+ let(:redirect_route) { user.namespace.redirect_routes.create!(path: 'old-path') }
it_behaves_like 'redirects to the canonical path'
context 'when the old path is a substring of the scheme or host' do
- let(:redirect_route) { user.namespace.redirect_routes.create(path: 'http') }
+ let(:redirect_route) { user.namespace.redirect_routes.create!(path: 'http') }
# it does not modify the requested host and ...
it_behaves_like 'redirects to the canonical path'
end
context 'when the old path is substring of users' do
- let(:redirect_route) { user.namespace.redirect_routes.create(path: 'ser') }
+ let(:redirect_route) { user.namespace.redirect_routes.create!(path: 'ser') }
it_behaves_like 'redirects to the canonical path'
end
@@ -806,19 +806,19 @@ RSpec.describe UsersController do
end
context 'when requesting a redirected path' do
- let(:redirect_route) { user.namespace.redirect_routes.create(path: 'old-path') }
+ let(:redirect_route) { user.namespace.redirect_routes.create!(path: 'old-path') }
it_behaves_like 'redirects to the canonical path'
context 'when the old path is a substring of the scheme or host' do
- let(:redirect_route) { user.namespace.redirect_routes.create(path: 'http') }
+ let(:redirect_route) { user.namespace.redirect_routes.create!(path: 'http') }
# it does not modify the requested host and ...
it_behaves_like 'redirects to the canonical path'
end
context 'when the old path is substring of users' do
- let(:redirect_route) { user.namespace.redirect_routes.create(path: 'ser') }
+ let(:redirect_route) { user.namespace.redirect_routes.create!(path: 'ser') }
# it does not modify the /users part of the path
# (i.e. /users/ser should not become /ufoos/ser) and ...
diff --git a/spec/requests/whats_new_controller_spec.rb b/spec/requests/whats_new_controller_spec.rb
index ba7b5d4c000..ffb31bdf9bb 100644
--- a/spec/requests/whats_new_controller_spec.rb
+++ b/spec/requests/whats_new_controller_spec.rb
@@ -35,16 +35,5 @@ RSpec.describe WhatsNewController, :clean_gitlab_redis_cache do
expect(response).to have_gitlab_http_status(:not_found)
end
end
-
- context 'with version param' do
- it 'returns items without pagination headers' do
- allow(ReleaseHighlight).to receive(:for_version).with(version: '42').and_return(highlights)
-
- get whats_new_path(version: 42), xhr: true
-
- expect(response.body).to eq(highlights.items.to_json)
- expect(response.headers['X-Next-Page']).to be_nil
- end
- end
end
end
diff --git a/spec/routing/project_routing_spec.rb b/spec/routing/project_routing_spec.rb
index f7ed8d7d5dc..056f4d30ea5 100644
--- a/spec/routing/project_routing_spec.rb
+++ b/spec/routing/project_routing_spec.rb
@@ -126,26 +126,6 @@ RSpec.describe 'project routing' do
it 'to #archive with "/" in route' do
expect(get('/gitlab/gitlabhq/-/archive/improve/awesome/gitlabhq-improve-awesome.tar.gz')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'tar.gz', id: 'improve/awesome/gitlabhq-improve-awesome')
end
-
- it 'to #archive_alternative' do
- expect(get('/gitlab/gitlabhq/-/repository/archive')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', append_sha: true)
- end
-
- it 'to #archive_deprecated' do
- expect(get('/gitlab/gitlabhq/-/repository/master/archive')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'master', append_sha: true)
- end
-
- it 'to #archive_deprecated format:zip' do
- expect(get('/gitlab/gitlabhq/-/repository/master/archive.zip')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'zip', id: 'master', append_sha: true)
- end
-
- it 'to #archive_deprecated format:tar.bz2' do
- expect(get('/gitlab/gitlabhq/-/repository/master/archive.tar.bz2')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', format: 'tar.bz2', id: 'master', append_sha: true)
- end
-
- it 'to #archive_deprecated with "/" in route' do
- expect(get('/gitlab/gitlabhq/-/repository/improve/awesome/archive')).to route_to('projects/repositories#archive', namespace_id: 'gitlab', project_id: 'gitlabhq', id: 'improve/awesome', append_sha: true)
- end
end
describe Projects::BranchesController, 'routing' do
@@ -335,33 +315,37 @@ RSpec.describe 'project routing' do
end
end
- # test_project_hook POST /:project_id/hooks/:id/test(.:format) hooks#test
- # project_hooks GET /:project_id/hooks(.:format) hooks#index
- # POST /:project_id/hooks(.:format) hooks#create
- # edit_project_hook GET /:project_id/hooks/:id/edit(.:format) hooks#edit
- # project_hook PUT /:project_id/hooks/:id(.:format) hooks#update
- # DELETE /:project_id/hooks/:id(.:format) hooks#destroy
+ # test_project_hook POST /:project_id/-/hooks/:id/test(.:format) hooks#test
+ # project_hooks GET /:project_id/-/hooks(.:format) hooks#index
+ # POST /:project_id/-/hooks(.:format) hooks#create
+ # edit_project_hook GET /:project_id/-/hooks/:id/edit(.:format) hooks#edit
+ # project_hook PUT /:project_id/-/hooks/:id(.:format) hooks#update
+ # DELETE /:project_id/-/hooks/:id(.:format) hooks#destroy
describe Projects::HooksController, 'routing' do
it 'to #test' do
- expect(post('/gitlab/gitlabhq/hooks/1/test')).to route_to('projects/hooks#test', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
+ expect(post('/gitlab/gitlabhq/-/hooks/1/test')).to route_to('projects/hooks#test', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '1')
end
it_behaves_like 'resource routing' do
let(:actions) { %i[index create destroy edit update] }
- let(:base_path) { '/gitlab/gitlabhq/hooks' }
+ let(:base_path) { '/gitlab/gitlabhq/-/hooks' }
end
+
+ it_behaves_like 'redirecting a legacy path', '/gitlab/gitlabhq/hooks', '/gitlab/gitlabhq/-/hooks'
end
- # retry_namespace_project_hook_hook_log POST /:project_id/hooks/:hook_id/hook_logs/:id/retry(.:format) projects/hook_logs#retry
- # namespace_project_hook_hook_log GET /:project_id/hooks/:hook_id/hook_logs/:id(.:format) projects/hook_logs#show
+ # retry_namespace_project_hook_hook_log POST /:project_id/-/hooks/:hook_id/hook_logs/:id/retry(.:format) projects/hook_logs#retry
+ # namespace_project_hook_hook_log GET /:project_id/-/hooks/:hook_id/hook_logs/:id(.:format) projects/hook_logs#show
describe Projects::HookLogsController, 'routing' do
it 'to #retry' do
- expect(post('/gitlab/gitlabhq/hooks/1/hook_logs/1/retry')).to route_to('projects/hook_logs#retry', namespace_id: 'gitlab', project_id: 'gitlabhq', hook_id: '1', id: '1')
+ expect(post('/gitlab/gitlabhq/-/hooks/1/hook_logs/1/retry')).to route_to('projects/hook_logs#retry', namespace_id: 'gitlab', project_id: 'gitlabhq', hook_id: '1', id: '1')
end
it 'to #show' do
- expect(get('/gitlab/gitlabhq/hooks/1/hook_logs/1')).to route_to('projects/hook_logs#show', namespace_id: 'gitlab', project_id: 'gitlabhq', hook_id: '1', id: '1')
+ expect(get('/gitlab/gitlabhq/-/hooks/1/hook_logs/1')).to route_to('projects/hook_logs#show', namespace_id: 'gitlab', project_id: 'gitlabhq', hook_id: '1', id: '1')
end
+
+ it_behaves_like 'redirecting a legacy path', '/gitlab/gitlabhq/hooks/hook_logs/1', '/gitlab/gitlabhq/-/hooks/hook_logs/1'
end
# project_commit GET /:project_id/commit/:id(.:format) commit#show {id: /\h{7,40}/, project_id: /[^\/]+/}
@@ -700,6 +684,26 @@ RSpec.describe 'project routing' do
end
end
+ describe Projects::PipelinesController, 'routing' do
+ it 'to #index' do
+ expect(get('/gitlab/gitlabhq/-/pipelines')).to route_to('projects/pipelines#index', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ end
+
+ it 'to #show' do
+ expect(get('/gitlab/gitlabhq/-/pipelines/12')).to route_to('projects/pipelines#show', namespace_id: 'gitlab', project_id: 'gitlabhq', id: '12')
+ end
+
+ it_behaves_like 'redirecting a legacy path', '/gitlab/gitlabhq/pipelines', '/gitlab/gitlabhq/-/pipelines'
+ end
+
+ describe Projects::PipelineSchedulesController, 'routing' do
+ it 'to #index' do
+ expect(get('/gitlab/gitlabhq/-/pipeline_schedules')).to route_to('projects/pipeline_schedules#index', namespace_id: 'gitlab', project_id: 'gitlabhq')
+ end
+
+ it_behaves_like 'redirecting a legacy path', '/gitlab/gitlabhq/pipeline_schedules', '/gitlab/gitlabhq/-/pipeline_schedules'
+ end
+
describe Projects::Settings::OperationsController, 'routing' do
it 'to #reset_alerting_token' do
expect(post('/gitlab/gitlabhq/-/settings/operations/reset_alerting_token')).to route_to('projects/settings/operations#reset_alerting_token', namespace_id: 'gitlab', project_id: 'gitlabhq')
diff --git a/spec/rubocop/cop/gitlab/delegate_predicate_methods_spec.rb b/spec/rubocop/cop/gitlab/delegate_predicate_methods_spec.rb
new file mode 100644
index 00000000000..1ceff0dd681
--- /dev/null
+++ b/spec/rubocop/cop/gitlab/delegate_predicate_methods_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require_relative '../../../../rubocop/cop/gitlab/delegate_predicate_methods'
+
+RSpec.describe RuboCop::Cop::Gitlab::DelegatePredicateMethods do
+ subject(:cop) { described_class.new }
+
+ it 'registers offense for single predicate method with allow_nil: true' do
+ expect_offense(<<~SOURCE)
+ delegate :is_foo?, :do_foo, to: :bar, allow_nil: true
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Using `delegate` with `allow_nil` on the following predicate methods is discouraged: is_foo?.
+ SOURCE
+ end
+
+ it 'registers offense for multiple predicate methods with allow_nil: true' do
+ expect_offense(<<~SOURCE)
+ delegate :is_foo?, :is_bar?, to: :bar, allow_nil: true
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Using `delegate` with `allow_nil` on the following predicate methods is discouraged: is_foo?, is_bar?.
+ SOURCE
+ end
+
+ it 'registers no offense for non-predicate method with allow_nil: true' do
+ expect_no_offenses(<<~SOURCE)
+ delegate :do_foo, to: :bar, allow_nil: true
+ SOURCE
+ end
+
+ it 'registers no offense with predicate method with allow_nil: false' do
+ expect_no_offenses(<<~SOURCE)
+ delegate :is_foo?, to: :bar, allow_nil: false
+ SOURCE
+ end
+
+ it 'registers no offense with predicate method without allow_nil' do
+ expect_no_offenses(<<~SOURCE)
+ delegate :is_foo?, to: :bar
+ SOURCE
+ end
+end
diff --git a/spec/rubocop/cop/gitlab/feature_available_usage_spec.rb b/spec/rubocop/cop/gitlab/feature_available_usage_spec.rb
new file mode 100644
index 00000000000..514ef357785
--- /dev/null
+++ b/spec/rubocop/cop/gitlab/feature_available_usage_spec.rb
@@ -0,0 +1,96 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+require 'rubocop'
+require 'rubocop/rspec/support'
+require_relative '../../../../rubocop/cop/gitlab/feature_available_usage'
+
+RSpec.describe RuboCop::Cop::Gitlab::FeatureAvailableUsage do
+ subject(:cop) { described_class.new }
+
+ context 'no arguments given' do
+ it 'does not flag the use of Gitlab::Sourcegraph.feature_available? with no arguments' do
+ expect_no_offenses('Gitlab::Sourcegraph.feature_available?')
+ expect_no_offenses('subject { described_class.feature_available? }')
+ end
+ end
+
+ context 'one argument given' do
+ it 'does not flag the use of License.feature_available?' do
+ expect_no_offenses('License.feature_available?(:push_rules)')
+ end
+
+ it 'flags the use with a dynamic feature as nil' do
+ expect_offense(<<~SOURCE)
+ feature_available?(nil)
+ ^^^^^^^^^^^^^^^^^^^^^^^ `feature_available?` should not be called for features that can be licensed (`nil` given), use `licensed_feature_available?(feature)` instead.
+ SOURCE
+ expect_offense(<<~SOURCE)
+ project.feature_available?(nil)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `feature_available?` should not be called for features that can be licensed (`nil` given), use `licensed_feature_available?(feature)` instead.
+ SOURCE
+ end
+
+ it 'flags the use with an OSS project feature' do
+ expect_offense(<<~SOURCE)
+ project.feature_available?(:issues)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `feature_available?` should be called with two arguments: `feature` and `user`.
+ SOURCE
+ expect_offense(<<~SOURCE)
+ feature_available?(:issues)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^ `feature_available?` should be called with two arguments: `feature` and `user`.
+ SOURCE
+ end
+
+ it 'flags the use with a feature that is not a project feature' do
+ expect_offense(<<~SOURCE)
+ feature_available?(:foo)
+ ^^^^^^^^^^^^^^^^^^^^^^^^ `feature_available?` should not be called for features that can be licensed (`:foo` given), use `licensed_feature_available?(feature)` instead.
+ SOURCE
+ expect_offense(<<~SOURCE)
+ project.feature_available?(:foo)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `feature_available?` should not be called for features that can be licensed (`:foo` given), use `licensed_feature_available?(feature)` instead.
+ SOURCE
+ expect_offense(<<~SOURCE)
+ feature_available?(foo)
+ ^^^^^^^^^^^^^^^^^^^^^^^ `feature_available?` should not be called for features that can be licensed (`foo` isn't a literal so we cannot say if it's legit or not), using `licensed_feature_available?(feature)` may be more appropriate.
+ SOURCE
+ expect_offense(<<~SOURCE)
+ foo = :feature
+ feature_available?(foo)
+ ^^^^^^^^^^^^^^^^^^^^^^^ `feature_available?` should not be called for features that can be licensed (`foo` isn't a literal so we cannot say if it's legit or not), using `licensed_feature_available?(feature)` may be more appropriate.
+ SOURCE
+ end
+ end
+
+ context 'two arguments given' do
+ it 'does not flag the use with an OSS project feature' do
+ expect_no_offenses('feature_available?(:issues, user)')
+ expect_no_offenses('project.feature_available?(:issues, user)')
+ end
+
+ it 'does not flag the use with an EE project feature' do
+ expect_no_offenses('feature_available?(:requirements, user)')
+ expect_no_offenses('project.feature_available?(:requirements, user)')
+ end
+
+ it 'flags the use with a dynamic feature as a method call with two args' do
+ expect_offense(<<~SOURCE)
+ feature_available?(:foo, current_user)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `feature_available?` should not be called for features that can be licensed (`:foo` given), use `licensed_feature_available?(feature)` instead.
+ SOURCE
+ expect_offense(<<~SOURCE)
+ project.feature_available?(:foo, current_user)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `feature_available?` should not be called for features that can be licensed (`:foo` given), use `licensed_feature_available?(feature)` instead.
+ SOURCE
+ expect_offense(<<~SOURCE)
+ feature_available?(foo, current_user)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `feature_available?` should not be called for features that can be licensed (`foo` isn't a literal so we cannot say if it's legit or not), using `licensed_feature_available?(feature)` may be more appropriate.
+ SOURCE
+ expect_offense(<<~SOURCE)
+ project.feature_available?(foo, current_user)
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ `feature_available?` should not be called for features that can be licensed (`foo` isn't a literal so we cannot say if it's legit or not), using `licensed_feature_available?(feature)` may be more appropriate.
+ SOURCE
+ end
+ end
+end
diff --git a/spec/rubocop/cop/graphql/descriptions_spec.rb b/spec/rubocop/cop/graphql/descriptions_spec.rb
index af660aee165..9709a253bdc 100644
--- a/spec/rubocop/cop/graphql/descriptions_spec.rb
+++ b/spec/rubocop/cop/graphql/descriptions_spec.rb
@@ -6,7 +6,7 @@ require_relative '../../../../rubocop/cop/graphql/descriptions'
RSpec.describe RuboCop::Cop::Graphql::Descriptions do
subject(:cop) { described_class.new }
- context 'fields' do
+ context 'with fields' do
it 'adds an offense when there is no description' do
expect_offense(<<~TYPE)
module Types
@@ -46,9 +46,19 @@ RSpec.describe RuboCop::Cop::Graphql::Descriptions do
end
TYPE
end
+
+ it 'does not add an offense when there is a resolver' do
+ expect_no_offenses(<<~TYPE.strip)
+ module Types
+ class FakeType < BaseObject
+ field :a_thing, resolver: ThingResolver
+ end
+ end
+ TYPE
+ end
end
- context 'arguments' do
+ context 'with arguments' do
it 'adds an offense when there is no description' do
expect_offense(<<~TYPE)
module Types
@@ -90,7 +100,7 @@ RSpec.describe RuboCop::Cop::Graphql::Descriptions do
end
end
- context 'enum values' do
+ context 'with enum values' do
it 'adds an offense when there is no description' do
expect_offense(<<~TYPE)
module Types
diff --git a/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb b/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb
index f4695ff8d2d..899872859a9 100644
--- a/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb
+++ b/spec/rubocop/cop/migration/add_limit_to_text_columns_spec.rb
@@ -17,7 +17,6 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do
it 'registers an offense' do
expect_offense(<<~RUBY)
class TestTextLimits < ActiveRecord::Migration[6.0]
- DOWNTIME = false
disable_ddl_transaction!
def up
@@ -54,7 +53,6 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do
it 'registers no offense' do
expect_no_offenses(<<~RUBY)
class TestTextLimits < ActiveRecord::Migration[6.0]
- DOWNTIME = false
disable_ddl_transaction!
def up
@@ -90,8 +88,6 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do
it 'registers no offense' do
expect_no_offenses(<<~RUBY)
class TestTextLimits < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def up
create_table :test_text_limits, id: false do |t|
t.integer :test_id, null: false
@@ -113,7 +109,6 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do
it 'registers an offense' do
expect_offense(<<~RUBY)
class TestTextLimits < ActiveRecord::Migration[6.0]
- DOWNTIME = false
disable_ddl_transaction!
def up
@@ -146,7 +141,6 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do
it 'registers no offenses' do
expect_no_offenses(<<~RUBY)
class TestTextLimits < ActiveRecord::Migration[6.0]
- DOWNTIME = false
disable_ddl_transaction!
def up
@@ -168,8 +162,6 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do
it 'registers no offense' do
expect_no_offenses(<<~RUBY)
class TestTextLimits < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def up
drop_table :no_offense_on_down
end
@@ -194,7 +186,6 @@ RSpec.describe RuboCop::Cop::Migration::AddLimitToTextColumns do
it 'registers no offense' do
expect_no_offenses(<<~RUBY)
class TestTextLimits < ActiveRecord::Migration[6.0]
- DOWNTIME = false
disable_ddl_transaction!
def up
diff --git a/spec/rubocop/cop/migration/add_timestamps_spec.rb b/spec/rubocop/cop/migration/add_timestamps_spec.rb
index ef5a856722f..2a11d46be6e 100644
--- a/spec/rubocop/cop/migration/add_timestamps_spec.rb
+++ b/spec/rubocop/cop/migration/add_timestamps_spec.rb
@@ -9,8 +9,6 @@ RSpec.describe RuboCop::Cop::Migration::AddTimestamps do
let(:migration_with_add_timestamps) do
%q(
class Users < ActiveRecord::Migration[4.2]
- DOWNTIME = false
-
def change
add_column(:users, :username, :text)
add_timestamps(:users)
@@ -22,8 +20,6 @@ RSpec.describe RuboCop::Cop::Migration::AddTimestamps do
let(:migration_without_add_timestamps) do
%q(
class Users < ActiveRecord::Migration[4.2]
- DOWNTIME = false
-
def change
add_column(:users, :username, :text)
end
@@ -34,8 +30,6 @@ RSpec.describe RuboCop::Cop::Migration::AddTimestamps do
let(:migration_with_add_timestamps_with_timezone) do
%q(
class Users < ActiveRecord::Migration[4.2]
- DOWNTIME = false
-
def change
add_column(:users, :username, :text)
add_timestamps_with_timezone(:users)
@@ -52,8 +46,6 @@ RSpec.describe RuboCop::Cop::Migration::AddTimestamps do
it 'registers an offense when the "add_timestamps" method is used' do
expect_offense(<<~RUBY)
class Users < ActiveRecord::Migration[4.2]
- DOWNTIME = false
-
def change
add_column(:users, :username, :text)
add_timestamps(:users)
diff --git a/spec/rubocop/cop/migration/complex_indexes_require_name_spec.rb b/spec/rubocop/cop/migration/complex_indexes_require_name_spec.rb
index 15e947a1e53..ac814c10550 100644
--- a/spec/rubocop/cop/migration/complex_indexes_require_name_spec.rb
+++ b/spec/rubocop/cop/migration/complex_indexes_require_name_spec.rb
@@ -18,8 +18,6 @@ RSpec.describe RuboCop::Cop::Migration::ComplexIndexesRequireName do
it 'registers an offense' do
expect_offense(<<~RUBY)
class TestComplexIndexes < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def up
create_table :test_table do |t|
t.integer :column1, null: false
@@ -46,8 +44,6 @@ RSpec.describe RuboCop::Cop::Migration::ComplexIndexesRequireName do
it 'registers no offense' do
expect_no_offenses(<<~RUBY)
class TestComplexIndexes < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def up
create_table :test_table do |t|
t.integer :column1, null: false
@@ -74,8 +70,6 @@ RSpec.describe RuboCop::Cop::Migration::ComplexIndexesRequireName do
it 'registers an offense' do
expect_offense(<<~RUBY)
class TestComplexIndexes < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
disable_ddl_transaction!
def up
@@ -101,8 +95,6 @@ RSpec.describe RuboCop::Cop::Migration::ComplexIndexesRequireName do
it 'registers no offenses' do
expect_no_offenses(<<~RUBY)
class TestComplexIndexes < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
INDEX_NAME = 'my_test_name'
disable_ddl_transaction!
@@ -135,8 +127,6 @@ RSpec.describe RuboCop::Cop::Migration::ComplexIndexesRequireName do
it 'registers no offenses' do
expect_no_offenses(<<~RUBY)
class TestComplexIndexes < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
disable_ddl_transaction!
def up
diff --git a/spec/rubocop/cop/migration/datetime_spec.rb b/spec/rubocop/cop/migration/datetime_spec.rb
index 3854ddfe99c..95a875b3baa 100644
--- a/spec/rubocop/cop/migration/datetime_spec.rb
+++ b/spec/rubocop/cop/migration/datetime_spec.rb
@@ -9,8 +9,6 @@ RSpec.describe RuboCop::Cop::Migration::Datetime do
let(:create_table_migration_without_datetime) do
%q(
class Users < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def change
create_table :users do |t|
t.string :username, null: false
@@ -24,8 +22,6 @@ RSpec.describe RuboCop::Cop::Migration::Datetime do
let(:create_table_migration_with_datetime_with_timezone) do
%q(
class Users < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def change
create_table :users do |t|
t.string :username, null: false
@@ -39,8 +35,6 @@ RSpec.describe RuboCop::Cop::Migration::Datetime do
let(:add_column_migration_with_datetime) do
%q(
class Users < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def change
add_column(:users, :username, :text)
add_column(:users, :last_sign_in, :datetime)
@@ -52,8 +46,6 @@ RSpec.describe RuboCop::Cop::Migration::Datetime do
let(:add_column_migration_with_timestamp) do
%q(
class Users < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def change
add_column(:users, :username, :text)
add_column(:users, :last_sign_in, :timestamp)
@@ -65,8 +57,6 @@ RSpec.describe RuboCop::Cop::Migration::Datetime do
let(:add_column_migration_without_datetime) do
%q(
class Users < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def change
add_column(:users, :username, :text)
end
@@ -77,8 +67,6 @@ RSpec.describe RuboCop::Cop::Migration::Datetime do
let(:add_column_migration_with_datetime_with_timezone) do
%q(
class Users < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def change
add_column(:users, :username, :text)
add_column(:users, :last_sign_in, :datetime_with_timezone)
@@ -95,8 +83,6 @@ RSpec.describe RuboCop::Cop::Migration::Datetime do
it 'registers an offense when the ":datetime" data type is used on create_table' do
expect_offense(<<~RUBY)
class Users < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def change
create_table :users do |t|
t.string :username, null: false
@@ -111,8 +97,6 @@ RSpec.describe RuboCop::Cop::Migration::Datetime do
it 'registers an offense when the ":timestamp" data type is used on create_table' do
expect_offense(<<~RUBY)
class Users < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def change
create_table :users do |t|
t.string :username, null: false
@@ -135,8 +119,6 @@ RSpec.describe RuboCop::Cop::Migration::Datetime do
it 'registers an offense when the ":datetime" data type is used on add_column' do
expect_offense(<<~RUBY)
class Users < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def change
add_column(:users, :username, :text)
add_column(:users, :last_sign_in, :datetime)
@@ -149,8 +131,6 @@ RSpec.describe RuboCop::Cop::Migration::Datetime do
it 'registers an offense when the ":timestamp" data type is used on add_column' do
expect_offense(<<~RUBY)
class Users < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def change
add_column(:users, :username, :text)
add_column(:users, :last_sign_in, :timestamp)
diff --git a/spec/rubocop/cop/migration/prevent_strings_spec.rb b/spec/rubocop/cop/migration/prevent_strings_spec.rb
index a9b62f23a77..d1760c2db88 100644
--- a/spec/rubocop/cop/migration/prevent_strings_spec.rb
+++ b/spec/rubocop/cop/migration/prevent_strings_spec.rb
@@ -15,8 +15,6 @@ RSpec.describe RuboCop::Cop::Migration::PreventStrings do
it 'registers an offense' do
expect_offense(<<~RUBY, msg: "Do not use the `string` data type, use `text` instead.[...]")
class Users < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def up
create_table :users do |t|
t.string :username, null: false
@@ -46,8 +44,6 @@ RSpec.describe RuboCop::Cop::Migration::PreventStrings do
it 'registers no offense' do
expect_no_offenses(<<~RUBY)
class Users < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def up
create_table :users do |t|
t.integer :not_a_string, null: false
@@ -65,8 +61,6 @@ RSpec.describe RuboCop::Cop::Migration::PreventStrings do
it 'registers no offense' do
expect_no_offenses(<<~RUBY)
class Users < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def up
create_table :users do |t|
t.text :username, null: false
@@ -87,8 +81,6 @@ RSpec.describe RuboCop::Cop::Migration::PreventStrings do
it 'registers no offense' do
expect_no_offenses(<<~RUBY)
class TestStringArrays < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def up
create_table :test_string_arrays, id: false do |t|
t.integer :test_id, null: false
@@ -108,8 +100,6 @@ RSpec.describe RuboCop::Cop::Migration::PreventStrings do
it 'registers no offense' do
expect_no_offenses(<<~RUBY)
class Users < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def up
remove_column :users, :bio
remove_column :users, :url
@@ -137,8 +127,6 @@ RSpec.describe RuboCop::Cop::Migration::PreventStrings do
it 'registers no offense' do
expect_no_offenses(<<~RUBY)
class Users < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
def up
create_table :users do |t|
t.string :username, null: false
diff --git a/spec/rubocop/cop/migration/refer_to_index_by_name_spec.rb b/spec/rubocop/cop/migration/refer_to_index_by_name_spec.rb
index b3e66492d83..c65f86d1e13 100644
--- a/spec/rubocop/cop/migration/refer_to_index_by_name_spec.rb
+++ b/spec/rubocop/cop/migration/refer_to_index_by_name_spec.rb
@@ -15,8 +15,6 @@ RSpec.describe RuboCop::Cop::Migration::ReferToIndexByName do
it 'registers an offense' do
expect_offense(<<~RUBY, msg: 'migration methods that refer to existing indexes must do so by name')
class TestReferToIndexByName < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
INDEX_NAME = 'my_test_name'
disable_ddl_transaction!
@@ -63,8 +61,6 @@ RSpec.describe RuboCop::Cop::Migration::ReferToIndexByName do
it 'registers no offenses' do
expect_no_offenses(<<~RUBY)
class TestReferToIndexByName < ActiveRecord::Migration[6.0]
- DOWNTIME = false
-
disable_ddl_transaction!
def up
diff --git a/spec/rubocop/cop/migration/timestamps_spec.rb b/spec/rubocop/cop/migration/timestamps_spec.rb
index 91bb5c1b05b..2f99a3ff35b 100644
--- a/spec/rubocop/cop/migration/timestamps_spec.rb
+++ b/spec/rubocop/cop/migration/timestamps_spec.rb
@@ -9,8 +9,6 @@ RSpec.describe RuboCop::Cop::Migration::Timestamps do
let(:migration_with_timestamps) do
%q(
class Users < ActiveRecord::Migration[4.2]
- DOWNTIME = false
-
def change
create_table :users do |t|
t.string :username, null: false
@@ -25,8 +23,6 @@ RSpec.describe RuboCop::Cop::Migration::Timestamps do
let(:migration_without_timestamps) do
%q(
class Users < ActiveRecord::Migration[4.2]
- DOWNTIME = false
-
def change
create_table :users do |t|
t.string :username, null: false
@@ -40,8 +36,6 @@ RSpec.describe RuboCop::Cop::Migration::Timestamps do
let(:migration_with_timestamps_with_timezone) do
%q(
class Users < ActiveRecord::Migration[4.2]
- DOWNTIME = false
-
def change
create_table :users do |t|
t.string :username, null: false
@@ -61,8 +55,6 @@ RSpec.describe RuboCop::Cop::Migration::Timestamps do
it 'registers an offense when the "timestamps" method is used' do
expect_offense(<<~RUBY)
class Users < ActiveRecord::Migration[4.2]
- DOWNTIME = false
-
def change
create_table :users do |t|
t.string :username, null: false
diff --git a/spec/rubocop/cop/rspec/env_assignment_spec.rb b/spec/rubocop/cop/rspec/env_assignment_spec.rb
index da6bb2fa2fb..0fd09eeae11 100644
--- a/spec/rubocop/cop/rspec/env_assignment_spec.rb
+++ b/spec/rubocop/cop/rspec/env_assignment_spec.rb
@@ -5,8 +5,8 @@ require 'fast_spec_helper'
require_relative '../../../../rubocop/cop/rspec/env_assignment'
RSpec.describe RuboCop::Cop::RSpec::EnvAssignment do
- offense_call_single_quotes_key = %(ENV['FOO'] = 'bar').freeze
- offense_call_double_quotes_key = %(ENV["FOO"] = 'bar').freeze
+ offense_call_single_quotes_key = %(ENV['FOO'] = 'bar')
+ offense_call_double_quotes_key = %(ENV["FOO"] = 'bar')
let(:source_file) { 'spec/foo_spec.rb' }
diff --git a/spec/rubocop/cop/style/regexp_literal_mixed_preserve_spec.rb b/spec/rubocop/cop/style/regexp_literal_mixed_preserve_spec.rb
new file mode 100644
index 00000000000..384a834a512
--- /dev/null
+++ b/spec/rubocop/cop/style/regexp_literal_mixed_preserve_spec.rb
@@ -0,0 +1,131 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require_relative '../../../../rubocop/cop/style/regexp_literal_mixed_preserve'
+
+# This spec contains only relevant examples.
+#
+# See also https://github.com/rubocop/rubocop/pull/9688
+RSpec.describe RuboCop::Cop::Style::RegexpLiteralMixedPreserve, :config do
+ let(:config) do
+ supported_styles = { 'SupportedStyles' => %w[slashes percent_r mixed mixed_preserve] }
+ RuboCop::Config.new('Style/PercentLiteralDelimiters' =>
+ percent_literal_delimiters_config,
+ 'Style/RegexpLiteralMixedPreserve' =>
+ cop_config.merge(supported_styles))
+ end
+
+ let(:percent_literal_delimiters_config) { { 'PreferredDelimiters' => { '%r' => '{}' } } }
+
+ context 'when EnforcedStyle is set to mixed_preserve' do
+ let(:cop_config) { { 'EnforcedStyle' => 'mixed_preserve' } }
+
+ describe 'a single-line `//` regex without slashes' do
+ it 'is accepted' do
+ expect_no_offenses('foo = /a/')
+ end
+ end
+
+ describe 'a single-line `//` regex with slashes' do
+ it 'registers an offense and corrects' do
+ expect_offense(<<~'RUBY')
+ foo = /home\//
+ ^^^^^^^^ Use `%r` around regular expression.
+ RUBY
+
+ expect_correction(<<~'RUBY')
+ foo = %r{home/}
+ RUBY
+ end
+
+ describe 'when configured to allow inner slashes' do
+ before do
+ cop_config['AllowInnerSlashes'] = true
+ end
+
+ it 'is accepted' do
+ expect_no_offenses('foo = /home\\//')
+ end
+ end
+ end
+
+ describe 'a multi-line `//` regex without slashes' do
+ it 'is accepted' do
+ expect_no_offenses(<<~'RUBY')
+ foo = /
+ foo
+ bar
+ /x
+ RUBY
+ end
+ end
+
+ describe 'a multi-line `//` regex with slashes' do
+ it 'registers an offense and corrects' do
+ expect_offense(<<~'RUBY')
+ foo = /
+ ^ Use `%r` around regular expression.
+ https?:\/\/
+ example\.com
+ /x
+ RUBY
+
+ expect_correction(<<~'RUBY')
+ foo = %r{
+ https?://
+ example\.com
+ }x
+ RUBY
+ end
+ end
+
+ describe 'a single-line %r regex without slashes' do
+ it 'is accepted' do
+ expect_no_offenses(<<~RUBY)
+ foo = %r{a}
+ RUBY
+ end
+ end
+
+ describe 'a single-line %r regex with slashes' do
+ it 'is accepted' do
+ expect_no_offenses('foo = %r{home/}')
+ end
+
+ describe 'when configured to allow inner slashes' do
+ before do
+ cop_config['AllowInnerSlashes'] = true
+ end
+
+ it 'is accepted' do
+ expect_no_offenses(<<~RUBY)
+ foo = %r{home/}
+ RUBY
+ end
+ end
+ end
+
+ describe 'a multi-line %r regex without slashes' do
+ it 'is accepted' do
+ expect_no_offenses(<<~RUBY)
+ foo = %r{
+ foo
+ bar
+ }x
+ RUBY
+ end
+ end
+
+ describe 'a multi-line %r regex with slashes' do
+ it 'is accepted' do
+ expect_no_offenses(<<~RUBY)
+ foo = %r{
+ https?://
+ example\.com
+ }x
+ RUBY
+ end
+ end
+ end
+end
diff --git a/spec/rubocop/cop/user_admin_spec.rb b/spec/rubocop/cop/user_admin_spec.rb
new file mode 100644
index 00000000000..3bf458348f3
--- /dev/null
+++ b/spec/rubocop/cop/user_admin_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'fast_spec_helper'
+
+require 'rubocop'
+require_relative '../../../rubocop/cop/user_admin'
+
+RSpec.describe RuboCop::Cop::UserAdmin do
+ subject(:cop) { described_class.new }
+
+ it 'flags a method call' do
+ expect_offense(<<~SOURCE)
+ user.admin?
+ ^^^^^^ #{described_class::MSG}
+ SOURCE
+ end
+
+ it 'flags a method call with safe operator' do
+ expect_offense(<<~SOURCE)
+ user&.admin?
+ ^^^^^^ #{described_class::MSG}
+ SOURCE
+ end
+end
diff --git a/spec/serializers/admin/user_entity_spec.rb b/spec/serializers/admin/user_entity_spec.rb
index 42efe0eec54..79b41d90e6a 100644
--- a/spec/serializers/admin/user_entity_spec.rb
+++ b/spec/serializers/admin/user_entity_spec.rb
@@ -4,6 +4,7 @@ require "spec_helper"
RSpec.describe Admin::UserEntity do
let_it_be(:user) { build_stubbed(:user) }
+
let(:request) { double('request') }
let(:entity) do
@@ -14,7 +15,7 @@ RSpec.describe Admin::UserEntity do
subject { entity.as_json&.keys }
it 'exposes correct attributes' do
- is_expected.to contain_exactly(
+ is_expected.to include(
:id,
:name,
:created_at,
diff --git a/spec/serializers/admin/user_serializer_spec.rb b/spec/serializers/admin/user_serializer_spec.rb
index 53a9457409c..ed78ea67bd1 100644
--- a/spec/serializers/admin/user_serializer_spec.rb
+++ b/spec/serializers/admin/user_serializer_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Admin::UserSerializer do
context 'when there is a single object provided' do
it 'contains important elements for the admin user table' do
- is_expected.to contain_exactly(
+ is_expected.to include(
:id,
:name,
:created_at,
diff --git a/spec/serializers/build_artifact_entity_spec.rb b/spec/serializers/build_artifact_entity_spec.rb
index 02c172d723f..8835d4d834e 100644
--- a/spec/serializers/build_artifact_entity_spec.rb
+++ b/spec/serializers/build_artifact_entity_spec.rb
@@ -3,11 +3,13 @@
require 'spec_helper'
RSpec.describe BuildArtifactEntity do
- let(:job) { create(:ci_build) }
- let(:artifact) { create(:ci_job_artifact, :codequality, expire_at: 1.hour.from_now, job: job) }
+ let_it_be(:job) { create(:ci_build) }
+ let_it_be(:artifact) { create(:ci_job_artifact, :codequality, expire_at: 1.hour.from_now, job: job) }
+
+ let(:options) { { request: double } }
let(:entity) do
- described_class.new(artifact, request: double)
+ described_class.represent(artifact, options)
end
describe '#as_json' do
@@ -21,15 +23,18 @@ RSpec.describe BuildArtifactEntity do
expect(subject).to include(:expired, :expire_at)
end
- it 'contains paths to the artifacts' do
- expect(subject[:path])
- .to include "jobs/#{job.id}/artifacts/download?file_type=codequality"
+ it 'exposes the artifact download path' do
+ expect(subject[:path]).to include "jobs/#{job.id}/artifacts/download?file_type=codequality"
+ end
+
+ context 'when project is specified in options' do
+ let(:options) { super().merge(project: job.project) }
- expect(subject[:keep_path])
- .to include "jobs/#{job.id}/artifacts/keep"
+ it 'does not get a project from the artifact' do
+ expect(artifact).not_to receive(:project)
- expect(subject[:browse_path])
- .to include "jobs/#{job.id}/artifacts/browse"
+ subject
+ end
end
end
end
diff --git a/spec/serializers/ci/dag_pipeline_entity_spec.rb b/spec/serializers/ci/dag_pipeline_entity_spec.rb
index fdc2f5e1a04..31a0dc5c048 100644
--- a/spec/serializers/ci/dag_pipeline_entity_spec.rb
+++ b/spec/serializers/ci/dag_pipeline_entity_spec.rb
@@ -76,8 +76,8 @@ RSpec.describe Ci::DagPipelineEntity do
it 'performs the smallest number of queries', :request_store do
log = ActiveRecord::QueryRecorder.new { subject }
- # stages, project, builds, build_needs, feature_flag
- expect(log.count).to eq 5
+ # stages, project, builds, build_needs
+ expect(log.count).to eq 4
end
it 'contains all the data' do
diff --git a/spec/serializers/ci/group_variable_entity_spec.rb b/spec/serializers/ci/group_variable_entity_spec.rb
index a7e12905924..9b64e263992 100644
--- a/spec/serializers/ci/group_variable_entity_spec.rb
+++ b/spec/serializers/ci/group_variable_entity_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Ci::GroupVariableEntity do
subject { entity.as_json }
it 'contains required fields' do
- expect(subject).to include(:id, :key, :value, :protected, :variable_type)
+ expect(subject).to include(:id, :key, :value, :protected, :variable_type, :environment_scope)
end
end
end
diff --git a/spec/serializers/ci/lint/result_serializer_spec.rb b/spec/serializers/ci/lint/result_serializer_spec.rb
index 7aa95a574bf..a834ea05e14 100644
--- a/spec/serializers/ci/lint/result_serializer_spec.rb
+++ b/spec/serializers/ci/lint/result_serializer_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Ci::Lint::ResultSerializer, :aggregate_failures do
let_it_be(:project) { create(:project, :repository) }
+
let(:result) do
Gitlab::Ci::Lint
.new(project: project, current_user: project.owner)
diff --git a/spec/serializers/ci/pipeline_entity_spec.rb b/spec/serializers/ci/pipeline_entity_spec.rb
index 6ce3cef5f44..83ea0d649e8 100644
--- a/spec/serializers/ci/pipeline_entity_spec.rb
+++ b/spec/serializers/ci/pipeline_entity_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Ci::PipelineEntity do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
+
let(:request) { double('request', current_user: user) }
let(:entity) { described_class.represent(pipeline, request: request) }
@@ -165,6 +166,7 @@ RSpec.describe Ci::PipelineEntity do
context 'when pipeline is detached merge request pipeline' do
let_it_be(:merge_request) { create(:merge_request, :with_detached_merge_request_pipeline) }
+
let(:project) { merge_request.target_project }
let(:pipeline) { merge_request.pipelines_for_merge_request.first }
@@ -213,6 +215,7 @@ RSpec.describe Ci::PipelineEntity do
context 'when pipeline is merge request pipeline' do
let_it_be(:merge_request) { create(:merge_request, :with_merge_request_pipeline, merge_sha: 'abc') }
+
let(:project) { merge_request.target_project }
let(:pipeline) { merge_request.pipelines_for_merge_request.first }
diff --git a/spec/serializers/container_repository_entity_spec.rb b/spec/serializers/container_repository_entity_spec.rb
index 43969c63471..9ea00bc79e1 100644
--- a/spec/serializers/container_repository_entity_spec.rb
+++ b/spec/serializers/container_repository_entity_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe ContainerRepositoryEntity do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:repository) { create(:container_repository, project: project) }
+
let(:entity) { described_class.new(repository, request: request) }
let(:request) { double('request') }
diff --git a/spec/serializers/container_tag_entity_spec.rb b/spec/serializers/container_tag_entity_spec.rb
index 8e47a6269bc..6d0ee2cffe5 100644
--- a/spec/serializers/container_tag_entity_spec.rb
+++ b/spec/serializers/container_tag_entity_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe ContainerTagEntity do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:repository) { create(:container_repository, name: 'image', project: project) }
+
let(:entity) { described_class.new(tag, request: request) }
let(:request) { double('request') }
let(:tag) { repository.tag('test') }
diff --git a/spec/serializers/deployment_serializer_spec.rb b/spec/serializers/deployment_serializer_spec.rb
index cfd43227b18..55b02b76b4a 100644
--- a/spec/serializers/deployment_serializer_spec.rb
+++ b/spec/serializers/deployment_serializer_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe DeploymentSerializer do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user, email: project.commit.author_email) }
+
let(:resource) { create(:deployment, project: project, sha: project.commit.id) }
let(:serializer) { described_class.new(request) }
diff --git a/spec/serializers/diff_file_entity_spec.rb b/spec/serializers/diff_file_entity_spec.rb
index 1b8456e5c49..c15c9324f94 100644
--- a/spec/serializers/diff_file_entity_spec.rb
+++ b/spec/serializers/diff_file_entity_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe DiffFileEntity do
include RepoHelpers
let_it_be(:project) { create(:project, :repository) }
+
let(:repository) { project.repository }
let(:commit) { project.commit(sample_commit.id) }
let(:diff_refs) { commit.diff_refs }
@@ -22,6 +23,7 @@ RSpec.describe DiffFileEntity do
context 'when there is a merge request' do
let_it_be(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+
let(:user) { create(:user) }
let(:code_navigation_path) { Gitlab::CodeNavigationPath.new(project, project.commit.sha) }
let(:request) { EntityRequest.new(project: project, current_user: user) }
@@ -49,6 +51,14 @@ RSpec.describe DiffFileEntity do
expect(subject).to include(:load_collapsed_diff_url)
end
+
+ context 'when diff_view is unknown' do
+ let(:options) { { diff_view: :unknown } }
+
+ it 'hides highlighted_diff_lines and parallel_diff_lines' do
+ is_expected.not_to include(:highlighted_diff_lines, :parallel_diff_lines)
+ end
+ end
end
describe '#parallel_diff_lines' do
diff --git a/spec/serializers/environment_serializer_spec.rb b/spec/serializers/environment_serializer_spec.rb
index 1eba9ae4e5e..985e18f27a0 100644
--- a/spec/serializers/environment_serializer_spec.rb
+++ b/spec/serializers/environment_serializer_spec.rb
@@ -3,8 +3,10 @@
require 'spec_helper'
RSpec.describe EnvironmentSerializer do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
+ include CreateEnvironmentsHelpers
+
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project, reload: true) { create(:project, :repository) }
let(:json) do
described_class
@@ -12,43 +14,18 @@ RSpec.describe EnvironmentSerializer do
.represent(resource)
end
- before do
+ before_all do
project.add_developer(user)
end
- context 'when there is a single object provided' do
- let(:project) { create(:project, :repository) }
- let(:deployable) { create(:ci_build) }
- let(:deployment) do
- create(:deployment, :success,
- deployable: deployable,
- user: user,
- project: project,
- sha: project.commit.id)
- end
-
- let(:resource) { deployment.environment }
-
- before do
- create(:ci_build, :manual, name: 'manual1', pipeline: deployable.pipeline)
- end
-
- it 'contains important elements of environment' do
- expect(json)
- .to include(:name, :external_url, :environment_path, :last_deployment)
- end
+ it_behaves_like 'avoid N+1 on environments serialization'
- it 'contains relevant information about last deployment' do
- last_deployment = json.fetch(:last_deployment)
+ context 'when there is a collection of objects provided' do
+ let(:resource) { project.environments }
- expect(last_deployment)
- .to include(:ref, :user, :commit, :deployable, :manual_actions)
+ before_all do
+ create_list(:environment, 2, project: project)
end
- end
-
- context 'when there is a collection of objects provided' do
- let(:project) { create(:project) }
- let(:resource) { create_list(:environment, 2) }
it 'contains important elements of environment' do
expect(json.first)
@@ -207,4 +184,11 @@ RSpec.describe EnvironmentSerializer do
end
end
end
+
+ def create_environment_with_associations(project)
+ create(:environment, project: project).tap do |environment|
+ create(:deployment, :success, environment: environment, project: project)
+ create(:deployment, :running, environment: environment, project: project)
+ end
+ end
end
diff --git a/spec/serializers/evidences/evidence_entity_spec.rb b/spec/serializers/evidences/evidence_entity_spec.rb
index 8ec0422fea2..2962b58c1b7 100644
--- a/spec/serializers/evidences/evidence_entity_spec.rb
+++ b/spec/serializers/evidences/evidence_entity_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Evidences::EvidenceEntity do
let_it_be(:project) { create(:project) }
+
let(:release) { create(:release, project: project) }
let(:evidence) { build(:evidence, release: release) }
let(:schema_file) { 'evidences/evidence' }
diff --git a/spec/serializers/fork_namespace_entity_spec.rb b/spec/serializers/fork_namespace_entity_spec.rb
index 7740ed77540..5e9918a89ff 100644
--- a/spec/serializers/fork_namespace_entity_spec.rb
+++ b/spec/serializers/fork_namespace_entity_spec.rb
@@ -9,6 +9,7 @@ RSpec.describe ForkNamespaceEntity do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:namespace) { create(:group, :with_avatar, description: 'test') }
+
let(:memberships) do
user.members.index_by(&:source_id)
end
diff --git a/spec/serializers/group_link/group_group_link_entity_spec.rb b/spec/serializers/group_link/group_group_link_entity_spec.rb
index 15bcbbcb1d6..2821c433784 100644
--- a/spec/serializers/group_link/group_group_link_entity_spec.rb
+++ b/spec/serializers/group_link/group_group_link_entity_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe GroupLink::GroupGroupLinkEntity do
include_context 'group_group_link'
let_it_be(:current_user) { create(:user) }
+
let(:entity) { described_class.new(group_group_link) }
before do
diff --git a/spec/serializers/group_link/project_group_link_entity_spec.rb b/spec/serializers/group_link/project_group_link_entity_spec.rb
index 0bb3d06933b..e7e42d79b5e 100644
--- a/spec/serializers/group_link/project_group_link_entity_spec.rb
+++ b/spec/serializers/group_link/project_group_link_entity_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe GroupLink::ProjectGroupLinkEntity do
let_it_be(:current_user) { create(:user) }
let_it_be(:project_group_link) { create(:project_group_link) }
+
let(:entity) { described_class.new(project_group_link) }
before do
diff --git a/spec/serializers/issue_board_entity_spec.rb b/spec/serializers/issue_board_entity_spec.rb
index e60a063b9eb..138b8174d81 100644
--- a/spec/serializers/issue_board_entity_spec.rb
+++ b/spec/serializers/issue_board_entity_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe IssueBoardEntity do
let_it_be(:user) { create(:user) }
let_it_be(:milestone) { create(:milestone, project: project) }
let_it_be(:label) { create(:label, project: project, title: 'Test Label') }
+
let(:request) { double('request', current_user: user) }
subject { described_class.new(resource, request: request).as_json }
diff --git a/spec/serializers/member_entity_spec.rb b/spec/serializers/member_entity_spec.rb
index 883cb511abc..dc7aa4611f2 100644
--- a/spec/serializers/member_entity_spec.rb
+++ b/spec/serializers/member_entity_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe MemberEntity do
let_it_be(:current_user) { create(:user) }
+
let(:entity) { described_class.new(member, { current_user: current_user, group: group, source: source }) }
let(:entity_hash) { entity.as_json }
diff --git a/spec/serializers/member_serializer_spec.rb b/spec/serializers/member_serializer_spec.rb
index af209c0191f..f7415214e95 100644
--- a/spec/serializers/member_serializer_spec.rb
+++ b/spec/serializers/member_serializer_spec.rb
@@ -7,28 +7,77 @@ RSpec.describe MemberSerializer do
let_it_be(:current_user) { create(:user) }
- subject { described_class.new.represent(members, { current_user: current_user, group: group, source: source }) }
+ subject(:representation) do
+ described_class.new.represent(members, { current_user: current_user, group: group, source: source }).to_json
+ end
shared_examples 'members.json' do
- it 'matches json schema' do
- expect(subject.to_json).to match_schema('members')
- end
+ it { is_expected.to match_schema('members') }
end
context 'group member' do
- let(:group) { create(:group) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:members) { present_members(create_list(:group_member, 1, group: group)) }
+
let(:source) { group }
- let(:members) { present_members(create_list(:group_member, 1, group: group)) }
it_behaves_like 'members.json'
+
+ it 'handles last group owner assignment' do
+ group_member = members.last
+
+ expect { representation }.to change(group_member, :last_owner)
+ .from(nil).to(true)
+ .and change(group_member, :last_blocked_owner).from(nil).to(false)
+ end
+
+ context "with LastGroupOwnerAssigner query improvements" do
+ it "avoids N+1 database queries for last group owner assignment in MembersPresenter" do
+ group_member = create(:group_member, group: group)
+ control_count = ActiveRecord::QueryRecorder.new { member_last_owner_with_preload([group_member]) }.count
+ group_members = create_list(:group_member, 3, group: group)
+
+ expect { member_last_owner_with_preload(group_members) }.not_to exceed_query_limit(control_count)
+ end
+
+ it "avoids N+1 database queries for last blocked owner assignment in MembersPresenter" do
+ group_member = create(:group_member, group: group)
+ control_count = ActiveRecord::QueryRecorder.new { member_last_blocked_owner_with_preload([group_member]) }.count
+ group_members = create_list(:group_member, 3, group: group)
+
+ expect { member_last_blocked_owner_with_preload(group_members) }.not_to exceed_query_limit(control_count)
+ end
+
+ def member_last_owner_with_preload(members)
+ assigner_with_preload(members)
+ members.map { |m| group.member_last_owner?(m) }
+ end
+
+ def member_last_blocked_owner_with_preload(members)
+ assigner_with_preload(members)
+ members.map { |m| group.member_last_blocked_owner?(m) }
+ end
+
+ def assigner_with_preload(members)
+ MembersPreloader.new(members).preload_all
+ Members::LastGroupOwnerAssigner.new(group, members).execute
+ end
+ end
end
context 'project member' do
- let(:project) { create(:project) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:members) { present_members(create_list(:project_member, 1, project: project)) }
+
let(:source) { project }
let(:group) { project.group }
- let(:members) { present_members(create_list(:project_member, 1, project: project)) }
it_behaves_like 'members.json'
+
+ it 'does not invoke group owner assignment' do
+ expect(Members::LastGroupOwnerAssigner).not_to receive(:new)
+
+ representation
+ end
end
end
diff --git a/spec/serializers/member_user_entity_spec.rb b/spec/serializers/member_user_entity_spec.rb
index 1c000c06bb6..b505571cbf2 100644
--- a/spec/serializers/member_user_entity_spec.rb
+++ b/spec/serializers/member_user_entity_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe MemberUserEntity do
let_it_be(:user) { create(:user) }
let_it_be(:emoji) { 'slight_smile' }
let_it_be(:user_status) { create(:user_status, user: user, emoji: emoji) }
+
let(:entity) { described_class.new(user) }
let(:entity_hash) { entity.as_json }
diff --git a/spec/serializers/merge_request_diff_entity_spec.rb b/spec/serializers/merge_request_diff_entity_spec.rb
index 542ef6cb3c3..a3b356505b8 100644
--- a/spec/serializers/merge_request_diff_entity_spec.rb
+++ b/spec/serializers/merge_request_diff_entity_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe MergeRequestDiffEntity do
let_it_be(:project) { create(:project, :repository) }
+
let(:request) { EntityRequest.new(project: project) }
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
let(:merge_request_diffs) { merge_request.merge_request_diffs }
diff --git a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
index 8c72430ff5c..5845a868e53 100644
--- a/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_cached_widget_entity_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe MergeRequestPollCachedWidgetEntity do
let_it_be(:project, refind: true) { create :project, :repository }
let_it_be(:resource, refind: true) { create(:merge_request, source_project: project, target_project: project) }
let_it_be(:user) { create(:user) }
+
let(:pipeline) { create(:ci_empty_pipeline, project: project) }
let(:request) { double('request', current_user: user, project: project) }
@@ -248,14 +249,6 @@ RSpec.describe MergeRequestPollCachedWidgetEntity do
expect(subject[:pipeline]).to eq(pipeline_payload)
end
-
- context 'when merge_request_cached_pipeline_serializer is disabled' do
- it 'does not return pipeline' do
- stub_feature_flags(merge_request_cached_pipeline_serializer: false)
-
- expect(subject[:pipeline]).to be_nil
- end
- end
end
context 'when user does not have access to pipelines' do
@@ -276,4 +269,83 @@ RSpec.describe MergeRequestPollCachedWidgetEntity do
end
end
end
+
+ describe 'merge_pipeline' do
+ it 'returns nil' do
+ expect(subject[:merge_pipeline]).to be_nil
+ end
+
+ context 'when is merged' do
+ let(:resource) { create(:merged_merge_request, source_project: project, merge_commit_sha: project.commit.id) }
+ let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.target_branch, sha: resource.merge_commit_sha) }
+
+ before do
+ project.add_maintainer(user)
+ end
+
+ it 'returns merge_pipeline' do
+ pipeline.reload
+ pipeline_payload =
+ MergeRequests::PipelineEntity
+ .represent(pipeline, request: request)
+ .as_json
+
+ expect(subject[:merge_pipeline]).to eq(pipeline_payload)
+ end
+
+ context 'when user cannot read pipelines on target project' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'returns nil' do
+ expect(subject[:merge_pipeline]).to be_nil
+ end
+ end
+
+ context 'when merge_request_cached_merge_pipeline_serializer is disabled' do
+ before do
+ stub_feature_flags(merge_request_cached_merge_pipeline_serializer: false)
+ end
+
+ it 'returns nil' do
+ expect(subject[:merge_pipeline]).to be_nil
+ end
+ end
+ end
+ end
+
+ describe 'ci related paths' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:path_field, :method_for_existence_check) do
+ :terraform_reports_path | :has_terraform_reports?
+ :accessibility_report_path | :has_accessibility_reports?
+ :exposed_artifacts_path | :has_exposed_artifacts?
+ :test_reports_path | :has_test_reports?
+ :codequality_reports_path | :has_codequality_reports?
+ end
+
+ with_them do
+ context 'when merge request has reports' do
+ before do
+ allow(resource).to receive(method_for_existence_check).and_return(true)
+ end
+
+ it 'sets the path to poll data' do
+ expect(subject[path_field]).to be_present
+ end
+ end
+
+ context 'when merge request has no reports' do
+ before do
+ allow(resource).to receive(method_for_existence_check).and_return(false)
+ end
+
+ it 'does not set reports path' do
+ expect(subject[path_field]).to be_nil
+ end
+ end
+ end
+ end
end
diff --git a/spec/serializers/merge_request_poll_widget_entity_spec.rb b/spec/serializers/merge_request_poll_widget_entity_spec.rb
index 1e5a8915da0..c88555226a9 100644
--- a/spec/serializers/merge_request_poll_widget_entity_spec.rb
+++ b/spec/serializers/merge_request_poll_widget_entity_spec.rb
@@ -6,14 +6,15 @@ RSpec.describe MergeRequestPollWidgetEntity do
include ProjectForksHelper
using RSpec::Parameterized::TableSyntax
- let(:project) { create :project, :repository }
- let(:resource) { create(:merge_request, source_project: project, target_project: project) }
- let(:user) { create(:user) }
+ let_it_be(:project) { create :project, :repository }
+ let_it_be(:resource) { create(:merge_request, source_project: project, target_project: project) }
+ let_it_be(:user) { create(:user) }
let(:request) { double('request', current_user: user, project: project) }
+ let(:options) { {} }
subject do
- described_class.new(resource, request: request).as_json
+ described_class.new(resource, { request: request }.merge(options)).as_json
end
it 'has default_merge_commit_message_with_description' do
@@ -22,20 +23,33 @@ RSpec.describe MergeRequestPollWidgetEntity do
end
describe 'merge_pipeline' do
+ before do
+ stub_feature_flags(merge_request_cached_merge_pipeline_serializer: false)
+ end
+
it 'returns nil' do
expect(subject[:merge_pipeline]).to be_nil
end
context 'when is merged' do
- let(:resource) { create(:merged_merge_request, source_project: project, merge_commit_sha: project.commit.id) }
- let(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.target_branch, sha: resource.merge_commit_sha) }
+ let_it_be(:resource) { create(:merged_merge_request, source_project: project, merge_commit_sha: project.commit.id) }
+ let_it_be(:pipeline) { create(:ci_empty_pipeline, project: project, ref: resource.target_branch, sha: resource.merge_commit_sha) }
before do
project.add_maintainer(user)
end
+ context 'when user cannot read pipelines on target project' do
+ before do
+ project.team.truncate
+ end
+
+ it 'returns nil' do
+ expect(subject[:merge_pipeline]).to be_nil
+ end
+ end
+
it 'returns merge_pipeline' do
- pipeline.reload
pipeline_payload =
MergeRequests::PipelineEntity
.represent(pipeline, request: request)
@@ -44,9 +58,9 @@ RSpec.describe MergeRequestPollWidgetEntity do
expect(subject[:merge_pipeline]).to eq(pipeline_payload)
end
- context 'when user cannot read pipelines on target project' do
+ context 'when merge_request_cached_merge_pipeline_serializer is enabled' do
before do
- project.add_guest(user)
+ stub_feature_flags(merge_request_cached_merge_pipeline_serializer: true)
end
it 'returns nil' do
@@ -73,72 +87,6 @@ RSpec.describe MergeRequestPollWidgetEntity do
end
end
- describe 'terraform_reports_path' do
- context 'when merge request has terraform reports' do
- before do
- allow(resource).to receive(:has_terraform_reports?).and_return(true)
- end
-
- it 'set the path to poll data' do
- expect(subject[:terraform_reports_path]).to be_present
- end
- end
-
- context 'when merge request has no terraform reports' do
- before do
- allow(resource).to receive(:has_terraform_reports?).and_return(false)
- end
-
- it 'set the path to poll data' do
- expect(subject[:terraform_reports_path]).to be_nil
- end
- end
- end
-
- describe 'accessibility_report_path' do
- context 'when merge request has accessibility reports' do
- before do
- allow(resource).to receive(:has_accessibility_reports?).and_return(true)
- end
-
- it 'set the path to poll data' do
- expect(subject[:accessibility_report_path]).to be_present
- end
- end
-
- context 'when merge request has no accessibility reports' do
- before do
- allow(resource).to receive(:has_accessibility_reports?).and_return(false)
- end
-
- it 'set the path to poll data' do
- expect(subject[:accessibility_report_path]).to be_nil
- end
- end
- end
-
- describe 'exposed_artifacts_path' do
- context 'when merge request has exposed artifacts' do
- before do
- expect(resource).to receive(:has_exposed_artifacts?).and_return(true)
- end
-
- it 'set the path to poll data' do
- expect(subject[:exposed_artifacts_path]).to be_present
- end
- end
-
- context 'when merge request has no exposed artifacts' do
- before do
- expect(resource).to receive(:has_exposed_artifacts?).and_return(false)
- end
-
- it 'set the path to poll data' do
- expect(subject[:exposed_artifacts_path]).to be_nil
- end
- end
- end
-
describe 'auto merge' do
before do
project.add_maintainer(user)
@@ -226,19 +174,6 @@ RSpec.describe MergeRequestPollWidgetEntity do
expect(subject[:pipeline]).to be_nil
end
- context 'when merge_request_cached_pipeline_serializer is disabled' do
- it 'returns detailed info about pipeline' do
- stub_feature_flags(merge_request_cached_pipeline_serializer: false)
-
- pipeline_payload =
- MergeRequests::PipelineEntity
- .represent(pipeline, request: req)
- .as_json
-
- expect(subject[:pipeline]).to eq(pipeline_payload)
- end
- end
-
it 'returns ci_status' do
expect(subject[:ci_status]).to eq('pending')
end
@@ -278,4 +213,39 @@ RSpec.describe MergeRequestPollWidgetEntity do
])
end
end
+
+ describe '#mergeable' do
+ it 'shows whether a merge request is mergeable' do
+ expect(subject[:mergeable]).to eq(true)
+ end
+
+ context 'when merge request is in checking state' do
+ before do
+ resource.mark_as_unchecked!
+ resource.mark_as_checking!
+ end
+
+ it 'calculates mergeability and returns true' do
+ expect(subject[:mergeable]).to eq(true)
+ end
+
+ context 'when async_mergeability_check is passed' do
+ let(:options) { { async_mergeability_check: true } }
+
+ it 'returns false' do
+ expect(subject[:mergeable]).to eq(false)
+ end
+
+ context 'when check_mergeability_async_in_widget is disabled' do
+ before do
+ stub_feature_flags(check_mergeability_async_in_widget: false)
+ end
+
+ it 'calculates mergeability and returns true' do
+ expect(subject[:mergeable]).to eq(true)
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/serializers/merge_request_user_entity_spec.rb b/spec/serializers/merge_request_user_entity_spec.rb
index 697fa3001e3..026a229322e 100644
--- a/spec/serializers/merge_request_user_entity_spec.rb
+++ b/spec/serializers/merge_request_user_entity_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe MergeRequestUserEntity do
let_it_be(:user) { create(:user) }
let_it_be(:merge_request) { create(:merge_request) }
+
let(:request) { EntityRequest.new(project: merge_request.target_project, current_user: user) }
let(:entity) do
diff --git a/spec/serializers/merge_requests/pipeline_entity_spec.rb b/spec/serializers/merge_requests/pipeline_entity_spec.rb
index b91d5e7679f..03a049401c1 100644
--- a/spec/serializers/merge_requests/pipeline_entity_spec.rb
+++ b/spec/serializers/merge_requests/pipeline_entity_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe MergeRequests::PipelineEntity do
)
expect(subject[:commit]).to include(:short_id, :commit_path)
expect(subject[:ref]).to include(:branch)
- expect(subject[:details]).to include(:artifacts, :name, :status, :stages)
+ expect(subject[:details]).to include(:artifacts, :name, :status, :stages, :finished_at)
expect(subject[:details][:status]).to include(:icon, :favicon, :text, :label, :tooltip)
expect(subject[:flags]).to include(:merge_request_pipeline)
end
diff --git a/spec/serializers/namespace_basic_entity_spec.rb b/spec/serializers/namespace_basic_entity_spec.rb
index 8b69af5696a..379a49e364e 100644
--- a/spec/serializers/namespace_basic_entity_spec.rb
+++ b/spec/serializers/namespace_basic_entity_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe NamespaceBasicEntity do
let_it_be(:group) { create(:group) }
+
let(:entity) do
described_class.represent(group)
end
diff --git a/spec/serializers/pipeline_details_entity_spec.rb b/spec/serializers/pipeline_details_entity_spec.rb
index 2f54f45866b..5756656d146 100644
--- a/spec/serializers/pipeline_details_entity_spec.rb
+++ b/spec/serializers/pipeline_details_entity_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe PipelineDetailsEntity do
let_it_be(:user) { create(:user) }
+
let(:request) { double('request') }
let(:entity) do
diff --git a/spec/serializers/pipeline_serializer_spec.rb b/spec/serializers/pipeline_serializer_spec.rb
index bcaaa61eb04..6028da301f3 100644
--- a/spec/serializers/pipeline_serializer_spec.rb
+++ b/spec/serializers/pipeline_serializer_spec.rb
@@ -202,13 +202,32 @@ RSpec.describe PipelineSerializer do
# Existing numbers are high and require performance optimization
# Ongoing issue:
# https://gitlab.com/gitlab-org/gitlab/-/issues/225156
- expected_queries = Gitlab.ee? ? 85 : 76
+ expected_queries = Gitlab.ee? ? 82 : 76
expect(recorded.count).to be_within(2).of(expected_queries)
expect(recorded.cached_count).to eq(0)
end
end
+ context 'with build environments' do
+ let(:ref) { 'feature' }
+
+ it 'verifies number of queries', :request_store do
+ stub_licensed_features(protected_environments: true)
+
+ env = create(:environment, project: project)
+ create(:ci_build, :scheduled, project: project, environment: env.name)
+ create(:ci_build, :scheduled, project: project, environment: env.name)
+ create(:ci_build, :scheduled, project: project, environment: env.name)
+
+ recorded = ActiveRecord::QueryRecorder.new { subject }
+ expected_queries = Gitlab.ee? ? 61 : 57
+
+ expect(recorded.count).to be_within(1).of(expected_queries)
+ expect(recorded.cached_count).to eq(0)
+ end
+ end
+
context 'with scheduled and manual builds' do
let(:ref) { 'feature' }
diff --git a/spec/serializers/project_import_entity_spec.rb b/spec/serializers/project_import_entity_spec.rb
index 1481d4122ae..94af9f1cbd8 100644
--- a/spec/serializers/project_import_entity_spec.rb
+++ b/spec/serializers/project_import_entity_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe ProjectImportEntity do
include ImportHelper
let_it_be(:project) { create(:project, import_status: :started, import_source: 'namespace/project') }
+
let(:provider_url) { 'https://provider.com' }
let(:entity) { described_class.represent(project, provider_url: provider_url) }
diff --git a/spec/serializers/project_serializer_spec.rb b/spec/serializers/project_serializer_spec.rb
index 4bf0657129f..317a3714f0c 100644
--- a/spec/serializers/project_serializer_spec.rb
+++ b/spec/serializers/project_serializer_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe ProjectSerializer do
let_it_be(:project) { create(:project) }
+
let(:provider_url) { 'http://provider.com' }
context 'when serializer option is :import' do
diff --git a/spec/serializers/review_app_setup_entity_spec.rb b/spec/serializers/review_app_setup_entity_spec.rb
index 0893d7ee47f..9b068a2e9dd 100644
--- a/spec/serializers/review_app_setup_entity_spec.rb
+++ b/spec/serializers/review_app_setup_entity_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe ReviewAppSetupEntity do
let_it_be(:user) { create(:admin) }
+
let(:project) { create(:project) }
let(:presenter) { ProjectPresenter.new(project, current_user: user) }
let(:entity) { described_class.new(presenter) }
diff --git a/spec/serializers/runner_entity_spec.rb b/spec/serializers/runner_entity_spec.rb
index e864b52c0f2..39cac65c5ac 100644
--- a/spec/serializers/runner_entity_spec.rb
+++ b/spec/serializers/runner_entity_spec.rb
@@ -20,6 +20,7 @@ RSpec.describe RunnerEntity do
it 'contains required fields' do
expect(subject).to include(:id, :description)
expect(subject).to include(:edit_path)
+ expect(subject).to include(:short_sha)
end
end
end
diff --git a/spec/serializers/service_event_entity_spec.rb b/spec/serializers/service_event_entity_spec.rb
index 09bb8bca43b..64baa57fd6d 100644
--- a/spec/serializers/service_event_entity_spec.rb
+++ b/spec/serializers/service_event_entity_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe ServiceEventEntity do
let(:event) { 'push' }
it 'exposes correct attributes' do
- expect(subject[:description]).to eq('Event will be triggered by a push to the repository')
+ expect(subject[:description]).to eq('Trigger event for pushes to the repository.')
expect(subject[:name]).to eq('push_events')
expect(subject[:title]).to eq('push')
expect(subject[:value]).to be(true)
@@ -29,7 +29,7 @@ RSpec.describe ServiceEventEntity do
let(:event) { 'note' }
it 'exposes correct attributes' do
- expect(subject[:description]).to eq('Event will be triggered when someone adds a comment')
+ expect(subject[:description]).to eq('Trigger event for new comments.')
expect(subject[:name]).to eq('note_events')
expect(subject[:title]).to eq('note')
expect(subject[:value]).to eq(false)
diff --git a/spec/serializers/service_field_entity_spec.rb b/spec/serializers/service_field_entity_spec.rb
index f10639dfa1b..007042e1087 100644
--- a/spec/serializers/service_field_entity_spec.rb
+++ b/spec/serializers/service_field_entity_spec.rb
@@ -23,10 +23,10 @@ RSpec.describe ServiceFieldEntity do
type: 'text',
name: 'username',
title: 'Username or Email',
- placeholder: 'Use a username for server version and an email for cloud version',
+ placeholder: nil,
+ help: 'Use a username for server version and an email for cloud version.',
required: true,
choices: nil,
- help: nil,
value: 'jira_username'
}
@@ -41,11 +41,11 @@ RSpec.describe ServiceFieldEntity do
expected_hash = {
type: 'password',
name: 'password',
- title: 'Password or API token',
- placeholder: 'Use a password for server version and an API token for cloud version',
+ title: 'Enter new password or API token',
+ placeholder: nil,
+ help: 'Leave blank to use your current password or API token.',
required: true,
choices: nil,
- help: nil,
value: 'true'
}
@@ -72,7 +72,7 @@ RSpec.describe ServiceFieldEntity do
}
is_expected.to include(expected_hash)
- expect(subject[:help]).to include("Send notifications from the committer's email address if the domain is part of the domain GitLab is running on")
+ expect(subject[:help]).to include("Send notifications from the committer's email address if the domain matches the domain used by your GitLab instance")
end
end
diff --git a/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb b/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb
new file mode 100644
index 00000000000..8a53d9fbf7c
--- /dev/null
+++ b/spec/services/authorized_project_update/find_records_due_for_refresh_service_spec.rb
@@ -0,0 +1,295 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe AuthorizedProjectUpdate::FindRecordsDueForRefreshService do
+ # We're using let! here so that any expectations for the service class are not
+ # triggered twice.
+ let!(:project) { create(:project) }
+
+ let(:user) { project.namespace.owner }
+ let(:service) { described_class.new(user) }
+
+ describe '#execute' do
+ context 'callbacks' do
+ let(:callback) { double('callback') }
+
+ context 'incorrect_auth_found_callback callback' do
+ let(:user) { create(:user) }
+ let(:service) do
+ described_class.new(user,
+ incorrect_auth_found_callback: callback)
+ end
+
+ it 'is called' do
+ access_level = Gitlab::Access::DEVELOPER
+ create(:project_authorization, user: user, project: project, access_level: access_level)
+
+ expect(callback).to receive(:call).with(project.id, access_level).once
+
+ service.execute
+ end
+ end
+
+ context 'missing_auth_found_callback callback' do
+ let(:service) do
+ described_class.new(user,
+ missing_auth_found_callback: callback)
+ end
+
+ it 'is called' do
+ ProjectAuthorization.delete_all
+
+ expect(callback).to receive(:call).with(project.id, Gitlab::Access::MAINTAINER).once
+
+ service.execute
+ end
+ end
+ end
+
+ context 'finding project authorizations due for refresh' do
+ context 'when there are changes to be made' do
+ before do
+ user.project_authorizations.delete_all
+ end
+
+ it 'finds project authorizations that need to be refreshed' do
+ project2 = create(:project)
+ user.project_authorizations
+ .create!(project: project2, access_level: Gitlab::Access::MAINTAINER)
+
+ to_be_removed = [project2.id]
+ to_be_added = [[user.id, project.id, Gitlab::Access::MAINTAINER]]
+
+ expect(service.execute).to eq([to_be_removed, to_be_added])
+ end
+
+ it 'finds duplicate entries that have to be removed' do
+ [Gitlab::Access::MAINTAINER, Gitlab::Access::REPORTER].each do |access_level|
+ user.project_authorizations.create!(project: project, access_level: access_level)
+ end
+
+ to_be_removed = [project.id]
+ to_be_added = [[user.id, project.id, Gitlab::Access::MAINTAINER]]
+
+ expect(service.execute).to eq([to_be_removed, to_be_added])
+ end
+
+ it 'finds entries with wrong access levels' do
+ user.project_authorizations
+ .create!(project: project, access_level: Gitlab::Access::DEVELOPER)
+
+ to_be_removed = [project.id]
+ to_be_added = [[user.id, project.id, Gitlab::Access::MAINTAINER]]
+
+ expect(service.execute).to eq([to_be_removed, to_be_added])
+ end
+ end
+
+ context 'when there are no changes to be made' do
+ it 'returns empty arrays' do
+ expect(service.execute).to eq([[], []])
+ end
+ end
+ end
+ end
+
+ describe '#needs_refresh?' do
+ subject { service.needs_refresh? }
+
+ context 'when there are records due for either removal or addition' do
+ context 'when there are both removals and additions to be made' do
+ before do
+ user.project_authorizations.delete_all
+ create(:project_authorization, user: user)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when there are no removals, but there are additions to be made' do
+ before do
+ user.project_authorizations.delete_all
+ end
+
+ it { is_expected.to eq(true) }
+ end
+
+ context 'when there are no additions, but there are removals to be made' do
+ before do
+ create(:project_authorization, user: user)
+ end
+
+ it { is_expected.to eq(true) }
+ end
+ end
+
+ context 'when there are no additions or removals to be made' do
+ it { is_expected.to eq(false) }
+ end
+ end
+
+ describe '#fresh_access_levels_per_project' do
+ let(:hash) { service.fresh_access_levels_per_project }
+
+ it 'returns a Hash' do
+ expect(hash).to be_an_instance_of(Hash)
+ end
+
+ it 'sets the keys to the project IDs' do
+ expect(hash.keys).to eq([project.id])
+ end
+
+ it 'sets the values to the access levels' do
+ expect(hash.values).to eq([Gitlab::Access::MAINTAINER])
+ end
+
+ context 'personal projects' do
+ it 'includes the project with the right access level' do
+ expect(hash[project.id]).to eq(Gitlab::Access::MAINTAINER)
+ end
+ end
+
+ context 'projects the user is a member of' do
+ let!(:other_project) { create(:project) }
+
+ before do
+ other_project.team.add_reporter(user)
+ end
+
+ it 'includes the project with the right access level' do
+ expect(hash[other_project.id]).to eq(Gitlab::Access::REPORTER)
+ end
+ end
+
+ context 'projects of groups the user is a member of' do
+ let(:group) { create(:group) }
+ let!(:other_project) { create(:project, group: group) }
+
+ before do
+ group.add_owner(user)
+ end
+
+ it 'includes the project with the right access level' do
+ expect(hash[other_project.id]).to eq(Gitlab::Access::OWNER)
+ end
+ end
+
+ context 'projects of subgroups of groups the user is a member of' do
+ let(:group) { create(:group) }
+ let(:nested_group) { create(:group, parent: group) }
+ let!(:other_project) { create(:project, group: nested_group) }
+
+ before do
+ group.add_maintainer(user)
+ end
+
+ it 'includes the project with the right access level' do
+ expect(hash[other_project.id]).to eq(Gitlab::Access::MAINTAINER)
+ end
+ end
+
+ context 'projects shared with groups the user is a member of' do
+ let(:group) { create(:group) }
+ let(:other_project) { create(:project) }
+ let!(:project_group_link) { create(:project_group_link, project: other_project, group: group, group_access: Gitlab::Access::GUEST) }
+
+ before do
+ group.add_maintainer(user)
+ end
+
+ it 'includes the project with the right access level' do
+ expect(hash[other_project.id]).to eq(Gitlab::Access::GUEST)
+ end
+ end
+
+ context 'projects shared with subgroups of groups the user is a member of' do
+ let(:group) { create(:group) }
+ let(:nested_group) { create(:group, parent: group) }
+ let(:other_project) { create(:project) }
+ let!(:project_group_link) { create(:project_group_link, project: other_project, group: nested_group, group_access: Gitlab::Access::DEVELOPER) }
+
+ before do
+ group.add_maintainer(user)
+ end
+
+ it 'includes the project with the right access level' do
+ expect(hash[other_project.id]).to eq(Gitlab::Access::DEVELOPER)
+ end
+ end
+ end
+
+ describe '#current_authorizations_per_project' do
+ let(:hash) { service.current_authorizations_per_project }
+
+ it 'returns a Hash' do
+ expect(hash).to be_an_instance_of(Hash)
+ end
+
+ it 'sets the keys to the project IDs' do
+ expect(hash.keys).to eq([project.id])
+ end
+
+ it 'sets the values to the project authorization rows' do
+ expect(hash.values.length).to eq(1)
+
+ value = hash.values[0]
+
+ expect(value.project_id).to eq(project.id)
+ expect(value.access_level).to eq(Gitlab::Access::MAINTAINER)
+ end
+ end
+
+ describe '#current_authorizations' do
+ context 'without authorizations' do
+ it 'returns an empty list' do
+ user.project_authorizations.delete_all
+
+ expect(service.current_authorizations.empty?).to eq(true)
+ end
+ end
+
+ context 'with an authorization' do
+ let(:row) { service.current_authorizations.take }
+
+ it 'returns the currently authorized projects' do
+ expect(service.current_authorizations.length).to eq(1)
+ end
+
+ it 'includes the project ID for every row' do
+ expect(row.project_id).to eq(project.id)
+ end
+
+ it 'includes the access level for every row' do
+ expect(row.access_level).to eq(Gitlab::Access::MAINTAINER)
+ end
+ end
+ end
+
+ describe '#fresh_authorizations' do
+ it 'returns the new authorized projects' do
+ expect(service.fresh_authorizations.length).to eq(1)
+ end
+
+ it 'returns the highest access level' do
+ project.team.add_guest(user)
+
+ rows = service.fresh_authorizations.to_a
+
+ expect(rows.length).to eq(1)
+ expect(rows.first.access_level).to eq(Gitlab::Access::MAINTAINER)
+ end
+
+ context 'every returned row' do
+ let(:row) { service.fresh_authorizations.take }
+
+ it 'includes the project ID' do
+ expect(row.project_id).to eq(project.id)
+ end
+
+ it 'includes the access level' do
+ expect(row.access_level).to eq(Gitlab::Access::MAINTAINER)
+ end
+ end
+ end
+end
diff --git a/spec/services/authorized_project_update/recalculate_for_user_range_service_spec.rb b/spec/services/authorized_project_update/recalculate_for_user_range_service_spec.rb
index 0c944cad40c..95e2c0380bf 100644
--- a/spec/services/authorized_project_update/recalculate_for_user_range_service_spec.rb
+++ b/spec/services/authorized_project_update/recalculate_for_user_range_service_spec.rb
@@ -7,12 +7,14 @@ RSpec.describe AuthorizedProjectUpdate::RecalculateForUserRangeService do
let_it_be(:users) { create_list(:user, 2) }
it 'calls Users::RefreshAuthorizedProjectsService' do
- users.each do |user|
+ user_ids = users.map(&:id)
+
+ User.where(id: user_ids).select(:id).each do |user|
expect(Users::RefreshAuthorizedProjectsService).to(
receive(:new).with(user, source: described_class.name).and_call_original)
end
- range = users.map(&:id).minmax
+ range = user_ids.minmax
described_class.new(*range).execute
end
end
diff --git a/spec/services/award_emojis/add_service_spec.rb b/spec/services/award_emojis/add_service_spec.rb
index 85c39015614..0fbb785e2d6 100644
--- a/spec/services/award_emojis/add_service_spec.rb
+++ b/spec/services/award_emojis/add_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe AwardEmojis::AddService do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:awardable) { create(:note, project: project) }
+
let(:name) { 'thumbsup' }
subject(:service) { described_class.new(awardable, name, user) }
diff --git a/spec/services/award_emojis/destroy_service_spec.rb b/spec/services/award_emojis/destroy_service_spec.rb
index 2aba078b638..f743de7c59e 100644
--- a/spec/services/award_emojis/destroy_service_spec.rb
+++ b/spec/services/award_emojis/destroy_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe AwardEmojis::DestroyService do
let_it_be(:user) { create(:user) }
let_it_be(:awardable) { create(:note) }
let_it_be(:project) { awardable.project }
+
let(:name) { 'thumbsup' }
let!(:award_from_other_user) do
create(:award_emoji, name: name, awardable: awardable, user: create(:user))
diff --git a/spec/services/award_emojis/toggle_service_spec.rb b/spec/services/award_emojis/toggle_service_spec.rb
index a7feeed50c6..74e97c66193 100644
--- a/spec/services/award_emojis/toggle_service_spec.rb
+++ b/spec/services/award_emojis/toggle_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe AwardEmojis::ToggleService do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public) }
let_it_be(:awardable) { create(:note, project: project) }
+
let(:name) { 'thumbsup' }
subject(:service) { described_class.new(awardable, name, user) }
diff --git a/spec/services/boards/destroy_service_spec.rb b/spec/services/boards/destroy_service_spec.rb
new file mode 100644
index 00000000000..cd6df832547
--- /dev/null
+++ b/spec/services/boards/destroy_service_spec.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Boards::DestroyService do
+ context 'with project board' do
+ let_it_be(:parent) { create(:project) }
+
+ let(:boards) { parent.boards }
+ let(:board_factory) { :board }
+
+ it_behaves_like 'board destroy service'
+ end
+
+ context 'with group board' do
+ let_it_be(:parent) { create(:group) }
+
+ let(:boards) { parent.boards }
+ let(:board_factory) { :board }
+
+ it_behaves_like 'board destroy service'
+ end
+end
diff --git a/spec/services/boards/issues/move_service_spec.rb b/spec/services/boards/issues/move_service_spec.rb
index 01a3ec72987..3a25f13762c 100644
--- a/spec/services/boards/issues/move_service_spec.rb
+++ b/spec/services/boards/issues/move_service_spec.rb
@@ -62,6 +62,7 @@ RSpec.describe Boards::Issues::MoveService do
let_it_be(:testing) { create(:group_label, group: group, name: 'Testing') }
let_it_be(:list1) { create(:list, board: board1, label: development, position: 0) }
let_it_be(:list2) { create(:list, board: board1, label: testing, position: 1) }
+
let(:params) { { board_id: board1.id, from_list_id: list1.id, to_list_id: list2.id } }
before do
diff --git a/spec/services/boards/lists/list_service_spec.rb b/spec/services/boards/lists/list_service_spec.rb
index 21619abf6aa..0c8a8dc7329 100644
--- a/spec/services/boards/lists/list_service_spec.rb
+++ b/spec/services/boards/lists/list_service_spec.rb
@@ -8,46 +8,32 @@ RSpec.describe Boards::Lists::ListService do
describe '#execute' do
let(:service) { described_class.new(parent, user) }
- shared_examples 'hidden lists' do
- let!(:list) { create(:list, board: board, label: label) }
-
- context 'when hide_backlog_list is true' do
- it 'hides backlog list' do
- board.update!(hide_backlog_list: true)
-
- expect(service.execute(board)).to match_array([board.closed_list, list])
- end
- end
-
- context 'when hide_closed_list is true' do
- it 'hides closed list' do
- board.update!(hide_closed_list: true)
-
- expect(service.execute(board)).to match_array([board.backlog_list, list])
- end
- end
- end
-
context 'when board parent is a project' do
- let(:project) { create(:project) }
- let(:board) { create(:board, project: project) }
- let(:label) { create(:label, project: project) }
- let!(:list) { create(:list, board: board, label: label) }
+ let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:board) { create(:board, project: project) }
+ let_it_be(:label) { create(:label, project: project) }
+ let_it_be(:list) { create(:list, board: board, label: label) }
+ let_it_be(:unrelated_list) { create(:list) }
+
let(:parent) { project }
it_behaves_like 'lists list service'
- it_behaves_like 'hidden lists'
end
context 'when board parent is a group' do
- let(:group) { create(:group) }
- let(:board) { create(:board, group: group) }
- let(:label) { create(:group_label, group: group) }
- let!(:list) { create(:list, board: board, label: label) }
+ let_it_be(:group) { create(:group) }
+ let_it_be_with_reload(:board) { create(:board, group: group) }
+ let_it_be(:label) { create(:group_label, group: group) }
+ let_it_be(:list) { create(:list, board: board, label: label) }
+ let_it_be(:unrelated_list) { create(:list) }
+
let(:parent) { group }
it_behaves_like 'lists list service'
- it_behaves_like 'hidden lists'
+ end
+
+ def create_backlog_list(board)
+ create(:backlog_list, board: board)
end
end
end
diff --git a/spec/services/boards/lists/update_service_spec.rb b/spec/services/boards/lists/update_service_spec.rb
index cdc7784469a..10fed9b7aac 100644
--- a/spec/services/boards/lists/update_service_spec.rb
+++ b/spec/services/boards/lists/update_service_spec.rb
@@ -6,47 +6,6 @@ RSpec.describe Boards::Lists::UpdateService do
let(:user) { create(:user) }
let!(:list) { create(:list, board: board, position: 0) }
- shared_examples 'moving list' do
- context 'when user can admin list' do
- it 'calls Lists::MoveService to update list position' do
- board.resource_parent.add_developer(user)
-
- expect(Boards::Lists::MoveService).to receive(:new).with(board.resource_parent, user, params).and_call_original
- expect_any_instance_of(Boards::Lists::MoveService).to receive(:execute).with(list)
-
- service.execute(list)
- end
- end
-
- context 'when user cannot admin list' do
- it 'does not call Lists::MoveService to update list position' do
- expect(Boards::Lists::MoveService).not_to receive(:new)
-
- service.execute(list)
- end
- end
- end
-
- shared_examples 'updating list preferences' do
- context 'when user can read list' do
- it 'updates list preference for user' do
- board.resource_parent.add_guest(user)
-
- service.execute(list)
-
- expect(list.preferences_for(user).collapsed).to eq(true)
- end
- end
-
- context 'when user cannot read list' do
- it 'does not update list preference for user' do
- service.execute(list)
-
- expect(list.preferences_for(user).collapsed).to be_nil
- end
- end
- end
-
describe '#execute' do
let(:service) { described_class.new(board.resource_parent, user, params) }
diff --git a/spec/services/bulk_create_integration_service_spec.rb b/spec/services/bulk_create_integration_service_spec.rb
index 3ac993972c6..479309572a5 100644
--- a/spec/services/bulk_create_integration_service_spec.rb
+++ b/spec/services/bulk_create_integration_service_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe BulkCreateIntegrationService do
described_class.new(integration, batch, association).execute
expect(created_integration.reload.data_fields.attributes.except(*excluded_attributes))
- .to eq(integration.data_fields.attributes.except(*excluded_attributes))
+ .to eq(integration.reload.data_fields.attributes.except(*excluded_attributes))
end
end
end
diff --git a/spec/services/bulk_update_integration_service_spec.rb b/spec/services/bulk_update_integration_service_spec.rb
index e7944f07bb7..e20bcd44923 100644
--- a/spec/services/bulk_update_integration_service_spec.rb
+++ b/spec/services/bulk_update_integration_service_spec.rb
@@ -68,8 +68,8 @@ RSpec.describe BulkUpdateIntegrationService do
it 'updates the data fields from the integration', :aggregate_failures do
described_class.new(subgroup_integration, batch).execute
- expect(integration.data_fields.attributes.except(*excluded_attributes))
- .to eq(subgroup_integration.data_fields.attributes.except(*excluded_attributes))
+ expect(integration.reload.data_fields.attributes.except(*excluded_attributes))
+ .to eq(subgroup_integration.reload.data_fields.attributes.except(*excluded_attributes))
expect(integration.data_fields.attributes.except(*excluded_attributes))
.not_to eq(excluded_integration.data_fields.attributes.except(*excluded_attributes))
diff --git a/spec/services/ci/abort_pipelines_service_spec.rb b/spec/services/ci/abort_pipelines_service_spec.rb
new file mode 100644
index 00000000000..e31a45cb123
--- /dev/null
+++ b/spec/services/ci/abort_pipelines_service_spec.rb
@@ -0,0 +1,94 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::AbortPipelinesService do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, namespace: user.namespace) }
+
+ let_it_be(:cancelable_pipeline, reload: true) { create(:ci_pipeline, :running, project: project, user: user) }
+ let_it_be(:manual_pipeline, reload: true) { create(:ci_pipeline, status: :manual, project: project, user: user) } # not cancelable
+ let_it_be(:other_users_pipeline, reload: true) { create(:ci_pipeline, :running, project: project, user: create(:user)) } # not this user's pipeline
+ let_it_be(:cancelable_build, reload: true) { create(:ci_build, :running, pipeline: cancelable_pipeline) }
+ let_it_be(:non_cancelable_build, reload: true) { create(:ci_build, :success, pipeline: cancelable_pipeline) }
+ let_it_be(:cancelable_stage, reload: true) { create(:ci_stage_entity, name: 'stageA', status: :running, pipeline: cancelable_pipeline, project: project) }
+ let_it_be(:non_cancelable_stage, reload: true) { create(:ci_stage_entity, name: 'stageB', status: :success, pipeline: cancelable_pipeline, project: project) }
+
+ describe '#execute' do
+ def expect_correct_cancellations
+ expect(cancelable_pipeline.finished_at).not_to be_nil
+ expect(cancelable_pipeline.status).to eq('failed')
+ expect((cancelable_pipeline.stages - [non_cancelable_stage]).map(&:status)).to all(eq('failed'))
+ expect(cancelable_build.status).to eq('failed')
+ expect(cancelable_build.finished_at).not_to be_nil
+
+ expect(manual_pipeline.status).not_to eq('failed')
+ expect(non_cancelable_stage.status).not_to eq('failed')
+ expect(non_cancelable_build.status).not_to eq('failed')
+ end
+
+ context 'with project pipelines' do
+ def abort_project_pipelines
+ described_class.new.execute(project.all_pipelines, :project_deleted)
+ end
+
+ it 'fails all running pipelines and related jobs' do
+ expect(abort_project_pipelines).to be_success
+
+ expect_correct_cancellations
+
+ expect(other_users_pipeline.status).to eq('failed')
+ expect(other_users_pipeline.failure_reason).to eq('project_deleted')
+ expect(other_users_pipeline.stages.map(&:status)).to all(eq('failed'))
+ end
+
+ it 'avoids N+1 queries' do
+ control_count = ActiveRecord::QueryRecorder.new { abort_project_pipelines }.count
+
+ pipelines = create_list(:ci_pipeline, 5, :running, project: project)
+ create_list(:ci_build, 5, :running, pipeline: pipelines.first)
+
+ expect { abort_project_pipelines }.not_to exceed_query_limit(control_count)
+ end
+
+ context 'with live build logs' do
+ before do
+ create(:ci_build_trace_chunk, build: cancelable_build)
+ end
+
+ it 'makes failed builds with stale trace visible' do
+ expect(Ci::Build.with_stale_live_trace.count).to eq 0
+
+ travel_to(2.days.ago) do
+ abort_project_pipelines
+ end
+
+ expect(Ci::Build.with_stale_live_trace.count).to eq 1
+ end
+ end
+ end
+
+ context 'with user pipelines' do
+ def abort_user_pipelines
+ described_class.new.execute(user.pipelines, :user_blocked)
+ end
+
+ it 'fails all running pipelines and related jobs' do
+ expect(abort_user_pipelines).to be_success
+
+ expect_correct_cancellations
+
+ expect(other_users_pipeline.status).not_to eq('failed')
+ end
+
+ it 'avoids N+1 queries' do
+ control_count = ActiveRecord::QueryRecorder.new { abort_user_pipelines }.count
+
+ pipelines = create_list(:ci_pipeline, 5, :running, project: project, user: user)
+ create_list(:ci_build, 5, :running, pipeline: pipelines.first)
+
+ expect { abort_user_pipelines }.not_to exceed_query_limit(control_count)
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/abort_project_pipelines_service_spec.rb b/spec/services/ci/abort_project_pipelines_service_spec.rb
deleted file mode 100644
index 9af909ac2ab..00000000000
--- a/spec/services/ci/abort_project_pipelines_service_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Ci::AbortProjectPipelinesService do
- let_it_be(:project) { create(:project) }
- let_it_be(:pipeline) { create(:ci_pipeline, :running, project: project) }
- let_it_be(:build) { create(:ci_build, :running, pipeline: pipeline) }
-
- describe '#execute' do
- it 'cancels all running pipelines and related jobs' do
- result = described_class.new.execute(project)
-
- expect(result).to be_success
- expect(pipeline.reload).to be_canceled
- expect(build.reload).to be_canceled
- end
-
- it 'avoids N+1 queries' do
- control_count = ActiveRecord::QueryRecorder.new { described_class.new.execute(project) }.count
-
- pipelines = create_list(:ci_pipeline, 5, :running, project: project)
- create_list(:ci_build, 5, :running, pipeline: pipelines.first)
-
- expect { described_class.new.execute(project) }.not_to exceed_query_limit(control_count)
- end
- end
-
- context 'when feature disabled' do
- before do
- stub_feature_flags(abort_deleted_project_pipelines: false)
- end
-
- it 'does not abort the pipeline' do
- result = described_class.new.execute(project)
-
- expect(result).to be(nil)
- expect(pipeline.reload).to be_running
- expect(build.reload).to be_running
- end
- end
-end
diff --git a/spec/services/ci/after_requeue_job_service_spec.rb b/spec/services/ci/after_requeue_job_service_spec.rb
new file mode 100644
index 00000000000..a2147759dba
--- /dev/null
+++ b/spec/services/ci/after_requeue_job_service_spec.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::AfterRequeueJobService do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { project.owner }
+
+ let(:pipeline) { create(:ci_pipeline, project: project) }
+
+ let!(:build) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
+ let!(:test1) { create(:ci_build, :success, pipeline: pipeline, stage_idx: 1) }
+ let!(:test2) { create(:ci_build, :skipped, pipeline: pipeline, stage_idx: 1) }
+
+ subject(:execute_service) { described_class.new(project, user).execute(build) }
+
+ it 'marks subsequent skipped jobs as processable' do
+ expect(test1.reload).to be_success
+ expect(test2.reload).to be_skipped
+
+ execute_service
+
+ expect(test1.reload).to be_success
+ expect(test2.reload).to be_created
+ end
+
+ context 'when the pipeline is a downstream pipeline and the bridge uses strategy: depend' do
+ let!(:trigger_job) { create(:ci_bridge, :strategy_depend, status: 'success') }
+
+ before do
+ create(:ci_sources_pipeline, pipeline: pipeline, source_job: trigger_job)
+ end
+
+ it 'marks source bridge as pending' do
+ expect { execute_service }.to change { trigger_job.reload.status }.from('success').to('pending')
+ end
+ end
+end
diff --git a/spec/services/ci/archive_trace_service_spec.rb b/spec/services/ci/archive_trace_service_spec.rb
index 07ea314debc..a4f498f17c3 100644
--- a/spec/services/ci/archive_trace_service_spec.rb
+++ b/spec/services/ci/archive_trace_service_spec.rb
@@ -24,6 +24,52 @@ RSpec.describe Ci::ArchiveTraceService, '#execute' do
it 'does not create an archived trace' do
expect { subject }.not_to change { Ci::JobArtifact.trace.count }
end
+
+ context 'when live trace chunks still exist' do
+ before do
+ create(:ci_build_trace_chunk, build: job)
+ end
+
+ context 'when the feature flag `erase_traces_from_already_archived_jobs_when_archiving_again` is enabled' do
+ before do
+ stub_feature_flags(erase_traces_from_already_archived_jobs_when_archiving_again: true)
+ end
+
+ it 'removes the trace chunks' do
+ expect { subject }.to change { job.trace_chunks.count }.to(0)
+ end
+
+ context 'when associated data does not exist' do
+ before do
+ job.job_artifacts_trace.file.remove!
+ end
+
+ it 'removes the trace artifact' do
+ expect { subject }.to change { job.reload.job_artifacts_trace }.to(nil)
+ end
+ end
+ end
+
+ context 'when the feature flag `erase_traces_from_already_archived_jobs_when_archiving_again` is disabled' do
+ before do
+ stub_feature_flags(erase_traces_from_already_archived_jobs_when_archiving_again: false)
+ end
+
+ it 'does not remove the trace chunks' do
+ expect { subject }.not_to change { job.trace_chunks.count }
+ end
+
+ context 'when associated data does not exist' do
+ before do
+ job.job_artifacts_trace.file.remove!
+ end
+
+ it 'does not remove the trace artifact' do
+ expect { subject }.not_to change { job.reload.job_artifacts_trace }
+ end
+ end
+ end
+ end
end
context 'when job does not have trace' do
diff --git a/spec/services/ci/cancel_user_pipelines_service_spec.rb b/spec/services/ci/cancel_user_pipelines_service_spec.rb
deleted file mode 100644
index 8491242dfd5..00000000000
--- a/spec/services/ci/cancel_user_pipelines_service_spec.rb
+++ /dev/null
@@ -1,35 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Ci::CancelUserPipelinesService do
- describe '#execute' do
- let(:user) { create(:user) }
-
- subject { described_class.new.execute(user) }
-
- context 'when user has running CI pipelines' do
- let(:pipeline) { create(:ci_pipeline, :running, user: user) }
- let!(:build) { create(:ci_build, :running, pipeline: pipeline) }
-
- it 'cancels all running pipelines and related jobs', :sidekiq_might_not_need_inline do
- subject
-
- expect(pipeline.reload).to be_canceled
- expect(build.reload).to be_canceled
- end
- end
-
- context 'when an error occurs' do
- it 'raises a service level error' do
- service = double(execute: ServiceResponse.error(message: 'Error canceling pipeline'))
- allow(::Ci::CancelUserPipelinesService).to receive(:new).and_return(service)
-
- result = subject
-
- expect(result).to be_a(ServiceResponse)
- expect(result).to be_error
- end
- end
- end
-end
diff --git a/spec/services/ci/create_downstream_pipeline_service_spec.rb b/spec/services/ci/create_downstream_pipeline_service_spec.rb
index 860932d4fde..dd10fb017aa 100644
--- a/spec/services/ci/create_downstream_pipeline_service_spec.rb
+++ b/spec/services/ci/create_downstream_pipeline_service_spec.rb
@@ -112,7 +112,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'updates bridge status when downstream pipeline gets processed' do
pipeline = service.execute(bridge)
- expect(pipeline.reload).to be_pending
+ expect(pipeline.reload).to be_created
expect(bridge.reload).to be_success
end
@@ -227,7 +227,7 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute' do
it 'updates bridge status when downstream pipeline gets processed' do
pipeline = service.execute(bridge)
- expect(pipeline.reload).to be_pending
+ expect(pipeline.reload).to be_created
expect(bridge.reload).to be_success
end
diff --git a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
index 9cf66dfceb0..d4e9946ac46 100644
--- a/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/cross_project_pipeline_spec.rb
@@ -40,6 +40,7 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
it 'creates bridge job with resource group' do
pipeline = create_pipeline!
+ Ci::InitialPipelineProcessWorker.new.perform(pipeline.id)
test = pipeline.statuses.find_by(name: 'instrumentation_test')
expect(pipeline).to be_created_successfully
diff --git a/spec/services/ci/create_pipeline_service/needs_spec.rb b/spec/services/ci/create_pipeline_service/needs_spec.rb
index a6b0a9662c9..4521067cd52 100644
--- a/spec/services/ci/create_pipeline_service/needs_spec.rb
+++ b/spec/services/ci/create_pipeline_service/needs_spec.rb
@@ -202,7 +202,7 @@ RSpec.describe Ci::CreatePipelineService do
YAML
end
- it 'creates a pipeline with build_a and test_b pending; deploy_b manual' do
+ it 'creates a pipeline with build_a and test_b pending; deploy_b manual', :sidekiq_inline do
processables = pipeline.processables
build_a = processables.find { |processable| processable.name == 'build_a' }
diff --git a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
index a3818937113..5ea75c2253b 100644
--- a/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
+++ b/spec/services/ci/create_pipeline_service/parent_child_pipeline_spec.rb
@@ -91,6 +91,7 @@ RSpec.describe Ci::CreatePipelineService, '#execute' do
it 'creates bridge job with resource group', :aggregate_failures do
pipeline = create_pipeline!
+ Ci::InitialPipelineProcessWorker.new.perform(pipeline.id)
test = pipeline.statuses.find_by(name: 'instrumentation_test')
expect(pipeline).to be_created_successfully
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index e97e74c1515..33ec6aacc44 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -151,11 +151,29 @@ RSpec.describe Ci::CreatePipelineService do
context 'variables:' do
let(:config) do
<<-EOY
- job:
+ variables:
+ VAR4: workflow var 4
+ VAR5: workflow var 5
+ VAR7: workflow var 7
+
+ workflow:
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ variables:
+ VAR4: overridden workflow var 4
+ - if: $CI_COMMIT_REF_NAME =~ /feature/
+ variables:
+ VAR5: overridden workflow var 5
+ VAR6: new workflow var 6
+ VAR7: overridden workflow var 7
+ - when: always
+
+ job1:
script: "echo job1"
variables:
- VAR1: my var 1
- VAR2: my var 2
+ VAR1: job var 1
+ VAR2: job var 2
+ VAR5: job var 5
rules:
- if: $CI_COMMIT_REF_NAME =~ /master/
variables:
@@ -164,45 +182,117 @@ RSpec.describe Ci::CreatePipelineService do
variables:
VAR2: overridden var 2
VAR3: new var 3
+ VAR7: overridden var 7
+ - when: on_success
+
+ job2:
+ script: "echo job2"
+ inherit:
+ variables: [VAR4, VAR6, VAR7]
+ variables:
+ VAR4: job var 4
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ variables:
+ VAR7: overridden var 7
- when: on_success
EOY
end
- let(:job) { pipeline.builds.find_by(name: 'job') }
+ let(:job1) { pipeline.builds.find_by(name: 'job1') }
+ let(:job2) { pipeline.builds.find_by(name: 'job2') }
+
+ let(:variable_keys) { %w(VAR1 VAR2 VAR3 VAR4 VAR5 VAR6 VAR7) }
+
+ context 'when no match' do
+ let(:ref) { 'refs/heads/wip' }
+
+ it 'does not affect vars' do
+ expect(job1.scoped_variables.to_hash.values_at(*variable_keys)).to eq(
+ ['job var 1', 'job var 2', nil, 'workflow var 4', 'job var 5', nil, 'workflow var 7']
+ )
+
+ expect(job2.scoped_variables.to_hash.values_at(*variable_keys)).to eq(
+ [nil, nil, nil, 'job var 4', nil, nil, 'workflow var 7']
+ )
+ end
+ end
context 'when matching to the first rule' do
let(:ref) { 'refs/heads/master' }
- it 'overrides VAR1' do
- variables = job.scoped_variables.to_hash
+ it 'overrides variables' do
+ expect(job1.scoped_variables.to_hash.values_at(*variable_keys)).to eq(
+ ['overridden var 1', 'job var 2', nil, 'overridden workflow var 4', 'job var 5', nil, 'workflow var 7']
+ )
- expect(variables['VAR1']).to eq('overridden var 1')
- expect(variables['VAR2']).to eq('my var 2')
- expect(variables['VAR3']).to be_nil
+ expect(job2.scoped_variables.to_hash.values_at(*variable_keys)).to eq(
+ [nil, nil, nil, 'job var 4', nil, nil, 'overridden var 7']
+ )
+ end
+
+ context 'when FF ci_workflow_rules_variables is disabled' do
+ before do
+ stub_feature_flags(ci_workflow_rules_variables: false)
+ end
+
+ it 'does not affect workflow variables but affects job variables' do
+ expect(job1.scoped_variables.to_hash.values_at(*variable_keys)).to eq(
+ ['overridden var 1', 'job var 2', nil, 'workflow var 4', 'job var 5', nil, 'workflow var 7']
+ )
+
+ expect(job2.scoped_variables.to_hash.values_at(*variable_keys)).to eq(
+ [nil, nil, nil, 'job var 4', nil, nil, 'overridden var 7']
+ )
+ end
end
end
context 'when matching to the second rule' do
let(:ref) { 'refs/heads/feature' }
- it 'overrides VAR2 and adds VAR3' do
- variables = job.scoped_variables.to_hash
+ it 'overrides variables' do
+ expect(job1.scoped_variables.to_hash.values_at(*variable_keys)).to eq(
+ ['job var 1', 'overridden var 2', 'new var 3', 'workflow var 4', 'job var 5', 'new workflow var 6', 'overridden var 7']
+ )
- expect(variables['VAR1']).to eq('my var 1')
- expect(variables['VAR2']).to eq('overridden var 2')
- expect(variables['VAR3']).to eq('new var 3')
+ expect(job2.scoped_variables.to_hash.values_at(*variable_keys)).to eq(
+ [nil, nil, nil, 'job var 4', nil, 'new workflow var 6', 'overridden workflow var 7']
+ )
end
end
- context 'when no match' do
- let(:ref) { 'refs/heads/wip' }
+ context 'using calculated workflow var in job rules' do
+ let(:config) do
+ <<-EOY
+ variables:
+ VAR1: workflow var 4
+
+ workflow:
+ rules:
+ - if: $CI_COMMIT_REF_NAME =~ /master/
+ variables:
+ VAR1: overridden workflow var 4
+ - when: always
+
+ job:
+ script: "echo job1"
+ rules:
+ - if: $VAR1 =~ "overridden workflow var 4"
+ variables:
+ VAR1: overridden var 1
+ - when: on_success
+ EOY
+ end
- it 'does not affect vars' do
- variables = job.scoped_variables.to_hash
+ let(:job) { pipeline.builds.find_by(name: 'job') }
+
+ context 'when matching the first workflow condition' do
+ let(:ref) { 'refs/heads/master' }
- expect(variables['VAR1']).to eq('my var 1')
- expect(variables['VAR2']).to eq('my var 2')
- expect(variables['VAR3']).to be_nil
+ it 'uses the VAR1 value assigned by the job rules' do
+ expect(job.scoped_variables.to_hash['VAR1']).to eq('overridden var 1')
+ end
end
end
end
@@ -230,8 +320,8 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'matching the first rule in the list' do
- it 'saves a pending pipeline' do
- expect(pipeline).to be_pending
+ it 'saves a created pipeline' do
+ expect(pipeline).to be_created
expect(pipeline).to be_persisted
end
end
@@ -239,8 +329,8 @@ RSpec.describe Ci::CreatePipelineService do
context 'matching the last rule in the list' do
let(:ref) { 'refs/heads/feature' }
- it 'saves a pending pipeline' do
- expect(pipeline).to be_pending
+ it 'saves a created pipeline' do
+ expect(pipeline).to be_created
expect(pipeline).to be_persisted
end
end
@@ -280,8 +370,8 @@ RSpec.describe Ci::CreatePipelineService do
end
context 'matching the first rule in the list' do
- it 'saves a pending pipeline' do
- expect(pipeline).to be_pending
+ it 'saves a created pipeline' do
+ expect(pipeline).to be_created
expect(pipeline).to be_persisted
end
end
@@ -305,8 +395,8 @@ RSpec.describe Ci::CreatePipelineService do
context 'with partial match' do
let(:ref) { 'refs/heads/feature' }
- it 'saves a pending pipeline' do
- expect(pipeline).to be_pending
+ it 'saves a created pipeline' do
+ expect(pipeline).to be_created
expect(pipeline).to be_persisted
end
end
@@ -349,8 +439,8 @@ RSpec.describe Ci::CreatePipelineService do
context 'where workflow passes and the job passes' do
let(:ref) { 'refs/heads/feature' }
- it 'saves a pending pipeline' do
- expect(pipeline).to be_pending
+ it 'saves a created pipeline' do
+ expect(pipeline).to be_created
expect(pipeline).to be_persisted
end
end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 9fafc57a770..98c85234fe7 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe Ci::CreatePipelineService do
expect(pipeline).to be_push
expect(pipeline).to eq(project.ci_pipelines.last)
expect(pipeline).to have_attributes(user: user)
- expect(pipeline).to have_attributes(status: 'pending')
+ expect(pipeline).to have_attributes(status: 'created')
expect(pipeline.iid).not_to be_nil
expect(pipeline.repository_source?).to be true
expect(pipeline.builds.first).to be_kind_of(Ci::Build)
@@ -71,19 +71,21 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'increments the prometheus counter' do
- expect(Gitlab::Metrics).to receive(:counter)
- .with(:pipelines_created_total, "Counter of pipelines created")
- .and_call_original
- allow(Gitlab::Metrics).to receive(:counter).and_call_original # allow other counters
+ counter = spy('pipeline created counter')
+
+ allow(Gitlab::Ci::Pipeline::Metrics)
+ .to receive(:pipelines_created_counter).and_return(counter)
pipeline
+
+ expect(counter).to have_received(:increment)
end
it 'records pipeline size in a prometheus histogram' do
histogram = spy('pipeline size histogram')
allow(Gitlab::Ci::Pipeline::Metrics)
- .to receive(:new).and_return(histogram)
+ .to receive(:pipeline_size_histogram).and_return(histogram)
execute_service
@@ -253,7 +255,7 @@ RSpec.describe Ci::CreatePipelineService do
pipeline
pipeline_on_previous_commit
- expect(pipeline.reload).to have_attributes(status: 'pending', auto_canceled_by_id: nil)
+ expect(pipeline.reload).to have_attributes(status: 'created', auto_canceled_by_id: nil)
end
it 'auto cancel pending non-HEAD pipelines', :sidekiq_might_not_need_inline do
@@ -263,8 +265,8 @@ RSpec.describe Ci::CreatePipelineService do
expect(pipeline_on_previous_commit.reload).to have_attributes(status: 'canceled', auto_canceled_by_id: pipeline.id)
end
- it 'cancels running outdated pipelines', :sidekiq_might_not_need_inline do
- pipeline_on_previous_commit.run
+ it 'cancels running outdated pipelines', :sidekiq_inline do
+ pipeline_on_previous_commit.reload.run
head_pipeline = execute_service
expect(pipeline_on_previous_commit.reload).to have_attributes(status: 'canceled', auto_canceled_by_id: head_pipeline.id)
@@ -278,13 +280,13 @@ RSpec.describe Ci::CreatePipelineService do
end
it 'does not cancel pipelines from the other branches' do
- pending_pipeline = execute_service(
+ new_pipeline = execute_service(
ref: 'refs/heads/feature',
after: previous_commit_sha_from_ref('feature')
)
pipeline
- expect(pending_pipeline.reload).to have_attributes(status: 'pending', auto_canceled_by_id: nil)
+ expect(new_pipeline.reload).to have_attributes(status: 'created', auto_canceled_by_id: nil)
end
context 'when the interruptible attribute is' do
@@ -465,12 +467,12 @@ RSpec.describe Ci::CreatePipelineService do
project.update!(auto_cancel_pending_pipelines: 'disabled')
end
- it 'does not auto cancel pending non-HEAD pipelines' do
+ it 'does not auto cancel created non-HEAD pipelines' do
pipeline_on_previous_commit
pipeline
expect(pipeline_on_previous_commit.reload)
- .to have_attributes(status: 'pending', auto_canceled_by_id: nil)
+ .to have_attributes(status: 'created', auto_canceled_by_id: nil)
end
end
@@ -580,6 +582,13 @@ RSpec.describe Ci::CreatePipelineService do
it_behaves_like 'a failed pipeline'
+ it 'increments the error metric' do
+ stub_ci_pipeline_yaml_file(ci_yaml)
+
+ counter = Gitlab::Metrics.counter(:gitlab_ci_pipeline_failure_reasons, 'desc')
+ expect { execute_service }.to change { counter.get(reason: 'config_error') }.by(1)
+ end
+
context 'when receive git commit' do
before do
allow_any_instance_of(Ci::Pipeline).to receive(:git_commit_message) { message }
@@ -770,7 +779,7 @@ RSpec.describe Ci::CreatePipelineService do
stub_ci_pipeline_yaml_file(config)
end
- it 'does not create a new pipeline' do
+ it 'does not create a new pipeline', :sidekiq_inline do
result = execute_service
expect(result).to be_persisted
diff --git a/spec/services/ci/create_web_ide_terminal_service_spec.rb b/spec/services/ci/create_web_ide_terminal_service_spec.rb
index c1c94e30018..c1acf8fd60c 100644
--- a/spec/services/ci/create_web_ide_terminal_service_spec.rb
+++ b/spec/services/ci/create_web_ide_terminal_service_spec.rb
@@ -66,6 +66,25 @@ RSpec.describe Ci::CreateWebIdeTerminalService do
it_behaves_like 'be successful'
end
+
+ context 'for configuration with variables' do
+ let(:config_content) do
+ <<-EOS
+ terminal:
+ script: rspec
+ variables:
+ KEY1: VAL1
+ EOS
+ end
+
+ it_behaves_like 'be successful'
+
+ it 'saves the variables' do
+ expect(subject[:pipeline].builds[0].variables).to include(
+ key: 'KEY1', value: 'VAL1', public: true, masked: false
+ )
+ end
+ end
end
end
diff --git a/spec/services/ci/disable_user_pipeline_schedules_service_spec.rb b/spec/services/ci/disable_user_pipeline_schedules_service_spec.rb
new file mode 100644
index 00000000000..4ff8dcf075b
--- /dev/null
+++ b/spec/services/ci/disable_user_pipeline_schedules_service_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::DisableUserPipelineSchedulesService do
+ describe '#execute' do
+ let(:user) { create(:user) }
+
+ subject(:service) { described_class.new.execute(user) }
+
+ context 'when user has active pipeline schedules' do
+ let(:owned_pipeline_schedule) { create(:ci_pipeline_schedule, active: true, owner: user) }
+
+ it 'disables all active pipeline schedules', :aggregate_failures do
+ expect { service }.to change { owned_pipeline_schedule.reload.active? }.from(true).to(false)
+ end
+ end
+ end
+end
diff --git a/spec/services/ci/drop_pipeline_service_spec.rb b/spec/services/ci/drop_pipeline_service_spec.rb
new file mode 100644
index 00000000000..4adbb99b9e2
--- /dev/null
+++ b/spec/services/ci/drop_pipeline_service_spec.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::DropPipelineService do
+ let_it_be(:user) { create(:user) }
+
+ let(:failure_reason) { :user_blocked }
+
+ let!(:cancelable_pipeline) { create(:ci_pipeline, :running, user: user) }
+ let!(:running_build) { create(:ci_build, :running, pipeline: cancelable_pipeline) }
+ let!(:success_pipeline) { create(:ci_pipeline, :success, user: user) }
+ let!(:success_build) { create(:ci_build, :success, pipeline: success_pipeline) }
+
+ describe '#execute_async_for_all' do
+ subject { described_class.new.execute_async_for_all(user.pipelines, failure_reason, user) }
+
+ it 'drops only cancelable pipelines asynchronously', :sidekiq_inline do
+ subject
+
+ expect(cancelable_pipeline.reload).to be_failed
+ expect(running_build.reload).to be_failed
+
+ expect(success_pipeline.reload).to be_success
+ expect(success_build.reload).to be_success
+ end
+ end
+
+ describe '#execute' do
+ subject { described_class.new.execute(cancelable_pipeline, failure_reason) }
+
+ def drop_pipeline!(pipeline)
+ described_class.new.execute(pipeline, failure_reason)
+ end
+
+ it 'drops each cancelable build in the pipeline', :aggregate_failures do
+ drop_pipeline!(cancelable_pipeline)
+
+ expect(running_build.reload).to be_failed
+ expect(running_build.failure_reason).to eq(failure_reason.to_s)
+
+ expect(success_build.reload).to be_success
+ end
+
+ it 'avoids N+1 queries when reading data' do
+ control_count = ActiveRecord::QueryRecorder.new do
+ drop_pipeline!(cancelable_pipeline)
+ end.count
+
+ writes_per_build = 2
+ expected_reads_count = control_count - writes_per_build
+
+ create_list(:ci_build, 5, :running, pipeline: cancelable_pipeline)
+
+ expect do
+ drop_pipeline!(cancelable_pipeline)
+ end.not_to exceed_query_limit(expected_reads_count + (5 * writes_per_build))
+ end
+ end
+end
diff --git a/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb b/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb
index e2bdfae27f0..0cbeaa5446b 100644
--- a/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb
+++ b/spec/services/ci/external_pull_requests/create_pipeline_service_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe Ci::ExternalPullRequests::CreatePipelineService do
expect(subject).to eq(project.ci_pipelines.last)
expect(subject.external_pull_request).to eq(pull_request)
expect(subject.user).to eq(user)
- expect(subject.status).to eq('pending')
+ expect(subject.status).to eq('created')
expect(subject.ref).to eq(pull_request.source_branch)
expect(subject.sha).to eq(pull_request.source_sha)
expect(subject.source_sha).to eq(pull_request.source_sha)
diff --git a/spec/services/ci/create_job_artifacts_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb
index 1efd1d390a2..22aa9e62c6f 100644
--- a/spec/services/ci/create_job_artifacts_service_spec.rb
+++ b/spec/services/ci/job_artifacts/create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreateJobArtifactsService do
+RSpec.describe Ci::JobArtifacts::CreateService do
let_it_be(:project) { create(:project) }
let(:service) { described_class.new(job) }
let(:job) { create(:ci_build, project: project) }
diff --git a/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
index d315dd35632..04fa55068f2 100644
--- a/spec/services/ci/destroy_expired_job_artifacts_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared_state do
+RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_shared_state do
include ExclusiveLeaseHelpers
let(:service) { described_class.new }
@@ -24,7 +24,7 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
job = create(:ci_build, pipeline: artifact.job.pipeline)
create(:ci_job_artifact, :archive, :expired, job: job)
- stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_LIMIT', 1)
+ stub_const("#{described_class}::LOOP_LIMIT", 1)
end
it 'performs the smallest number of queries for job_artifacts' do
@@ -113,7 +113,7 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
context 'when failed to destroy artifact' do
before do
- stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_LIMIT', 10)
+ stub_const("#{described_class}::LOOP_LIMIT", 10)
end
context 'when the import fails' do
@@ -159,8 +159,8 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
let!(:second_artifact) { create(:ci_job_artifact, expire_at: 1.day.ago) }
before do
- stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_TIMEOUT', 0.seconds)
- stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
+ stub_const("#{described_class}::LOOP_TIMEOUT", 0.seconds)
+ stub_const("#{described_class}::BATCH_SIZE", 1)
second_artifact.job.pipeline.unlocked!
end
@@ -176,8 +176,8 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
context 'when loop reached loop limit' do
before do
- stub_const('Ci::DestroyExpiredJobArtifactsService::LOOP_LIMIT', 1)
- stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
+ stub_const("#{described_class}::LOOP_LIMIT", 1)
+ stub_const("#{described_class}::BATCH_SIZE", 1)
second_artifact.job.pipeline.unlocked!
end
@@ -209,7 +209,7 @@ RSpec.describe Ci::DestroyExpiredJobArtifactsService, :clean_gitlab_redis_shared
context 'when there are artifacts more than batch sizes' do
before do
- stub_const('Ci::DestroyExpiredJobArtifactsService::BATCH_SIZE', 1)
+ stub_const("#{described_class}::BATCH_SIZE", 1)
second_artifact.job.pipeline.unlocked!
end
diff --git a/spec/services/ci/job_artifacts_destroy_batch_service_spec.rb b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
index 74fbbf28ef1..52aaf73d67e 100644
--- a/spec/services/ci/job_artifacts_destroy_batch_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::JobArtifactsDestroyBatchService do
+RSpec.describe Ci::JobArtifacts::DestroyBatchService do
include ExclusiveLeaseHelpers
let(:artifacts) { Ci::JobArtifact.all }
diff --git a/spec/services/ci/pipeline_artifacts/destroy_expired_artifacts_service_spec.rb b/spec/services/ci/pipeline_artifacts/destroy_all_expired_service_spec.rb
index ac1a590face..3dc4f35df22 100644
--- a/spec/services/ci/pipeline_artifacts/destroy_expired_artifacts_service_spec.rb
+++ b/spec/services/ci/pipeline_artifacts/destroy_all_expired_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::PipelineArtifacts::DestroyExpiredArtifactsService do
+RSpec.describe Ci::PipelineArtifacts::DestroyAllExpiredService do
let(:service) { described_class.new }
describe '.execute' do
@@ -10,7 +10,7 @@ RSpec.describe Ci::PipelineArtifacts::DestroyExpiredArtifactsService do
context 'when timeout happens' do
before do
- stub_const('Ci::PipelineArtifacts::DestroyExpiredArtifactsService::LOOP_TIMEOUT', 0.1.seconds)
+ stub_const('Ci::PipelineArtifacts::DestroyAllExpiredService::LOOP_TIMEOUT', 0.1.seconds)
allow(service).to receive(:destroy_artifacts_batch) { true }
end
@@ -27,8 +27,8 @@ RSpec.describe Ci::PipelineArtifacts::DestroyExpiredArtifactsService do
context 'when the loop limit is reached' do
before do
- stub_const('::Ci::PipelineArtifacts::DestroyExpiredArtifactsService::LOOP_LIMIT', 1)
- stub_const('::Ci::PipelineArtifacts::DestroyExpiredArtifactsService::BATCH_SIZE', 1)
+ stub_const('::Ci::PipelineArtifacts::DestroyAllExpiredService::LOOP_LIMIT', 1)
+ stub_const('::Ci::PipelineArtifacts::DestroyAllExpiredService::BATCH_SIZE', 1)
create_list(:ci_pipeline_artifact, 2, expire_at: 1.week.ago)
end
@@ -44,7 +44,7 @@ RSpec.describe Ci::PipelineArtifacts::DestroyExpiredArtifactsService do
context 'when there are artifacts more than batch sizes' do
before do
- stub_const('Ci::PipelineArtifacts::DestroyExpiredArtifactsService::BATCH_SIZE', 1)
+ stub_const('Ci::PipelineArtifacts::DestroyAllExpiredService::BATCH_SIZE', 1)
create_list(:ci_pipeline_artifact, 2, expire_at: 1.week.ago)
end
diff --git a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
index a9f9db8c689..572808cd2db 100644
--- a/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
+++ b/spec/services/ci/pipeline_processing/shared_processing_service_tests_with_yaml.rb
@@ -49,7 +49,13 @@ RSpec.shared_context 'Pipeline Processing Service Tests With Yaml' do
statuses = pipeline.latest_statuses.by_name(job_names).to_a
expect(statuses.count).to eq(job_names.count) # ensure that we have the same counts
- statuses.each { |status| status.public_send("#{event}!") }
+ statuses.each do |status|
+ if event == 'play'
+ status.play(user)
+ else
+ status.public_send("#{event}!")
+ end
+ end
end
end
end
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_build_test_two_manual_review_test_staging_production.yml b/spec/services/ci/pipeline_processing/test_cases/dag_build_test_two_manual_review_test_staging_production.yml
new file mode 100644
index 00000000000..ed009ee4f25
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_build_test_two_manual_review_test_staging_production.yml
@@ -0,0 +1,171 @@
+config:
+ stages: [build, test, review, deploy, post_deploy]
+
+ build:
+ stage: build
+ script: exit 0
+
+ test:
+ stage: test
+ script: exit 0
+
+ release_test1:
+ stage: test
+ when: manual
+ script: exit 0
+
+ release_test2:
+ stage: test
+ when: manual
+ script: exit 0
+
+ review:
+ stage: review
+ script: exit 0
+ needs: [test, release_test1, release_test2]
+
+ staging:
+ stage: deploy
+ script: exit 0
+ needs: [release_test1]
+
+ production:
+ stage: deploy
+ script: exit 0
+ needs: [release_test2]
+
+ after_deploy:
+ stage: post_deploy
+ script: exit 0
+ needs: [production]
+
+ handle_failure:
+ stage: post_deploy
+ when: on_failure
+ script: exit 0
+
+init:
+ expect:
+ pipeline: pending
+ stages:
+ build: pending
+ test: created
+ review: created
+ deploy: created
+ post_deploy: created
+ jobs:
+ build: pending
+ test: created
+ release_test1: created
+ release_test2: created
+ review: created
+ staging: created
+ production: created
+ after_deploy: created
+ handle_failure: created
+
+transitions:
+ - event: success
+ jobs: [build]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: pending
+ review: skipped
+ deploy: skipped
+ post_deploy: pending
+ jobs:
+ build: success
+ test: pending
+ release_test1: manual
+ release_test2: manual
+ review: skipped
+ staging: skipped
+ production: skipped
+ after_deploy: skipped
+ handle_failure: created
+
+ - event: success
+ jobs: [test]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: success
+ review: skipped
+ deploy: skipped
+ post_deploy: skipped
+ jobs:
+ build: success
+ test: success
+ release_test1: manual
+ release_test2: manual
+ review: skipped
+ staging: skipped
+ production: skipped
+ after_deploy: skipped
+ handle_failure: skipped
+
+ - event: play
+ jobs: [release_test1]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: running
+ review: skipped
+ deploy: pending
+ post_deploy: pending
+ jobs:
+ build: success
+ test: success
+ release_test1: pending
+ release_test2: manual
+ review: skipped
+ staging: created
+ production: skipped
+ after_deploy: skipped
+ handle_failure: created
+
+ - event: success
+ jobs: [release_test1]
+ expect:
+ pipeline: running
+ stages:
+ build: success
+ test: success
+ review: skipped
+ deploy: pending
+ post_deploy: pending
+ jobs:
+ build: success
+ test: success
+ release_test1: success
+ release_test2: manual
+ review: skipped
+ staging: pending
+ production: skipped
+ after_deploy: skipped
+ handle_failure: created
+
+ - event: success
+ jobs: [staging]
+ expect:
+ pipeline: success
+ stages:
+ build: success
+ test: success
+ review: skipped
+ deploy: success
+ post_deploy: skipped
+ jobs:
+ build: success
+ test: success
+ release_test1: success
+ release_test2: manual
+ review: skipped
+ staging: success
+ production: skipped
+ after_deploy: skipped
+ handle_failure: skipped
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false.yml
index 1d61cd24f8c..7987f4568a4 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false.yml
@@ -21,7 +21,7 @@ init:
deploy: created
transitions:
- - event: enqueue
+ - event: play
jobs: [test]
expect:
pipeline: pending
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_always.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_always.yml
index bb8723aa303..ea7f0f06c50 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_always.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_always.yml
@@ -22,7 +22,7 @@ init:
deploy: created
transitions:
- - event: enqueue
+ - event: play
jobs: [test]
expect:
pipeline: pending
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_on_failure.yml
index 3099a94befb..5c839ebc0e9 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_on_failure.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_false_deploy_on_failure.yml
@@ -22,7 +22,7 @@ init:
deploy: created
transitions:
- - event: enqueue
+ - event: play
jobs: [test]
expect:
pipeline: pending
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml
index 81aad4940b6..2d379f2d7c5 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_deploy_on_failure.yml
@@ -22,7 +22,7 @@ init:
deploy: skipped
transitions:
- - event: enqueue
+ - event: play
jobs: [test]
expect:
pipeline: pending
@@ -31,7 +31,7 @@ transitions:
deploy: skipped
jobs:
test: pending
- deploy: skipped
+ deploy: created
- event: drop
jobs: [test]
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_fails.yml
index b8fcdd1566a..fbe04c7e18e 100644
--- a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_fails.yml
@@ -21,7 +21,7 @@ init:
deploy: skipped
transitions:
- - event: enqueue
+ - event: play
jobs: [test]
expect:
pipeline: pending
@@ -30,7 +30,7 @@ transitions:
deploy: skipped
jobs:
test: pending
- deploy: skipped
+ deploy: created
- event: run
jobs: [test]
@@ -41,15 +41,26 @@ transitions:
deploy: skipped
jobs:
test: running
- deploy: skipped
+ deploy: created
- event: drop
jobs: [test]
expect:
+ pipeline: running
+ stages:
+ test: success
+ deploy: pending
+ jobs:
+ test: failed
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
pipeline: success
stages:
test: success
- deploy: skipped
+ deploy: success
jobs:
test: failed
- deploy: skipped
+ deploy: success
diff --git a/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_succeeds.yml b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_succeeds.yml
new file mode 100644
index 00000000000..68ef057f62d
--- /dev/null
+++ b/spec/services/ci/pipeline_processing/test_cases/dag_test_manual_allow_failure_true_succeeds.yml
@@ -0,0 +1,66 @@
+config:
+ test:
+ stage: test
+ when: manual
+ allow_failure: true
+ script: exit 0
+
+ deploy:
+ stage: deploy
+ script: exit 0
+ needs: [test]
+
+init:
+ expect:
+ pipeline: skipped
+ stages:
+ test: skipped
+ deploy: skipped
+ jobs:
+ test: manual
+ deploy: skipped
+
+transitions:
+ - event: play
+ jobs: [test]
+ expect:
+ pipeline: pending
+ stages:
+ test: pending
+ deploy: skipped
+ jobs:
+ test: pending
+ deploy: created
+
+ - event: run
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: running
+ deploy: skipped
+ jobs:
+ test: running
+ deploy: created
+
+ - event: success
+ jobs: [test]
+ expect:
+ pipeline: running
+ stages:
+ test: success
+ deploy: pending
+ jobs:
+ test: success
+ deploy: pending
+
+ - event: success
+ jobs: [deploy]
+ expect:
+ pipeline: success
+ stages:
+ test: success
+ deploy: success
+ jobs:
+ test: success
+ deploy: success
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_false.yml b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_false.yml
index 2ffa35b56d7..759b4d0ae75 100644
--- a/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_false.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_false.yml
@@ -20,7 +20,7 @@ init:
deploy: created
transitions:
- - event: enqueue
+ - event: play
jobs: [test]
expect:
pipeline: pending
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true.yml b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true.yml
index 088fab5ca09..93eecae8fcf 100644
--- a/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true.yml
@@ -31,7 +31,7 @@ transitions:
test: manual
deploy: success
- - event: enqueue
+ - event: play
jobs: [test]
expect:
pipeline: running
diff --git a/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_on_failure.yml b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_on_failure.yml
index 2b30316aef6..301f9631845 100644
--- a/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_on_failure.yml
+++ b/spec/services/ci/pipeline_processing/test_cases/stage_test_manual_allow_failure_true_deploy_on_failure.yml
@@ -21,7 +21,7 @@ init:
deploy: skipped
transitions:
- - event: enqueue
+ - event: play
jobs: [test]
expect:
pipeline: pending
@@ -30,7 +30,7 @@ transitions:
deploy: skipped
jobs:
test: pending
- deploy: skipped
+ deploy: created
- event: drop
jobs: [test]
diff --git a/spec/services/ci/pipeline_trigger_service_spec.rb b/spec/services/ci/pipeline_trigger_service_spec.rb
index 89d3da89011..36055779a2e 100644
--- a/spec/services/ci/pipeline_trigger_service_spec.rb
+++ b/spec/services/ci/pipeline_trigger_service_spec.rb
@@ -55,17 +55,6 @@ RSpec.describe Ci::PipelineTriggerService do
expect(var.variable_type).to eq('file')
end
- context 'when FF ci_trigger_payload_into_pipeline is disabled' do
- before do
- stub_feature_flags(ci_trigger_payload_into_pipeline: false)
- end
-
- it 'does not store the payload as a variable' do
- expect { result }.not_to change { Ci::PipelineVariable.count }
- expect(result[:pipeline].variables).to be_empty
- end
- end
-
context 'when commit message has [ci skip]' do
before do
allow_next(Ci::Pipeline).to receive(:git_commit_message) { '[ci skip]' }
diff --git a/spec/services/ci/play_bridge_service_spec.rb b/spec/services/ci/play_bridge_service_spec.rb
index 0482ad4d76f..d6130325b5a 100644
--- a/spec/services/ci/play_bridge_service_spec.rb
+++ b/spec/services/ci/play_bridge_service_spec.rb
@@ -35,6 +35,28 @@ RSpec.describe Ci::PlayBridgeService, '#execute' do
expect(bridge.reload.user).to eq(user)
end
+ context 'when a subsequent job is skipped' do
+ let!(:job) { create(:ci_build, :skipped, pipeline: pipeline, stage_idx: bridge.stage_idx + 1) }
+
+ before do
+ create(:ci_build_need, build: job, name: bridge.name)
+ end
+
+ it 'marks the subsequent job as processable' do
+ expect { execute_service }.to change { job.reload.status }.from('skipped').to('created')
+ end
+
+ context 'when the FF ci_fix_pipeline_status_for_dag_needs_manual is disabled' do
+ before do
+ stub_feature_flags(ci_fix_pipeline_status_for_dag_needs_manual: false)
+ end
+
+ it 'does not change the subsequent job' do
+ expect { execute_service }.not_to change { job.reload.status }.from('skipped')
+ end
+ end
+ end
+
context 'when bridge is not playable' do
let(:bridge) { create(:ci_bridge, :failed, pipeline: pipeline, downstream: downstream_project) }
diff --git a/spec/services/ci/play_build_service_spec.rb b/spec/services/ci/play_build_service_spec.rb
index 00c6de7681d..78de91675f9 100644
--- a/spec/services/ci/play_build_service_spec.rb
+++ b/spec/services/ci/play_build_service_spec.rb
@@ -61,6 +61,28 @@ RSpec.describe Ci::PlayBuildService, '#execute' do
expect(build.reload.user).to eq user
end
+ context 'when a subsequent job is skipped' do
+ let!(:job) { create(:ci_build, :skipped, pipeline: pipeline, stage_idx: build.stage_idx + 1) }
+
+ before do
+ create(:ci_build_need, build: job, name: build.name)
+ end
+
+ it 'marks the subsequent job as processable' do
+ expect { service.execute(build) }.to change { job.reload.status }.from('skipped').to('created')
+ end
+
+ context 'when the FF ci_fix_pipeline_status_for_dag_needs_manual is disabled' do
+ before do
+ stub_feature_flags(ci_fix_pipeline_status_for_dag_needs_manual: false)
+ end
+
+ it 'does not change the subsequent job' do
+ expect { service.execute(build) }.not_to change { job.reload.status }.from('skipped')
+ end
+ end
+ end
+
context 'when variables are supplied' do
let(:job_variables) do
[{ key: 'first', secret_value: 'first' },
diff --git a/spec/services/ci/process_build_service_spec.rb b/spec/services/ci/process_build_service_spec.rb
index 42a92504839..b54fc45d36a 100644
--- a/spec/services/ci/process_build_service_spec.rb
+++ b/spec/services/ci/process_build_service_spec.rb
@@ -145,28 +145,5 @@ RSpec.describe Ci::ProcessBuildService, '#execute' do
expect { subject }.to change { build.status }.to(after_status)
end
end
-
- context 'when FF skip_dag_manual_and_delayed_jobs is disabled on the project' do
- let_it_be(:other_project) { create(:project) }
-
- before do
- stub_feature_flags(skip_dag_manual_and_delayed_jobs: other_project)
- end
-
- where(:build_when, :current_status, :after_status) do
- :on_success | 'success' | 'pending'
- :on_success | 'skipped' | 'skipped'
- :manual | 'success' | 'manual'
- :manual | 'skipped' | 'manual'
- :delayed | 'success' | 'manual'
- :delayed | 'skipped' | 'manual'
- end
-
- with_them do
- it 'proceeds the build' do
- expect { subject }.to change { build.status }.to(after_status)
- end
- end
- end
end
end
diff --git a/spec/services/ci/process_pipeline_service_spec.rb b/spec/services/ci/process_pipeline_service_spec.rb
index e02536fd07f..254bd19c808 100644
--- a/spec/services/ci/process_pipeline_service_spec.rb
+++ b/spec/services/ci/process_pipeline_service_spec.rb
@@ -10,6 +10,14 @@ RSpec.describe Ci::ProcessPipelineService do
create(:ci_empty_pipeline, ref: 'master', project: project)
end
+ let(:pipeline_processing_events_counter) { double(increment: true) }
+ let(:legacy_update_jobs_counter) { double(increment: true) }
+
+ let(:metrics) do
+ double(pipeline_processing_events_counter: pipeline_processing_events_counter,
+ legacy_update_jobs_counter: legacy_update_jobs_counter)
+ end
+
subject { described_class.new(pipeline) }
before do
@@ -17,22 +25,13 @@ RSpec.describe Ci::ProcessPipelineService do
stub_not_protect_default_branch
project.add_developer(user)
+
+ allow(subject).to receive(:metrics).and_return(metrics)
end
describe 'processing events counter' do
- let(:metrics) { double('pipeline metrics') }
- let(:counter) { double('events counter') }
-
- before do
- allow(subject)
- .to receive(:metrics).and_return(metrics)
- allow(metrics)
- .to receive(:pipeline_processing_events_counter)
- .and_return(counter)
- end
-
it 'increments processing events counter' do
- expect(counter).to receive(:increment)
+ expect(pipeline_processing_events_counter).to receive(:increment)
subject.execute
end
@@ -64,33 +63,22 @@ RSpec.describe Ci::ProcessPipelineService do
expect(all_builds.retried).to contain_exactly(build_retried)
end
- context 'counter ci_legacy_update_jobs_as_retried_total' do
- let(:counter) { double(increment: true) }
+ it 'increments the counter' do
+ expect(legacy_update_jobs_counter).to receive(:increment)
+ subject.execute
+ end
+
+ context 'when the previous build has already retried column true' do
before do
- allow(Gitlab::Metrics).to receive(:counter).and_call_original
- allow(Gitlab::Metrics).to receive(:counter)
- .with(:ci_legacy_update_jobs_as_retried_total, anything)
- .and_return(counter)
+ build_retried.update_columns(retried: true)
end
- it 'increments the counter' do
- expect(counter).to receive(:increment)
+ it 'does not increment the counter' do
+ expect(legacy_update_jobs_counter).not_to receive(:increment)
subject.execute
end
-
- context 'when the previous build has already retried column true' do
- before do
- build_retried.update_columns(retried: true)
- end
-
- it 'does not increment the counter' do
- expect(counter).not_to receive(:increment)
-
- subject.execute
- end
- end
end
end
diff --git a/spec/services/ci/register_job_service_spec.rb b/spec/services/ci/register_job_service_spec.rb
index 9187dd4f300..02b48e8ba06 100644
--- a/spec/services/ci/register_job_service_spec.rb
+++ b/spec/services/ci/register_job_service_spec.rb
@@ -225,6 +225,28 @@ module Ci
end
end
+ context 'when the use_distinct_in_register_job_object_hierarchy feature flag is enabled' do
+ before do
+ stub_feature_flags(use_distinct_in_register_job_object_hierarchy: true)
+ stub_feature_flags(use_distinct_for_all_object_hierarchy: true)
+ end
+
+ it 'calls DISTINCT' do
+ expect(described_class.new(group_runner).send(:builds_for_group_runner).to_sql).to include("DISTINCT")
+ end
+ end
+
+ context 'when the use_distinct_in_register_job_object_hierarchy feature flag is disabled' do
+ before do
+ stub_feature_flags(use_distinct_in_register_job_object_hierarchy: false)
+ stub_feature_flags(use_distinct_for_all_object_hierarchy: false)
+ end
+
+ it 'does not call DISTINCT' do
+ expect(described_class.new(group_runner).send(:builds_for_group_runner).to_sql).not_to include("DISTINCT")
+ end
+ end
+
context 'group runner' do
let(:build) { execute(group_runner) }
@@ -593,9 +615,22 @@ module Ci
create(:ci_build, pipeline: pipeline, tag_list: %w[non-matching])
end
- it "observes queue size of only matching jobs" do
+ it 'observes queue size of only matching jobs' do
# pending_job + 2 x matching ones
- expect(Gitlab::Ci::Queue::Metrics.queue_size_total).to receive(:observe).with({}, 3)
+ expect(Gitlab::Ci::Queue::Metrics.queue_size_total).to receive(:observe)
+ .with({ runner_type: specific_runner.runner_type }, 3)
+
+ expect(execute(specific_runner)).to eq(pending_job)
+ end
+
+ it 'observes queue processing time by the runner type' do
+ expect(Gitlab::Ci::Queue::Metrics.queue_iteration_duration_seconds)
+ .to receive(:observe)
+ .with({ runner_type: specific_runner.runner_type }, anything)
+
+ expect(Gitlab::Ci::Queue::Metrics.queue_retrieval_duration_seconds)
+ .to receive(:observe)
+ .with({ runner_type: specific_runner.runner_type }, anything)
expect(execute(specific_runner)).to eq(pending_job)
end
diff --git a/spec/services/ci/retry_build_service_spec.rb b/spec/services/ci/retry_build_service_spec.rb
index bdf60bb3fdc..7dd3d963e56 100644
--- a/spec/services/ci/retry_build_service_spec.rb
+++ b/spec/services/ci/retry_build_service_spec.rb
@@ -181,7 +181,7 @@ RSpec.describe Ci::RetryBuildService do
end
it 'resolves todos for old build that failed' do
- expect(MergeRequests::AddTodoWhenBuildFailsService)
+ expect(::MergeRequests::AddTodoWhenBuildFailsService)
.to receive_message_chain(:new, :close)
service.execute(build)
diff --git a/spec/services/ci/retry_pipeline_service_spec.rb b/spec/services/ci/retry_pipeline_service_spec.rb
index 3c6a99efbf8..3e2e9f07723 100644
--- a/spec/services/ci/retry_pipeline_service_spec.rb
+++ b/spec/services/ci/retry_pipeline_service_spec.rb
@@ -272,7 +272,7 @@ RSpec.describe Ci::RetryPipelineService, '#execute' do
end
it 'closes all todos about failed jobs for pipeline' do
- expect(MergeRequests::AddTodoWhenBuildFailsService)
+ expect(::MergeRequests::AddTodoWhenBuildFailsService)
.to receive_message_chain(:new, :close_all)
service.execute(pipeline)
diff --git a/spec/services/ci/test_failure_history_service_spec.rb b/spec/services/ci/test_failure_history_service_spec.rb
index d9c1c8dc3fa..c19df6e217b 100644
--- a/spec/services/ci/test_failure_history_service_spec.rb
+++ b/spec/services/ci/test_failure_history_service_spec.rb
@@ -11,15 +11,15 @@ RSpec.describe Ci::TestFailureHistoryService, :aggregate_failures do
context 'when pipeline has failed builds with test reports' do
before do
- # The test report has 2 test case failures
+ # The test report has 2 unit test failures
create(:ci_build, :failed, :test_reports, pipeline: pipeline, project: project)
end
- it 'creates test case failures records' do
+ it 'creates unit test failures records' do
execute_service
- expect(Ci::TestCase.count).to eq(2)
- expect(Ci::TestCaseFailure.count).to eq(2)
+ expect(Ci::UnitTest.count).to eq(2)
+ expect(Ci::UnitTestFailure.count).to eq(2)
end
context 'when pipeline is not for the default branch' do
@@ -30,8 +30,8 @@ RSpec.describe Ci::TestFailureHistoryService, :aggregate_failures do
it 'does not persist data' do
execute_service
- expect(Ci::TestCase.count).to eq(0)
- expect(Ci::TestCaseFailure.count).to eq(0)
+ expect(Ci::UnitTest.count).to eq(0)
+ expect(Ci::UnitTestFailure.count).to eq(0)
end
end
@@ -43,12 +43,12 @@ RSpec.describe Ci::TestFailureHistoryService, :aggregate_failures do
it 'does not fail but does not persist new data' do
expect { described_class.new(pipeline).execute }.not_to raise_error
- expect(Ci::TestCase.count).to eq(2)
- expect(Ci::TestCaseFailure.count).to eq(2)
+ expect(Ci::UnitTest.count).to eq(2)
+ expect(Ci::UnitTestFailure.count).to eq(2)
end
end
- context 'when number of failed test cases exceed the limit' do
+ context 'when number of failed unit tests exceeds the limit' do
before do
stub_const("#{described_class.name}::MAX_TRACKABLE_FAILURES", 1)
end
@@ -56,16 +56,16 @@ RSpec.describe Ci::TestFailureHistoryService, :aggregate_failures do
it 'does not persist data' do
execute_service
- expect(Ci::TestCase.count).to eq(0)
- expect(Ci::TestCaseFailure.count).to eq(0)
+ expect(Ci::UnitTest.count).to eq(0)
+ expect(Ci::UnitTestFailure.count).to eq(0)
end
end
- context 'when number of failed test cases across multiple builds exceed the limit' do
+ context 'when number of failed unit tests across multiple builds exceeds the limit' do
before do
stub_const("#{described_class.name}::MAX_TRACKABLE_FAILURES", 2)
- # This other test report has 1 unique test case failure which brings us to 3 total failures across all builds
+ # This other test report has 1 unique unit test failure which brings us to 3 total failures across all builds
# thus exceeding the limit of 2 for MAX_TRACKABLE_FAILURES
create(:ci_build, :failed, :test_reports_with_duplicate_failed_test_names, pipeline: pipeline, project: project)
end
@@ -73,23 +73,23 @@ RSpec.describe Ci::TestFailureHistoryService, :aggregate_failures do
it 'does not persist data' do
execute_service
- expect(Ci::TestCase.count).to eq(0)
- expect(Ci::TestCaseFailure.count).to eq(0)
+ expect(Ci::UnitTest.count).to eq(0)
+ expect(Ci::UnitTestFailure.count).to eq(0)
end
end
end
- context 'when test failure data have duplicates within the same payload (happens when the JUnit report has duplicate test case names but have different failures)' do
+ context 'when test failure data have duplicates within the same payload (happens when the JUnit report has duplicate unit test names but with different failures)' do
before do
- # The test report has 2 test case failures but with the same test case keys
+ # The test report has 2 unit test failures but with the same unit test keys
create(:ci_build, :failed, :test_reports_with_duplicate_failed_test_names, pipeline: pipeline, project: project)
end
it 'does not fail but does not persist duplicate data' do
expect { execute_service }.not_to raise_error
- expect(Ci::TestCase.count).to eq(1)
- expect(Ci::TestCaseFailure.count).to eq(1)
+ expect(Ci::UnitTest.count).to eq(1)
+ expect(Ci::UnitTestFailure.count).to eq(1)
end
end
@@ -102,8 +102,8 @@ RSpec.describe Ci::TestFailureHistoryService, :aggregate_failures do
it 'does not persist data' do
execute_service
- expect(Ci::TestCase.count).to eq(0)
- expect(Ci::TestCaseFailure.count).to eq(0)
+ expect(Ci::UnitTest.count).to eq(0)
+ expect(Ci::UnitTestFailure.count).to eq(0)
end
end
end
diff --git a/spec/services/clusters/integrations/create_service_spec.rb b/spec/services/clusters/integrations/create_service_spec.rb
new file mode 100644
index 00000000000..cfc0943b6ad
--- /dev/null
+++ b/spec/services/clusters/integrations/create_service_spec.rb
@@ -0,0 +1,90 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Clusters::Integrations::CreateService, '#execute' do
+ let_it_be(:project) { create(:project) }
+ let_it_be_with_reload(:cluster) { create(:cluster, :provided_by_gcp, projects: [project]) }
+
+ let(:params) do
+ { application_type: 'prometheus', enabled: true }
+ end
+
+ let(:service) do
+ described_class.new(container: project, cluster: cluster, current_user: project.owner, params: params)
+ end
+
+ it 'creates a new Prometheus instance' do
+ expect(service.execute).to be_success
+
+ expect(cluster.integration_prometheus).to be_present
+ expect(cluster.integration_prometheus).to be_persisted
+ expect(cluster.integration_prometheus).to be_enabled
+ end
+
+ context 'enabled param is false' do
+ let(:params) do
+ { application_type: 'prometheus', enabled: false }
+ end
+
+ it 'creates a new uninstalled Prometheus instance' do
+ expect(service.execute).to be_success
+
+ expect(cluster.integration_prometheus).to be_present
+ expect(cluster.integration_prometheus).to be_persisted
+ expect(cluster.integration_prometheus).not_to be_enabled
+ end
+ end
+
+ context 'unauthorized user' do
+ let(:service) do
+ unauthorized_user = create(:user)
+
+ described_class.new(container: project, cluster: cluster, current_user: unauthorized_user, params: params)
+ end
+
+ it 'does not create a new Prometheus instance' do
+ expect(service.execute).to be_error
+
+ expect(cluster.integration_prometheus).to be_nil
+ end
+ end
+
+ context 'prometheus record exists' do
+ before do
+ create(:clusters_integrations_prometheus, cluster: cluster)
+ end
+
+ it 'updates the Prometheus instance' do
+ expect(service.execute).to be_success
+
+ expect(cluster.integration_prometheus).to be_present
+ expect(cluster.integration_prometheus).to be_persisted
+ expect(cluster.integration_prometheus).to be_enabled
+ end
+
+ context 'enabled param is false' do
+ let(:params) do
+ { application_type: 'prometheus', enabled: false }
+ end
+
+ it 'updates the Prometheus instance as uninstalled' do
+ expect(service.execute).to be_success
+
+ expect(cluster.integration_prometheus).to be_present
+ expect(cluster.integration_prometheus).to be_persisted
+ expect(cluster.integration_prometheus).not_to be_enabled
+ end
+ end
+ end
+
+ context 'for an unsupported application type' do
+ let(:params) do
+ { application_type: 'something_else', enabled: true }
+ end
+
+ it 'errors' do
+ expect { service.execute }.to raise_error(ArgumentError)
+ end
+ end
+end
diff --git a/spec/services/deployments/link_merge_requests_service_spec.rb b/spec/services/deployments/link_merge_requests_service_spec.rb
index e2ac2273b8c..a5a13230d6f 100644
--- a/spec/services/deployments/link_merge_requests_service_spec.rb
+++ b/spec/services/deployments/link_merge_requests_service_spec.rb
@@ -32,6 +32,17 @@ RSpec.describe Deployments::LinkMergeRequestsService do
end
end
+ context 'when the deployment failed' do
+ it 'does nothing' do
+ environment = create(:environment, name: 'foo')
+ deploy = create(:deployment, :failed, environment: environment)
+
+ expect(deploy).not_to receive(:link_merge_requests)
+
+ described_class.new(deploy).execute
+ end
+ end
+
context 'when there is a previous deployment' do
it 'links all merge requests merged since the previous deployment' do
deploy1 = create(
diff --git a/spec/services/design_management/copy_design_collection/copy_service_spec.rb b/spec/services/design_management/copy_design_collection/copy_service_spec.rb
index ddbed91815f..03242487b53 100644
--- a/spec/services/design_management/copy_design_collection/copy_service_spec.rb
+++ b/spec/services/design_management/copy_design_collection/copy_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe DesignManagement::CopyDesignCollection::CopyService, :clean_gitla
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:issue, refind: true) { create(:issue, project: project) }
+
let(:target_issue) { create(:issue) }
subject { described_class.new(project, user, issue: issue, target_issue: target_issue).execute }
diff --git a/spec/services/design_management/delete_designs_service_spec.rb b/spec/services/design_management/delete_designs_service_spec.rb
index ed161b4c8ff..341f71fa62c 100644
--- a/spec/services/design_management/delete_designs_service_spec.rb
+++ b/spec/services/design_management/delete_designs_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe DesignManagement::DeleteDesignsService do
let_it_be(:project) { create(:project) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:user) { create(:user) }
+
let(:designs) { create_designs }
subject(:service) { described_class.new(project, user, issue: issue, designs: designs) }
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
index f36e68c8dbd..5bc763cc95e 100644
--- a/spec/services/design_management/save_designs_service_spec.rb
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe DesignManagement::SaveDesignsService do
let_it_be_with_reload(:issue) { create(:issue) }
let_it_be(:developer) { create(:user, developer_projects: [issue.project]) }
+
let(:project) { issue.project }
let(:user) { developer }
let(:files) { [rails_sample] }
@@ -309,7 +310,7 @@ RSpec.describe DesignManagement::SaveDesignsService do
end
context 'when the user is not allowed to upload designs' do
- let(:user) { build_stubbed(:user) }
+ let(:user) { build_stubbed(:user, id: non_existing_record_id) }
it_behaves_like 'a service error'
end
diff --git a/spec/services/draft_notes/publish_service_spec.rb b/spec/services/draft_notes/publish_service_spec.rb
index f83e91b683f..f93622dc25a 100644
--- a/spec/services/draft_notes/publish_service_spec.rb
+++ b/spec/services/draft_notes/publish_service_spec.rb
@@ -229,7 +229,7 @@ RSpec.describe DraftNotes::PublishService do
expect(DraftNote.count).to eq(2)
end
- context 'with quick actions' do
+ context 'with quick actions', :sidekiq_inline do
it 'performs quick actions' do
other_user = create(:user)
project.add_developer(other_user)
diff --git a/spec/services/environments/auto_stop_service_spec.rb b/spec/services/environments/auto_stop_service_spec.rb
index 8e56c7e642c..93b1596586f 100644
--- a/spec/services/environments/auto_stop_service_spec.rb
+++ b/spec/services/environments/auto_stop_service_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Environments::AutoStopService, :clean_gitlab_redis_shared_state d
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:service) { described_class.new }
before_all do
@@ -19,6 +20,7 @@ RSpec.describe Environments::AutoStopService, :clean_gitlab_redis_shared_state d
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:environments) { Environment.all }
before_all do
diff --git a/spec/services/environments/canary_ingress/update_service_spec.rb b/spec/services/environments/canary_ingress/update_service_spec.rb
index 5ba62e7104c..0e72fff1ed2 100644
--- a/spec/services/environments/canary_ingress/update_service_spec.rb
+++ b/spec/services/environments/canary_ingress/update_service_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Environments::CanaryIngress::UpdateService, :clean_gitlab_redis_c
let_it_be(:project, refind: true) { create(:project) }
let_it_be(:maintainer) { create(:user) }
let_it_be(:reporter) { create(:user) }
+
let(:user) { maintainer }
let(:params) { {} }
let(:service) { described_class.new(project, user, params) }
diff --git a/spec/services/environments/reset_auto_stop_service_spec.rb b/spec/services/environments/reset_auto_stop_service_spec.rb
index cab1bf2cc26..4a0b091c12d 100644
--- a/spec/services/environments/reset_auto_stop_service_spec.rb
+++ b/spec/services/environments/reset_auto_stop_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Environments::ResetAutoStopService do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user).tap { |user| project.add_developer(user) } }
let_it_be(:reporter) { create(:user).tap { |user| project.add_reporter(user) } }
+
let(:user) { developer }
let(:service) { described_class.new(project, user) }
diff --git a/spec/services/groups/auto_devops_service_spec.rb b/spec/services/groups/auto_devops_service_spec.rb
index 3d89ee96823..486a99dd8df 100644
--- a/spec/services/groups/auto_devops_service_spec.rb
+++ b/spec/services/groups/auto_devops_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Groups::AutoDevopsService, '#execute' do
let_it_be(:group) { create(:group) }
let_it_be(:user) { create(:user) }
+
let(:group_params) { { auto_devops_enabled: '0' } }
let(:service) { described_class.new(group, user, group_params) }
diff --git a/spec/services/groups/group_links/update_service_spec.rb b/spec/services/groups/group_links/update_service_spec.rb
index 436cdf89a0f..82c4a10f15a 100644
--- a/spec/services/groups/group_links/update_service_spec.rb
+++ b/spec/services/groups/group_links/update_service_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Groups::GroupLinks::UpdateService, '#execute' do
let_it_be(:group) { create(:group, :private) }
let_it_be(:shared_group) { create(:group, :private) }
let_it_be(:project) { create(:project, group: shared_group) }
+
let(:group_member_user) { create(:user) }
let!(:link) { create(:group_group_link, shared_group: shared_group, shared_with_group: group) }
diff --git a/spec/services/groups/merge_requests_count_service_spec.rb b/spec/services/groups/merge_requests_count_service_spec.rb
new file mode 100644
index 00000000000..10c7ba5fca4
--- /dev/null
+++ b/spec/services/groups/merge_requests_count_service_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Groups::MergeRequestsCountService, :use_clean_rails_memory_store_caching do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :repository, namespace: group) }
+ let_it_be(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
+
+ subject { described_class.new(group, user) }
+
+ describe '#relation_for_count' do
+ before do
+ group.add_reporter(user)
+ allow(MergeRequestsFinder).to receive(:new).and_call_original
+ end
+
+ it 'uses the MergeRequestsFinder to scope merge requests' do
+ expect(MergeRequestsFinder)
+ .to receive(:new)
+ .with(user, group_id: group.id, state: 'opened', non_archived: true, include_subgroups: true)
+
+ subject.count
+ end
+ end
+
+ it_behaves_like 'a counter caching service with threshold'
+end
diff --git a/spec/services/groups/open_issues_count_service_spec.rb b/spec/services/groups/open_issues_count_service_spec.rb
index 8bbb1c90c6b..740e9846119 100644
--- a/spec/services/groups/open_issues_count_service_spec.rb
+++ b/spec/services/groups/open_issues_count_service_spec.rb
@@ -54,53 +54,7 @@ RSpec.describe Groups::OpenIssuesCountService, :use_clean_rails_memory_store_cac
end
end
- context 'with different cache values' do
- let(:public_count_key) { subject.cache_key(described_class::PUBLIC_COUNT_KEY) }
- let(:under_threshold) { described_class::CACHED_COUNT_THRESHOLD - 1 }
- let(:over_threshold) { described_class::CACHED_COUNT_THRESHOLD + 1 }
-
- context 'when cache is empty' do
- before do
- Rails.cache.delete(public_count_key)
- end
-
- it 'refreshes cache if value over threshold' do
- allow(subject).to receive(:uncached_count).and_return(over_threshold)
-
- expect(subject.count).to eq(over_threshold)
- expect(Rails.cache.read(public_count_key)).to eq(over_threshold)
- end
-
- it 'does not refresh cache if value under threshold' do
- allow(subject).to receive(:uncached_count).and_return(under_threshold)
-
- expect(subject.count).to eq(under_threshold)
- expect(Rails.cache.read(public_count_key)).to be_nil
- end
- end
-
- context 'when cached count is under the threshold value' do
- before do
- Rails.cache.write(public_count_key, under_threshold)
- end
-
- it 'does not refresh cache' do
- expect(Rails.cache).not_to receive(:write)
- expect(subject.count).to eq(under_threshold)
- end
- end
-
- context 'when cached count is over the threshold value' do
- before do
- Rails.cache.write(public_count_key, over_threshold)
- end
-
- it 'does not refresh cache' do
- expect(Rails.cache).not_to receive(:write)
- expect(subject.count).to eq(over_threshold)
- end
- end
- end
+ it_behaves_like 'a counter caching service with threshold'
end
end
end
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index 19b746ade34..3a1197970f4 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -5,12 +5,14 @@ require 'spec_helper'
RSpec.describe Groups::TransferService do
let_it_be(:user) { create(:user) }
let_it_be(:new_parent_group) { create(:group, :public) }
+
let!(:group_member) { create(:group_member, :owner, group: group, user: user) }
let(:transfer_service) { described_class.new(group, user) }
context 'handling packages' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:new_group) { create(:group, :public) }
+
let(:project) { create(:project, :public, namespace: group) }
before do
@@ -272,6 +274,7 @@ RSpec.describe Groups::TransferService do
context 'with a group integration' do
let_it_be(:instance_integration) { create(:slack_service, :instance, webhook: 'http://project.slack.com') }
+
let(:new_created_integration) { Service.find_by(group: group) }
context 'with an inherited integration' do
diff --git a/spec/services/groups/update_shared_runners_service_spec.rb b/spec/services/groups/update_shared_runners_service_spec.rb
index e2838c4ce0b..e941958eb8c 100644
--- a/spec/services/groups/update_shared_runners_service_spec.rb
+++ b/spec/services/groups/update_shared_runners_service_spec.rb
@@ -59,6 +59,7 @@ RSpec.describe Groups::UpdateSharedRunnersService do
context 'disable shared Runners' do
let_it_be(:group) { create(:group) }
+
let(:params) { { shared_runners_setting: 'disabled_and_unoverridable' } }
it 'receives correct method and succeeds' do
diff --git a/spec/services/ide/base_config_service_spec.rb b/spec/services/ide/base_config_service_spec.rb
index debdc6e5809..ee57f2c18ec 100644
--- a/spec/services/ide/base_config_service_spec.rb
+++ b/spec/services/ide/base_config_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Ide::BaseConfigService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:sha) { 'sha' }
describe '#execute' do
diff --git a/spec/services/ide/schemas_config_service_spec.rb b/spec/services/ide/schemas_config_service_spec.rb
index 19e5ca9e87d..69ad9b5cbea 100644
--- a/spec/services/ide/schemas_config_service_spec.rb
+++ b/spec/services/ide/schemas_config_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Ide::SchemasConfigService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:filename) { 'sample.yml' }
let(:schema_content) { double(body: '{"title":"Sample schema"}') }
diff --git a/spec/services/ide/terminal_config_service_spec.rb b/spec/services/ide/terminal_config_service_spec.rb
index d6c4f7a2a69..483b6413be3 100644
--- a/spec/services/ide/terminal_config_service_spec.rb
+++ b/spec/services/ide/terminal_config_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Ide::TerminalConfigService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:sha) { 'sha' }
describe '#execute' do
@@ -47,6 +48,7 @@ RSpec.describe Ide::TerminalConfigService do
terminal: {
tag_list: [],
yaml_variables: [],
+ job_variables: [],
options: { script: ["sleep 60"] }
})
end
@@ -61,6 +63,7 @@ RSpec.describe Ide::TerminalConfigService do
terminal: {
tag_list: [],
yaml_variables: [],
+ job_variables: [],
options: { before_script: ["ls"], script: ["sleep 60"] }
})
end
diff --git a/spec/services/issuable/destroy_service_spec.rb b/spec/services/issuable/destroy_service_spec.rb
index 8d62932f986..fa4902e5237 100644
--- a/spec/services/issuable/destroy_service_spec.rb
+++ b/spec/services/issuable/destroy_service_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
RSpec.describe Issuable::DestroyService do
let(:user) { create(:user) }
- let(:project) { create(:project, :public) }
+ let(:group) { create(:group, :public) }
+ let(:project) { create(:project, :public, group: group) }
subject(:service) { described_class.new(project, user) }
@@ -22,17 +23,14 @@ RSpec.describe Issuable::DestroyService do
service.execute(issue)
end
- it 'updates the todo caches for users with todos on the issue' do
- create(:todo, target: issue, user: user, author: user, project: project)
-
- expect { service.execute(issue) }
- .to change { user.todos_pending_count }.from(1).to(0)
- end
-
it 'invalidates the issues count cache for the assignees' do
expect_any_instance_of(User).to receive(:invalidate_cache_counts).once
service.execute(issue)
end
+
+ it_behaves_like 'service deleting todos' do
+ let(:issuable) { issue }
+ end
end
context 'when issuable is a merge request' do
@@ -53,11 +51,8 @@ RSpec.describe Issuable::DestroyService do
service.execute(merge_request)
end
- it 'updates the todo caches for users with todos on the merge request' do
- create(:todo, target: merge_request, user: user, author: user, project: project)
-
- expect { service.execute(merge_request) }
- .to change { user.todos_pending_count }.from(1).to(0)
+ it_behaves_like 'service deleting todos' do
+ let(:issuable) { merge_request }
end
end
end
diff --git a/spec/services/issuable/process_assignees_spec.rb b/spec/services/issuable/process_assignees_spec.rb
index 876c84957cc..45d57a1772a 100644
--- a/spec/services/issuable/process_assignees_spec.rb
+++ b/spec/services/issuable/process_assignees_spec.rb
@@ -4,10 +4,10 @@ require 'spec_helper'
RSpec.describe Issuable::ProcessAssignees do
describe '#execute' do
- it 'returns assignee_ids when assignee_ids are specified' do
+ it 'returns assignee_ids when add_assignee_ids and remove_assignee_ids are not specified' do
process = Issuable::ProcessAssignees.new(assignee_ids: %w(5 7 9),
- add_assignee_ids: %w(2 4 6),
- remove_assignee_ids: %w(4 7 11),
+ add_assignee_ids: nil,
+ remove_assignee_ids: nil,
existing_assignee_ids: %w(1 3 9),
extra_assignee_ids: %w(2 5 12))
result = process.execute
@@ -15,19 +15,19 @@ RSpec.describe Issuable::ProcessAssignees do
expect(result.sort).to eq(%w(5 7 9).sort)
end
- it 'combines other ids when assignee_ids is empty' do
- process = Issuable::ProcessAssignees.new(assignee_ids: [],
- add_assignee_ids: %w(2 4 6),
- remove_assignee_ids: %w(4 7 11),
+ it 'combines other ids when assignee_ids is nil' do
+ process = Issuable::ProcessAssignees.new(assignee_ids: nil,
+ add_assignee_ids: nil,
+ remove_assignee_ids: nil,
existing_assignee_ids: %w(1 3 11),
extra_assignee_ids: %w(2 5 12))
result = process.execute
- expect(result.sort).to eq(%w(1 2 3 5 6 12).sort)
+ expect(result.sort).to eq(%w(1 2 3 5 11 12).sort)
end
- it 'combines other ids when assignee_ids is nil' do
- process = Issuable::ProcessAssignees.new(assignee_ids: nil,
+ it 'combines other ids when both add_assignee_ids and remove_assignee_ids are not empty' do
+ process = Issuable::ProcessAssignees.new(assignee_ids: %w(5 7 9),
add_assignee_ids: %w(2 4 6),
remove_assignee_ids: %w(4 7 11),
existing_assignee_ids: %w(1 3 11),
@@ -37,8 +37,8 @@ RSpec.describe Issuable::ProcessAssignees do
expect(result.sort).to eq(%w(1 2 3 5 6 12).sort)
end
- it 'combines other ids when assignee_ids and add_assignee_ids are nil' do
- process = Issuable::ProcessAssignees.new(assignee_ids: nil,
+ it 'combines other ids when remove_assignee_ids is not empty' do
+ process = Issuable::ProcessAssignees.new(assignee_ids: %w(5 7 9),
add_assignee_ids: nil,
remove_assignee_ids: %w(4 7 11),
existing_assignee_ids: %w(1 3 11),
@@ -48,8 +48,8 @@ RSpec.describe Issuable::ProcessAssignees do
expect(result.sort).to eq(%w(1 2 3 5 12).sort)
end
- it 'combines other ids when assignee_ids and remove_assignee_ids are nil' do
- process = Issuable::ProcessAssignees.new(assignee_ids: nil,
+ it 'combines other ids when add_assignee_ids is not empty' do
+ process = Issuable::ProcessAssignees.new(assignee_ids: %w(5 7 9),
add_assignee_ids: %w(2 4 6),
remove_assignee_ids: nil,
existing_assignee_ids: %w(1 3 11),
@@ -59,8 +59,8 @@ RSpec.describe Issuable::ProcessAssignees do
expect(result.sort).to eq(%w(1 2 4 3 5 6 11 12).sort)
end
- it 'combines ids when only add_assignee_ids and remove_assignee_ids are passed' do
- process = Issuable::ProcessAssignees.new(assignee_ids: nil,
+ it 'combines ids when existing_assignee_ids and extra_assignee_ids are omitted' do
+ process = Issuable::ProcessAssignees.new(assignee_ids: %w(5 7 9),
add_assignee_ids: %w(2 4 6),
remove_assignee_ids: %w(4 7 11))
result = process.execute
diff --git a/spec/services/issue_links/create_service_spec.rb b/spec/services/issue_links/create_service_spec.rb
index 873890d25cf..1bca717acb7 100644
--- a/spec/services/issue_links/create_service_spec.rb
+++ b/spec/services/issue_links/create_service_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe IssueLinks::CreateService do
end
it 'returns error' do
- is_expected.to eq(message: 'No Issue found for given params', status: :error, http_status: 404)
+ is_expected.to eq(message: 'No matching issue found. Make sure that you are adding a valid issue URL.', status: :error, http_status: 404)
end
end
@@ -34,7 +34,7 @@ RSpec.describe IssueLinks::CreateService do
end
it 'returns error' do
- is_expected.to eq(message: 'No Issue found for given params', status: :error, http_status: 404)
+ is_expected.to eq(message: 'No matching issue found. Make sure that you are adding a valid issue URL.', status: :error, http_status: 404)
end
it 'no relationship is created' do
@@ -52,7 +52,7 @@ RSpec.describe IssueLinks::CreateService do
it 'returns error' do
target_issuable.project.add_guest(user)
- is_expected.to eq(message: 'No Issue found for given params', status: :error, http_status: 404)
+ is_expected.to eq(message: 'No matching issue found. Make sure that you are adding a valid issue URL.', status: :error, http_status: 404)
end
it 'no relationship is created' do
diff --git a/spec/services/issues/after_create_service_spec.rb b/spec/services/issues/after_create_service_spec.rb
new file mode 100644
index 00000000000..bc9be3211d3
--- /dev/null
+++ b/spec/services/issues/after_create_service_spec.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Issues::AfterCreateService do
+ include AfterNextHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:assignee) { create(:user) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
+ let_it_be(:issue) { create(:issue, project: project, author: current_user, milestone: milestone, assignee_ids: [assignee.id]) }
+
+ subject(:after_create_service) { described_class.new(project, current_user) }
+
+ describe '#execute' do
+ it 'creates a pending todo for new assignee' do
+ attributes = {
+ project: project,
+ author: current_user,
+ user: assignee,
+ target_id: issue.id,
+ target_type: issue.class.name,
+ action: Todo::ASSIGNED,
+ state: :pending
+ }
+
+ expect { after_create_service.execute(issue) }.to change { Todo.where(attributes).count }.by(1)
+ end
+
+ it 'deletes milestone issues count cache' do
+ expect_next(Milestones::IssuesCountService, milestone)
+ .to receive(:delete_cache).and_call_original
+
+ after_create_service.execute(issue)
+ end
+
+ context 'with a regular issue' do
+ it_behaves_like 'does not track incident management event', :incident_management_incident_created do
+ subject { after_create_service.execute(issue) }
+ end
+ end
+
+ context 'with an incident issue' do
+ let(:issue) { create(:issue, :incident, project: project, author: current_user) }
+
+ it_behaves_like 'an incident management tracked event', :incident_management_incident_created do
+ subject { after_create_service.execute(issue) }
+ end
+ end
+ end
+end
diff --git a/spec/services/issues/build_service_spec.rb b/spec/services/issues/build_service_spec.rb
index 16433d49ca1..80fe2474ecd 100644
--- a/spec/services/issues/build_service_spec.rb
+++ b/spec/services/issues/build_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe Issues::BuildService do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:developer) { create(:user) }
let_it_be(:guest) { create(:user) }
+
let(:user) { developer }
before_all do
diff --git a/spec/services/issues/clone_service_spec.rb b/spec/services/issues/clone_service_spec.rb
index 9ceb4ffeec5..44180a322ca 100644
--- a/spec/services/issues/clone_service_spec.rb
+++ b/spec/services/issues/clone_service_spec.rb
@@ -242,6 +242,7 @@ RSpec.describe Issues::CloneService do
context 'issue with a design', :clean_gitlab_redis_shared_state do
let_it_be(:new_project) { create(:project) }
+
let!(:design) { create(:design, :with_lfs_file, issue: old_issue) }
let!(:note) { create(:diff_note_on_design, noteable: design, issue: old_issue, project: old_issue.project) }
let(:subject) { clone_service.execute(old_issue, new_project) }
diff --git a/spec/services/issues/create_service_spec.rb b/spec/services/issues/create_service_spec.rb
index d548e5ee74a..83c6373c335 100644
--- a/spec/services/issues/create_service_spec.rb
+++ b/spec/services/issues/create_service_spec.rb
@@ -3,12 +3,15 @@
require 'spec_helper'
RSpec.describe Issues::CreateService do
+ include AfterNextHelpers
+
let_it_be_with_reload(:project) { create(:project) }
let_it_be(:user) { create(:user) }
describe '#execute' do
let_it_be(:assignee) { create(:user) }
let_it_be(:milestone) { create(:milestone, project: project) }
+
let(:issue) { described_class.new(project, user, opts).execute }
context 'when params are valid' do
@@ -64,7 +67,6 @@ RSpec.describe Issues::CreateService do
it_behaves_like 'incident issue'
it_behaves_like 'has incident label'
- it_behaves_like 'an incident management tracked event', :incident_management_incident_created
it 'does create an incident label' do
expect { subject }
@@ -112,20 +114,6 @@ RSpec.describe Issues::CreateService do
end
end
- it 'creates a pending todo for new assignee' do
- attributes = {
- project: project,
- author: user,
- user: assignee,
- target_id: issue.id,
- target_type: issue.class.name,
- action: Todo::ASSIGNED,
- state: :pending
- }
-
- expect(Todo.where(attributes).count).to eq 1
- end
-
it 'moves the issue to the end, in an asynchronous worker' do
expect(IssuePlacementWorker).to receive(:perform_async).with(be_nil, Integer)
@@ -279,14 +267,6 @@ RSpec.describe Issues::CreateService do
end
end
- it 'deletes milestone issues count cache' do
- expect_next_instance_of(Milestones::IssuesCountService, milestone) do |service|
- expect(service).to receive(:delete_cache).and_call_original
- end
-
- issue
- end
-
it 'schedules a namespace onboarding create action worker' do
expect(Namespaces::OnboardingIssueCreatedWorker).to receive(:perform_async).with(project.namespace.id)
@@ -458,7 +438,7 @@ RSpec.describe Issues::CreateService do
end
context 'checking spam' do
- let(:request) { double(:request) }
+ let(:request) { double(:request, headers: nil) }
let(:api) { true }
let(:captcha_response) { 'abc123' }
let(:spam_log_id) { 1 }
diff --git a/spec/services/issues/export_csv_service_spec.rb b/spec/services/issues/export_csv_service_spec.rb
index d199f825276..d04480bec18 100644
--- a/spec/services/issues/export_csv_service_spec.rb
+++ b/spec/services/issues/export_csv_service_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Issues::ExportCsvService do
let_it_be(:project) { create(:project, :public, group: group) }
let_it_be(:issue) { create(:issue, project: project, author: user) }
let_it_be(:bad_issue) { create(:issue, project: project, author: user) }
+
subject { described_class.new(Issue.all, project) }
it 'renders csv to string' do
diff --git a/spec/services/issues/move_service_spec.rb b/spec/services/issues/move_service_spec.rb
index eb124f07900..2f29a2e2022 100644
--- a/spec/services/issues/move_service_spec.rb
+++ b/spec/services/issues/move_service_spec.rb
@@ -206,6 +206,7 @@ RSpec.describe Issues::MoveService do
context 'issue with a design', :clean_gitlab_redis_shared_state do
let_it_be(:new_project) { create(:project) }
+
let!(:design) { create(:design, :with_lfs_file, issue: old_issue) }
let!(:note) { create(:diff_note_on_design, noteable: design, issue: old_issue, project: old_issue.project) }
let(:subject) { move_service.execute(old_issue, new_project) }
diff --git a/spec/services/issues/related_branches_service_spec.rb b/spec/services/issues/related_branches_service_spec.rb
index a8a1f95e800..c9c029bca4f 100644
--- a/spec/services/issues/related_branches_service_spec.rb
+++ b/spec/services/issues/related_branches_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe Issues::RelatedBranchesService do
let_it_be(:developer) { create(:user) }
let_it_be(:issue) { create(:issue) }
+
let(:user) { developer }
subject { described_class.new(issue.project, user) }
diff --git a/spec/services/keys/expiry_notification_service_spec.rb b/spec/services/keys/expiry_notification_service_spec.rb
new file mode 100644
index 00000000000..1d1da179cf7
--- /dev/null
+++ b/spec/services/keys/expiry_notification_service_spec.rb
@@ -0,0 +1,97 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Keys::ExpiryNotificationService do
+ let_it_be_with_reload(:user) { create(:user) }
+
+ let(:params) { { keys: user.keys, expiring_soon: expiring_soon } }
+
+ subject { described_class.new(user, params) }
+
+ shared_examples 'sends a notification' do
+ it do
+ perform_enqueued_jobs do
+ subject.execute
+ end
+ should_email(user)
+ end
+ end
+
+ shared_examples 'uses notification service to send email to the user' do |notification_method|
+ it do
+ expect_next_instance_of(NotificationService) do |notification_service|
+ expect(notification_service).to receive(notification_method).with(key.user, [key.fingerprint])
+ end
+
+ subject.execute
+ end
+ end
+
+ shared_examples 'does not send notification' do
+ it do
+ perform_enqueued_jobs do
+ subject.execute
+ end
+ should_not_email(user)
+ end
+ end
+
+ shared_context 'block user' do
+ before do
+ user.block!
+ end
+ end
+
+ context 'with key expiring today', :mailer do
+ let_it_be_with_reload(:key) { create(:key, expires_at: Time.current, user: user) }
+
+ let(:expiring_soon) { false }
+
+ context 'when user has permission to receive notification' do
+ it_behaves_like 'sends a notification'
+
+ it_behaves_like 'uses notification service to send email to the user', :ssh_key_expired
+
+ it 'updates notified column' do
+ expect { subject.execute }.to change { key.reload.expiry_notification_delivered_at }
+ end
+ end
+
+ context 'when user does NOT have permission to receive notification' do
+ include_context 'block user'
+
+ it_behaves_like 'does not send notification'
+
+ it 'does not update notified column' do
+ expect { subject.execute }.not_to change { key.reload.expiry_notification_delivered_at }
+ end
+ end
+ end
+
+ context 'with key expiring soon', :mailer do
+ let_it_be_with_reload(:key) { create(:key, expires_at: 3.days.from_now, user: user) }
+
+ let(:expiring_soon) { true }
+
+ context 'when user has permission to receive notification' do
+ it_behaves_like 'sends a notification'
+
+ it_behaves_like 'uses notification service to send email to the user', :ssh_key_expiring_soon
+
+ it 'updates notified column' do
+ expect { subject.execute }.to change { key.reload.before_expiry_notification_delivered_at }
+ end
+ end
+
+ context 'when user does NOT have permission to receive notification' do
+ include_context 'block user'
+
+ it_behaves_like 'does not send notification'
+
+ it 'does not update notified column' do
+ expect { subject.execute }.not_to change { key.reload.before_expiry_notification_delivered_at }
+ end
+ end
+ end
+end
diff --git a/spec/services/members/create_service_spec.rb b/spec/services/members/create_service_spec.rb
index 50efee9f43c..916941e1111 100644
--- a/spec/services/members/create_service_spec.rb
+++ b/spec/services/members/create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::CreateService, :clean_gitlab_redis_shared_state, :sidekiq_inline do
+RSpec.describe Members::CreateService, :aggregate_failures, :clean_gitlab_redis_shared_state, :sidekiq_inline do
let_it_be(:source) { create(:project) }
let_it_be(:user) { create(:user) }
let_it_be(:member) { create(:user) }
@@ -10,7 +10,7 @@ RSpec.describe Members::CreateService, :clean_gitlab_redis_shared_state, :sideki
let_it_be(:access_level) { Gitlab::Access::GUEST }
let(:params) { { user_ids: user_ids, access_level: access_level } }
- subject(:execute_service) { described_class.new(user, params).execute(source) }
+ subject(:execute_service) { described_class.new(user, params.merge({ source: source })).execute }
before do
if source.is_a?(Project)
diff --git a/spec/services/members/destroy_service_spec.rb b/spec/services/members/destroy_service_spec.rb
index 4f731ad5852..b9f382d3cd8 100644
--- a/spec/services/members/destroy_service_spec.rb
+++ b/spec/services/members/destroy_service_spec.rb
@@ -289,60 +289,137 @@ RSpec.describe Members::DestroyService do
let(:group_project) { create(:project, :public, group: group) }
let(:control_project) { create(:project, group: subsubgroup) }
- before do
- create(:group_member, :developer, group: subsubgroup, user: member_user)
- create(:project_member, :invited, project: group_project, created_by: member_user)
- create(:group_member, :invited, group: group, created_by: member_user)
- create(:project_member, :invited, project: subsubproject, created_by: member_user)
- create(:group_member, :invited, group: subgroup, created_by: member_user)
+ context 'with memberships' do
+ before do
+ subgroup.add_developer(member_user)
+ subsubgroup.add_developer(member_user)
+ subsubproject.add_developer(member_user)
+ group_project.add_developer(member_user)
+ control_project.add_maintainer(user)
+ group.add_owner(user)
+
+ @group_member = create(:group_member, :developer, group: group, user: member_user)
+ end
- subsubproject.add_developer(member_user)
- control_project.add_maintainer(user)
- group.add_owner(user)
+ context 'with skipping of subresources' do
+ before do
+ described_class.new(user).execute(@group_member, skip_subresources: true)
+ end
- group_member = create(:group_member, :developer, group: group, user: member_user)
+ it 'removes the group membership' do
+ expect(group.members.map(&:user)).not_to include(member_user)
+ end
- described_class.new(user).execute(group_member)
- end
+ it 'does not remove the project membership' do
+ expect(group_project.members.map(&:user)).to include(member_user)
+ end
- it 'removes the project membership' do
- expect(group_project.members.map(&:user)).not_to include(member_user)
- end
+ it 'does not remove the subgroup membership' do
+ expect(subgroup.members.map(&:user)).to include(member_user)
+ end
- it 'removes the group membership' do
- expect(group.members.map(&:user)).not_to include(member_user)
- end
+ it 'does not remove the subsubgroup membership' do
+ expect(subsubgroup.members.map(&:user)).to include(member_user)
+ end
- it 'removes the subgroup membership' do
- expect(subgroup.members.map(&:user)).not_to include(member_user)
- end
+ it 'does not remove the subsubproject membership' do
+ expect(subsubproject.members.map(&:user)).to include(member_user)
+ end
- it 'removes the subsubgroup membership' do
- expect(subsubgroup.members.map(&:user)).not_to include(member_user)
- end
+ it 'does not remove the user from the control project' do
+ expect(control_project.members.map(&:user)).to include(user)
+ end
+ end
- it 'removes the subsubproject membership' do
- expect(subsubproject.members.map(&:user)).not_to include(member_user)
- end
+ context 'without skipping of subresources' do
+ before do
+ described_class.new(user).execute(@group_member, skip_subresources: false)
+ end
- it 'does not remove the user from the control project' do
- expect(control_project.members.map(&:user)).to include(user)
- end
+ it 'removes the project membership' do
+ expect(group_project.members.map(&:user)).not_to include(member_user)
+ end
- it 'removes group members invited by deleted user' do
- expect(group.members.not_accepted_invitations_by_user(member_user)).to be_empty
- end
+ it 'removes the group membership' do
+ expect(group.members.map(&:user)).not_to include(member_user)
+ end
- it 'removes project members invited by deleted user' do
- expect(group_project.members.not_accepted_invitations_by_user(member_user)).to be_empty
- end
+ it 'removes the subgroup membership' do
+ expect(subgroup.members.map(&:user)).not_to include(member_user)
+ end
+
+ it 'removes the subsubgroup membership' do
+ expect(subsubgroup.members.map(&:user)).not_to include(member_user)
+ end
+
+ it 'removes the subsubproject membership' do
+ expect(subsubproject.members.map(&:user)).not_to include(member_user)
+ end
- it 'removes subgroup members invited by deleted user' do
- expect(subgroup.members.not_accepted_invitations_by_user(member_user)).to be_empty
+ it 'does not remove the user from the control project' do
+ expect(control_project.members.map(&:user)).to include(user)
+ end
+ end
end
- it 'removes subproject members invited by deleted user' do
- expect(subsubproject.members.not_accepted_invitations_by_user(member_user)).to be_empty
+ context 'with invites' do
+ before do
+ create(:group_member, :developer, group: subsubgroup, user: member_user)
+ create(:project_member, :invited, project: group_project, created_by: member_user)
+ create(:group_member, :invited, group: group, created_by: member_user)
+ create(:project_member, :invited, project: subsubproject, created_by: member_user)
+ create(:group_member, :invited, group: subgroup, created_by: member_user)
+
+ subsubproject.add_developer(member_user)
+ control_project.add_maintainer(user)
+ group.add_owner(user)
+
+ @group_member = create(:group_member, :developer, group: group, user: member_user)
+ end
+
+ context 'with skipping of subresources' do
+ before do
+ described_class.new(user).execute(@group_member, skip_subresources: true)
+ end
+
+ it 'does not remove group members invited by deleted user' do
+ expect(group.members.not_accepted_invitations_by_user(member_user)).not_to be_empty
+ end
+
+ it 'does not remove project members invited by deleted user' do
+ expect(group_project.members.not_accepted_invitations_by_user(member_user)).not_to be_empty
+ end
+
+ it 'does not remove subgroup members invited by deleted user' do
+ expect(subgroup.members.not_accepted_invitations_by_user(member_user)).not_to be_empty
+ end
+
+ it 'does not remove subproject members invited by deleted user' do
+ expect(subsubproject.members.not_accepted_invitations_by_user(member_user)).not_to be_empty
+ end
+ end
+
+ context 'without skipping of subresources' do
+ before do
+ described_class.new(user).execute(@group_member, skip_subresources: false)
+ end
+
+ it 'removes group members invited by deleted user' do
+ expect(group.members.not_accepted_invitations_by_user(member_user)).to be_empty
+ end
+
+ it 'removes project members invited by deleted user' do
+ expect(group_project.members.not_accepted_invitations_by_user(member_user)).to be_empty
+ end
+
+ it 'removes subgroup members invited by deleted user' do
+ expect(subgroup.members.not_accepted_invitations_by_user(member_user)).to be_empty
+ end
+
+ it 'removes subproject members invited by deleted user' do
+ expect(subsubproject.members.not_accepted_invitations_by_user(member_user)).to be_empty
+ end
+ end
end
end
diff --git a/spec/services/members/invite_service_spec.rb b/spec/services/members/invite_service_spec.rb
index cced93896a5..d7fd7d5b2ca 100644
--- a/spec/services/members/invite_service_spec.rb
+++ b/spec/services/members/invite_service_spec.rb
@@ -2,29 +2,43 @@
require 'spec_helper'
-RSpec.describe Members::InviteService, :aggregate_failures do
+RSpec.describe Members::InviteService, :aggregate_failures, :clean_gitlab_redis_shared_state, :sidekiq_inline do
let_it_be(:project) { create(:project) }
let_it_be(:user) { project.owner }
let_it_be(:project_user) { create(:user) }
+ let_it_be(:namespace) { project.namespace }
let(:params) { {} }
- let(:base_params) { { access_level: Gitlab::Access::GUEST } }
+ let(:base_params) { { access_level: Gitlab::Access::GUEST, source: project } }
- subject(:result) { described_class.new(user, base_params.merge(params)).execute(project) }
+ subject(:result) { described_class.new(user, base_params.merge(params)).execute }
- context 'when email is previously unused by current members' do
+ context 'when there is a valid member invited' do
let(:params) { { email: 'email@example.org' } }
it 'successfully creates a member' do
- expect { result }.to change(ProjectMember, :count).by(1)
+ expect_to_create_members(count: 1)
expect(result[:status]).to eq(:success)
end
+
+ it_behaves_like 'records an onboarding progress action', :user_added
+ end
+
+ context 'when email is not a valid email' do
+ let(:params) { { email: '_bogus_' } }
+
+ it 'returns an error' do
+ expect_not_to_create_members
+ expect(result[:message]['_bogus_']).to eq("Invite email is invalid")
+ end
+
+ it_behaves_like 'does not record an onboarding progress action'
end
context 'when emails are passed as an array' do
let(:params) { { email: %w[email@example.org email2@example.org] } }
it 'successfully creates members' do
- expect { result }.to change(ProjectMember, :count).by(2)
+ expect_to_create_members(count: 2)
expect(result[:status]).to eq(:success)
end
end
@@ -33,25 +47,25 @@ RSpec.describe Members::InviteService, :aggregate_failures do
let(:params) { { email: '' } }
it 'returns an error' do
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Email cannot be blank')
+ expect_not_to_create_members
+ expect(result[:message]).to eq('Emails cannot be blank')
end
end
context 'when email param is not included' do
it 'returns an error' do
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq('Email cannot be blank')
+ expect_not_to_create_members
+ expect(result[:message]).to eq('Emails cannot be blank')
end
end
- context 'when email is not a valid email' do
+ context 'when email is not a valid email format' do
let(:params) { { email: '_bogus_' } }
it 'returns an error' do
expect { result }.not_to change(ProjectMember, :count)
expect(result[:status]).to eq(:error)
- expect(result[:message]['_bogus_']).to eq("Invite email is invalid")
+ expect(result[:message][params[:email]]).to eq("Invite email is invalid")
end
end
@@ -59,7 +73,7 @@ RSpec.describe Members::InviteService, :aggregate_failures do
let(:params) { { email: 'email@example.org,email@example.org' } }
it 'only creates one member per unique address' do
- expect { result }.to change(ProjectMember, :count).by(1)
+ expect_to_create_members(count: 1)
expect(result[:status]).to eq(:success)
end
end
@@ -71,8 +85,7 @@ RSpec.describe Members::InviteService, :aggregate_failures do
let(:params) { { email: emails } }
it 'limits the number of emails to 100' do
- expect { result }.not_to change(ProjectMember, :count)
- expect(result[:status]).to eq(:error)
+ expect_not_to_create_members
expect(result[:message]).to eq('Too many users specified (limit is 100)')
end
end
@@ -81,8 +94,7 @@ RSpec.describe Members::InviteService, :aggregate_failures do
let(:params) { { email: 'email@example.org,email2@example.org', limit: 1 } }
it 'limits the number of emails to the limit supplied' do
- expect { result }.not_to change(ProjectMember, :count)
- expect(result[:status]).to eq(:error)
+ expect_not_to_create_members
expect(result[:message]).to eq('Too many users specified (limit is 1)')
end
end
@@ -91,7 +103,7 @@ RSpec.describe Members::InviteService, :aggregate_failures do
let(:params) { { email: emails, limit: -1 } }
it 'does not limit number of emails' do
- expect { result }.to change(ProjectMember, :count).by(101)
+ expect_to_create_members(count: 101)
expect(result[:status]).to eq(:success)
end
end
@@ -101,7 +113,7 @@ RSpec.describe Members::InviteService, :aggregate_failures do
let(:params) { { email: project_user.email } }
it 'adds an existing user to members' do
- expect { result }.to change(ProjectMember, :count).by(1)
+ expect_to_create_members(count: 1)
expect(result[:status]).to eq(:success)
expect(project.users).to include project_user
end
@@ -111,9 +123,9 @@ RSpec.describe Members::InviteService, :aggregate_failures do
let(:params) { { email: project_user.email, access_level: -1 } }
it 'returns an error' do
- expect { result }.not_to change(ProjectMember, :count)
- expect(result[:status]).to eq(:error)
- expect(result[:message][project_user.email]).to eq("Access level is not included in the list")
+ expect_not_to_create_members
+ expect(result[:message][project_user.email])
+ .to eq("Access level is not included in the list")
end
end
@@ -122,9 +134,10 @@ RSpec.describe Members::InviteService, :aggregate_failures do
let(:params) { { email: "#{invited_member.invite_email},#{project_user.email}" } }
it 'adds new email and returns an error for the already invited email' do
- expect { result }.to change(ProjectMember, :count).by(1)
+ expect_to_create_members(count: 1)
expect(result[:status]).to eq(:error)
- expect(result[:message][invited_member.invite_email]).to eq("Member already invited to #{project.name}")
+ expect(result[:message][invited_member.invite_email])
+ .to eq("Invite email has already been taken")
expect(project.users).to include project_user
end
end
@@ -134,10 +147,10 @@ RSpec.describe Members::InviteService, :aggregate_failures do
let(:params) { { email: "#{requested_member.user.email},#{project_user.email}" } }
it 'adds new email and returns an error for the already invited email' do
- expect { result }.to change(ProjectMember, :count).by(1)
+ expect_to_create_members(count: 1)
expect(result[:status]).to eq(:error)
expect(result[:message][requested_member.user.email])
- .to eq("Member cannot be invited because they already requested to join #{project.name}")
+ .to eq("User already exists in source")
expect(project.users).to include project_user
end
end
@@ -147,10 +160,20 @@ RSpec.describe Members::InviteService, :aggregate_failures do
let(:params) { { email: "#{existing_member.user.email},#{project_user.email}" } }
it 'adds new email and returns an error for the already invited email' do
- expect { result }.to change(ProjectMember, :count).by(1)
+ expect_to_create_members(count: 1)
expect(result[:status]).to eq(:error)
- expect(result[:message][existing_member.user.email]).to eq("Already a member of #{project.name}")
+ expect(result[:message][existing_member.user.email])
+ .to eq("User already exists in source")
expect(project.users).to include project_user
end
end
+
+ def expect_to_create_members(count:)
+ expect { result }.to change(ProjectMember, :count).by(count)
+ end
+
+ def expect_not_to_create_members
+ expect { result }.not_to change(ProjectMember, :count)
+ expect(result[:status]).to eq(:error)
+ end
end
diff --git a/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb b/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb
index 3c81ad6722d..6edaa91b8b2 100644
--- a/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb
+++ b/spec/services/merge_requests/add_todo_when_build_fails_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MergeRequests::AddTodoWhenBuildFailsService do
+RSpec.describe ::MergeRequests::AddTodoWhenBuildFailsService do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:sha) { '1234567890abcdef1234567890abcdef12345678' }
@@ -24,8 +24,8 @@ RSpec.describe MergeRequests::AddTodoWhenBuildFailsService do
before do
allow_any_instance_of(MergeRequest)
- .to receive(:head_pipeline)
- .and_return(pipeline)
+ .to receive(:head_pipeline_id)
+ .and_return(pipeline.id)
allow(service).to receive(:todo_service).and_return(todo_service)
end
diff --git a/spec/services/merge_requests/after_create_service_spec.rb b/spec/services/merge_requests/after_create_service_spec.rb
index dce351d8a31..e1f28e32164 100644
--- a/spec/services/merge_requests/after_create_service_spec.rb
+++ b/spec/services/merge_requests/after_create_service_spec.rb
@@ -93,5 +93,109 @@ RSpec.describe MergeRequests::AfterCreateService do
expect(merge_request.reload).to be_unchecked
end
end
+
+ it 'increments the usage data counter of create event' do
+ counter = Gitlab::UsageDataCounters::MergeRequestCounter
+
+ expect { execute_service }.to change { counter.read(:create) }.by(1)
+ end
+
+ context 'with a milestone' do
+ let(:milestone) { create(:milestone, project: merge_request.target_project) }
+
+ before do
+ merge_request.update!(milestone_id: milestone.id)
+ end
+
+ it 'deletes the cache key for milestone merge request counter', :use_clean_rails_memory_store_caching do
+ expect_next_instance_of(Milestones::MergeRequestsCountService, milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+
+ execute_service
+ end
+ end
+
+ context 'todos' do
+ it 'does not create todos' do
+ attributes = {
+ project: merge_request.target_project,
+ target_id: merge_request.id,
+ target_type: merge_request.class.name
+ }
+
+ expect { execute_service }.not_to change { Todo.where(attributes).count }
+ end
+
+ context 'when merge request is assigned to someone' do
+ let_it_be(:assignee) { create(:user) }
+ let_it_be(:merge_request) { create(:merge_request, assignees: [assignee]) }
+
+ it 'creates a todo for new assignee' do
+ attributes = {
+ project: merge_request.target_project,
+ author: merge_request.author,
+ user: assignee,
+ target_id: merge_request.id,
+ target_type: merge_request.class.name,
+ action: Todo::ASSIGNED,
+ state: :pending
+ }
+
+ expect { execute_service }.to change { Todo.where(attributes).count }.by(1)
+ end
+ end
+
+ context 'when reviewer is assigned' do
+ let_it_be(:reviewer) { create(:user) }
+ let_it_be(:merge_request) { create(:merge_request, reviewers: [reviewer]) }
+
+ it 'creates a todo for new reviewer' do
+ attributes = {
+ project: merge_request.target_project,
+ author: merge_request.author,
+ user: reviewer,
+ target_id: merge_request.id,
+ target_type: merge_request.class.name,
+ action: Todo::REVIEW_REQUESTED,
+ state: :pending
+ }
+
+ expect { execute_service }.to change { Todo.where(attributes).count }.by(1)
+ end
+ end
+ end
+
+ context 'when saving references to issues that the created merge request closes' do
+ let_it_be(:first_issue) { create(:issue, project: merge_request.target_project) }
+ let_it_be(:second_issue) { create(:issue, project: merge_request.target_project) }
+
+ it 'creates a `MergeRequestsClosingIssues` record for each issue' do
+ merge_request.description = "Closes #{first_issue.to_reference} and #{second_issue.to_reference}"
+ merge_request.source_branch = "feature"
+ merge_request.target_branch = merge_request.target_project.default_branch
+ merge_request.save!
+
+ execute_service
+
+ issue_ids = MergeRequestsClosingIssues.where(merge_request: merge_request).pluck(:issue_id)
+ expect(issue_ids).to match_array([first_issue.id, second_issue.id])
+ end
+ end
+
+ it 'tracks merge request creation in usage data' do
+ expect(Gitlab::UsageDataCounters::MergeRequestCounter).to receive(:count).with(:create)
+
+ execute_service
+ end
+
+ it 'calls MergeRequests::LinkLfsObjectsService#execute' do
+ service = instance_spy(MergeRequests::LinkLfsObjectsService)
+ allow(MergeRequests::LinkLfsObjectsService).to receive(:new).with(merge_request.target_project).and_return(service)
+
+ execute_service
+
+ expect(service).to have_received(:execute).with(merge_request)
+ end
end
end
diff --git a/spec/services/merge_requests/base_service_spec.rb b/spec/services/merge_requests/base_service_spec.rb
index 83431105545..d8ba2bc43fb 100644
--- a/spec/services/merge_requests/base_service_spec.rb
+++ b/spec/services/merge_requests/base_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe MergeRequests::BaseService do
include ProjectForksHelper
let_it_be(:project) { create(:project, :repository) }
+
let(:title) { 'Awesome merge_request' }
let(:params) do
{
diff --git a/spec/services/merge_requests/create_pipeline_service_spec.rb b/spec/services/merge_requests/create_pipeline_service_spec.rb
index 4dd70627977..3e2e940dc24 100644
--- a/spec/services/merge_requests/create_pipeline_service_spec.rb
+++ b/spec/services/merge_requests/create_pipeline_service_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
let_it_be(:project, reload: true) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
+
let(:service) { described_class.new(project, actor, params) }
let(:actor) { user }
let(:params) { {} }
@@ -50,6 +51,7 @@ RSpec.describe MergeRequests::CreatePipelineService do
context 'with fork merge request' do
let_it_be(:forked_project) { fork_project(project, nil, repository: true, target_project: create(:project, :private, :repository)) }
+
let(:source_project) { forked_project }
context 'when actor has permission to create pipelines in target project' do
diff --git a/spec/services/merge_requests/create_service_spec.rb b/spec/services/merge_requests/create_service_spec.rb
index 4f47a22b07c..f2bc55103f0 100644
--- a/spec/services/merge_requests/create_service_spec.rb
+++ b/spec/services/merge_requests/create_service_spec.rb
@@ -47,16 +47,6 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
.to change { project.open_merge_requests_count }.from(0).to(1)
end
- it 'does not creates todos' do
- attributes = {
- project: project,
- target_id: merge_request.id,
- target_type: merge_request.class.name
- }
-
- expect(Todo.where(attributes).count).to be_zero
- end
-
it 'creates exactly 1 create MR event', :sidekiq_might_not_need_inline do
attributes = {
action: :created,
@@ -67,6 +57,10 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
expect(Event.where(attributes).count).to eq(1)
end
+ it 'sets the merge_status to preparing' do
+ expect(merge_request.reload).to be_preparing
+ end
+
describe 'when marked with /wip' do
context 'in title and in description' do
let(:opts) do
@@ -113,20 +107,6 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
it { expect(merge_request.assignees).to eq([user2]) }
-
- it 'creates a todo for new assignee' do
- attributes = {
- project: project,
- author: user,
- user: user2,
- target_id: merge_request.id,
- target_type: merge_request.class.name,
- action: Todo::ASSIGNED,
- state: :pending
- }
-
- expect(Todo.where(attributes).count).to eq 1
- end
end
context 'when reviewer is assigned' do
@@ -142,20 +122,6 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
it { expect(merge_request.reviewers).to eq([user2]) }
- it 'creates a todo for new reviewer' do
- attributes = {
- project: project,
- author: user,
- user: user2,
- target_id: merge_request.id,
- target_type: merge_request.class.name,
- action: Todo::REVIEW_REQUESTED,
- state: :pending
- }
-
- expect(Todo.where(attributes).count).to eq 1
- end
-
it 'invalidates counter cache for reviewers', :use_clean_rails_memory_store_caching do
expect { merge_request }
.to change { user2.review_requested_open_merge_requests_count }
@@ -328,12 +294,6 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
end
- it 'increments the usage data counter of create event' do
- counter = Gitlab::UsageDataCounters::MergeRequestCounter
-
- expect { service.execute }.to change { counter.read(:create) }.by(1)
- end
-
context 'after_save callback to store_mentions' do
let(:labels) { create_pair(:label, project: project) }
let(:milestone) { create(:milestone, project: project) }
@@ -494,35 +454,6 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
end
end
- context 'while saving references to issues that the created merge request closes' do
- let(:first_issue) { create(:issue, project: project) }
- let(:second_issue) { create(:issue, project: project) }
-
- let(:opts) do
- {
- title: 'Awesome merge_request',
- source_branch: 'feature',
- target_branch: 'master',
- force_remove_source_branch: '1'
- }
- end
-
- before do
- project.add_maintainer(user)
- project.add_developer(user2)
- end
-
- it 'creates a `MergeRequestsClosingIssues` record for each issue' do
- issue_closing_opts = opts.merge(description: "Closes #{first_issue.to_reference} and #{second_issue.to_reference}")
- service = described_class.new(project, user, issue_closing_opts)
- allow(service).to receive(:execute_hooks)
- merge_request = service.execute
-
- issue_ids = MergeRequestsClosingIssues.where(merge_request: merge_request).pluck(:issue_id)
- expect(issue_ids).to match_array([first_issue.id, second_issue.id])
- end
- end
-
context 'when source and target projects are different' do
let(:target_project) { fork_project(project, nil, repository: true) }
@@ -571,14 +502,6 @@ RSpec.describe MergeRequests::CreateService, :clean_gitlab_redis_shared_state do
expect(merge_request).to be_persisted
end
- it 'calls MergeRequests::LinkLfsObjectsService#execute', :sidekiq_might_not_need_inline do
- expect_next_instance_of(MergeRequests::LinkLfsObjectsService) do |service|
- expect(service).to receive(:execute).with(instance_of(MergeRequest))
- end
-
- described_class.new(project, user, opts).execute
- end
-
it 'does not create the merge request when the target project is archived' do
target_project.update!(archived: true)
diff --git a/spec/services/merge_requests/export_csv_service_spec.rb b/spec/services/merge_requests/export_csv_service_spec.rb
index 4ce032c396e..97217e979a5 100644
--- a/spec/services/merge_requests/export_csv_service_spec.rb
+++ b/spec/services/merge_requests/export_csv_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe MergeRequests::ExportCsvService do
let_it_be(:merge_request) { create(:merge_request) }
+
let(:csv) { CSV.parse(subject.csv_data, headers: true).first }
subject { described_class.new(MergeRequest.where(id: merge_request.id), merge_request.project) }
@@ -46,6 +47,7 @@ RSpec.describe MergeRequests::ExportCsvService do
describe 'approvers' do
context 'when approved' do
let_it_be(:merge_request) { create(:merge_request) }
+
let(:approvers) { create_list(:user, 2) }
before do
diff --git a/spec/services/merge_requests/handle_assignees_change_service_spec.rb b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
new file mode 100644
index 00000000000..cc595aab04b
--- /dev/null
+++ b/spec/services/merge_requests/handle_assignees_change_service_spec.rb
@@ -0,0 +1,114 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::HandleAssigneesChangeService do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:assignee) { create(:user) }
+ let_it_be(:merge_request) { create(:merge_request, author: user, source_project: project, assignees: [assignee]) }
+ let_it_be(:old_assignees) { create_list(:user, 3) }
+
+ let(:options) { {} }
+ let(:service) { described_class.new(project, user) }
+
+ before_all do
+ project.add_maintainer(user)
+ project.add_developer(assignee)
+
+ old_assignees.each do |old_assignee|
+ project.add_developer(old_assignee)
+ end
+ end
+
+ describe '#async_execute' do
+ def async_execute
+ service.async_execute(merge_request, old_assignees, options)
+ end
+
+ it 'performs MergeRequests::HandleAssigneesChangeWorker asynchronously' do
+ expect(MergeRequests::HandleAssigneesChangeWorker)
+ .to receive(:perform_async)
+ .with(
+ merge_request.id,
+ user.id,
+ old_assignees.map(&:id),
+ options
+ )
+
+ async_execute
+ end
+
+ context 'when async_handle_merge_request_assignees_change feature is disabled' do
+ before do
+ stub_feature_flags(async_handle_merge_request_assignees_change: false)
+ end
+
+ it 'calls #execute' do
+ expect(service).to receive(:execute).with(merge_request, old_assignees, options)
+
+ async_execute
+ end
+ end
+ end
+
+ describe '#execute' do
+ def execute
+ service.execute(merge_request, old_assignees, options)
+ end
+
+ it 'creates assignee note' do
+ execute
+
+ note = merge_request.notes.last
+
+ expect(note).not_to be_nil
+ expect(note.note).to include "assigned to #{assignee.to_reference} and unassigned #{old_assignees.map(&:to_reference).to_sentence}"
+ end
+
+ it 'sends email notifications to old and new assignees', :mailer, :sidekiq_inline do
+ perform_enqueued_jobs do
+ execute
+ end
+
+ should_email(assignee)
+ old_assignees.each do |old_assignee|
+ should_email(old_assignee)
+ end
+ end
+
+ it 'creates pending todo for assignee' do
+ execute
+
+ todo = assignee.todos.last
+
+ expect(todo).to be_pending
+ end
+
+ it 'tracks users assigned event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_users_assigned_to_mr).once.with(users: [assignee])
+
+ execute
+ end
+
+ it 'tracks assignees changed event' do
+ expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
+ .to receive(:track_assignees_changed_action).once.with(user: user)
+
+ execute
+ end
+
+ context 'when execute_hooks option is set to true' do
+ let(:options) { { execute_hooks: true } }
+
+ it 'executes hooks and services' do
+ expect(merge_request.project).to receive(:execute_hooks).with(anything, :merge_request_hooks)
+ expect(merge_request.project).to receive(:execute_services).with(anything, :merge_request_hooks)
+ expect(service).to receive(:enqueue_jira_connect_messages_for).with(merge_request)
+
+ execute
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/merge_orchestration_service_spec.rb b/spec/services/merge_requests/merge_orchestration_service_spec.rb
index 67dbb5a1a01..da37cc97857 100644
--- a/spec/services/merge_requests/merge_orchestration_service_spec.rb
+++ b/spec/services/merge_requests/merge_orchestration_service_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe MergeRequests::MergeOrchestrationService do
let_it_be(:maintainer) { create(:user) }
+
let(:merge_params) { { sha: merge_request.diff_head_sha } }
let(:user) { maintainer }
let(:service) { described_class.new(project, user, merge_params) }
diff --git a/spec/services/merge_requests/merge_service_spec.rb b/spec/services/merge_requests/merge_service_spec.rb
index 87e5750ce6e..c73cbad9d2f 100644
--- a/spec/services/merge_requests/merge_service_spec.rb
+++ b/spec/services/merge_requests/merge_service_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe MergeRequests::MergeService do
let_it_be(:user) { create(:user) }
let_it_be(:user2) { create(:user) }
+
let(:merge_request) { create(:merge_request, :simple, author: user2, assignees: [user2]) }
let(:project) { merge_request.project }
@@ -166,20 +167,6 @@ RSpec.describe MergeRequests::MergeService do
service.execute(merge_request)
end
- context 'when jira_issue_transition_id is not present' do
- before do
- allow_any_instance_of(JIRA::Resource::Issue).to receive(:resolution).and_return(nil)
- end
-
- it 'does not close issue' do
- jira_tracker.update!(jira_issue_transition_id: nil)
-
- expect_any_instance_of(JiraService).not_to receive(:transition_issue)
-
- service.execute(merge_request)
- end
- end
-
context 'wrong issue markdown' do
it 'does not close issues on Jira issue tracker' do
jira_issue = ExternalIssue.new('#JIRA-123', project)
diff --git a/spec/services/merge_requests/merge_to_ref_service_spec.rb b/spec/services/merge_requests/merge_to_ref_service_spec.rb
index 14ef5b0b772..938165a807c 100644
--- a/spec/services/merge_requests/merge_to_ref_service_spec.rb
+++ b/spec/services/merge_requests/merge_to_ref_service_spec.rb
@@ -68,6 +68,7 @@ RSpec.describe MergeRequests::MergeToRefService do
end
let_it_be(:user) { create(:user) }
+
let(:merge_request) { create(:merge_request, :simple) }
let(:project) { merge_request.project }
@@ -226,6 +227,7 @@ RSpec.describe MergeRequests::MergeToRefService do
describe 'cascading merge refs' do
let_it_be(:project) { create(:project, :repository) }
+
let(:params) { { commit_message: 'Cascading merge', first_parent_ref: first_parent_ref, target_ref: target_ref, sha: merge_request.diff_head_sha } }
context 'when first merge happens' do
@@ -257,8 +259,9 @@ RSpec.describe MergeRequests::MergeToRefService do
let(:params) { { allow_conflicts: true } }
it 'calls merge_to_ref with allow_conflicts param' do
- expect(project.repository).to receive(:merge_to_ref)
- .with(anything, anything, anything, anything, anything, anything, true)
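+ # Assert the allow_conflicts keyword argument explicitly rather than matching a list of positional placeholders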
+ expect(project.repository).to receive(:merge_to_ref) do |user, **kwargs|
+ expect(kwargs[:allow_conflicts]).to eq(true)
+ end.and_call_original
service.execute(merge_request)
end
diff --git a/spec/services/merge_requests/push_options_handler_service_spec.rb b/spec/services/merge_requests/push_options_handler_service_spec.rb
index c2769d4fa88..b5086ea3a82 100644
--- a/spec/services/merge_requests/push_options_handler_service_spec.rb
+++ b/spec/services/merge_requests/push_options_handler_service_spec.rb
@@ -6,10 +6,12 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
include ProjectForksHelper
let_it_be(:project) { create(:project, :public, :repository) }
- let_it_be(:user) { create(:user, developer_projects: [project]) }
- let_it_be(:forked_project) { fork_project(project, user, repository: true) }
+ let_it_be(:user1) { create(:user, developer_projects: [project]) }
+ let_it_be(:user2) { create(:user, developer_projects: [project]) }
+ let_it_be(:user3) { create(:user, developer_projects: [project]) }
+ let_it_be(:forked_project) { fork_project(project, user1, repository: true) }
- let(:service) { described_class.new(project, user, changes, push_options) }
+ let(:service) { described_class.new(project, user1, changes, push_options) }
let(:source_branch) { 'fix' }
let(:target_branch) { 'feature' }
let(:title) { 'my title' }
@@ -23,32 +25,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
let(:default_branch_changes) { "d14d6c0abdd253381df51a723d58691b2ee1ab08 570e7b2abdd848b95f2f578043fc23bd6f6fd24d refs/heads/#{project.default_branch}" }
let(:error_mr_required) { "A merge_request.create push option is required to create a merge request for branch #{source_branch}" }
- shared_examples_for 'a service that can create a merge request' do
- subject(:last_mr) { MergeRequest.last }
-
- it 'creates a merge request with the correct target branch and assigned user' do
- branch = push_options[:target] || project.default_branch
-
- expect { service.execute }.to change { MergeRequest.count }.by(1)
- expect(last_mr.target_branch).to eq(branch)
- expect(last_mr.assignees).to contain_exactly(user)
- end
-
- context 'when project has been forked', :sidekiq_might_not_need_inline do
- let(:forked_project) { fork_project(project, user, repository: true) }
- let(:service) { described_class.new(forked_project, user, changes, push_options) }
-
- before do
- allow(forked_project).to receive(:empty_repo?).and_return(false)
- end
-
- it 'sets the correct source and target project' do
- service.execute
-
- expect(last_mr.source_project).to eq(forked_project)
- expect(last_mr.target_project).to eq(project)
- end
- end
+ before do
+ stub_licensed_features(multiple_merge_request_assignees: false)
end
shared_examples_for 'a service that can set the target of a merge request' do
@@ -91,7 +69,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
expect(last_mr.auto_merge_enabled).to eq(true)
expect(last_mr.auto_merge_strategy).to eq(AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
- expect(last_mr.merge_user).to eq(user)
+ expect(last_mr.merge_user).to eq(user1)
expect(last_mr.merge_params['sha']).to eq(change[:newrev])
end
end
@@ -116,12 +94,6 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
end
end
- shared_examples_for 'a service that does not create a merge request' do
- it do
- expect { service.execute }.not_to change { MergeRequest.count }
- end
- end
-
shared_examples_for 'a service that does not update a merge request' do
it do
expect { service.execute }.not_to change { MergeRequest.maximum(:updated_at) }
@@ -133,6 +105,18 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
include_examples 'a service that does not update a merge request'
end
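+ # Shared examples replacing the per-push-option copies of the deleted-branch and default-branch contexts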
+ shared_examples 'with a deleted branch' do
+ let(:changes) { deleted_branch_changes }
+
+ it_behaves_like 'a service that does nothing'
+ end
+
+ shared_examples 'with the project default branch' do
+ let(:changes) { default_branch_changes }
+
+ it_behaves_like 'a service that does nothing'
+ end
+
describe '`create` push option' do
let(:push_options) { { create: true } }
@@ -155,17 +139,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
-
- it_behaves_like 'a service that does nothing'
- end
-
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
-
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
end
describe '`merge_when_pipeline_succeeds` push option' do
@@ -217,17 +192,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that can set the merge request to merge when pipeline succeeds'
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
-
- it_behaves_like 'a service that does nothing'
- end
-
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
-
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
end
describe '`remove_source_branch` push option' do
@@ -239,11 +205,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
- error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
-
service.execute
- expect(service.errors).to include(error)
+ expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@@ -281,17 +245,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that can remove the source branch when it is merged'
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
-
- it_behaves_like 'a service that does nothing'
- end
-
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
-
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
end
describe '`target` push option' do
@@ -343,17 +298,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that can set the target of a merge request'
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
-
- it_behaves_like 'a service that does nothing'
- end
-
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
-
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
end
describe '`title` push option' do
@@ -405,17 +351,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that can set the title of a merge request'
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
-
- it_behaves_like 'a service that does nothing'
- end
-
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
-
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
end
describe '`description` push option' do
@@ -467,17 +404,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that can set the description of a merge request'
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
-
- it_behaves_like 'a service that does nothing'
- end
-
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
-
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
end
describe '`label` push option' do
@@ -529,17 +457,8 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that can change labels of a merge request', 2
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
-
- it_behaves_like 'a service that does nothing'
- end
-
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
-
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
end
describe '`unlabel` push option' do
@@ -551,11 +470,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
- error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
-
service.execute
- expect(service.errors).to include(error)
+ expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@@ -572,11 +489,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that does not create a merge request'
it 'adds an error to the service' do
- error = "A merge_request.create push option is required to create a merge request for branch #{source_branch}"
-
service.execute
- expect(service.errors).to include(error)
+ expect(service.errors).to include(error_mr_required)
end
context 'when coupled with the `create` push option' do
@@ -595,17 +510,42 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
it_behaves_like 'a service that can change labels of a merge request', 1
end
- context 'with a deleted branch' do
- let(:changes) { deleted_branch_changes }
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
+ end
+
+ shared_examples 'with an existing branch that has a merge request open in foss' do
+ let(:changes) { existing_branch_changes }
+ let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'a service that does not create a merge request'
+ it_behaves_like 'a service that can change assignees of a merge request', 1
+ end
- context 'with the project default branch' do
- let(:changes) { default_branch_changes }
+ describe '`assign` push option' do
+ let(:assigned) { { user2.id => 1, user3.id => 1 } }
+ let(:unassigned) { nil }
+ let(:push_options) { { assign: assigned, unassign: unassigned } }
- it_behaves_like 'a service that does nothing'
- end
+ it_behaves_like 'with a new branch', 1
+ it_behaves_like 'with an existing branch but no open MR', 1
+ it_behaves_like 'with an existing branch that has a merge request open in foss'
+
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
+ end
+
+ describe '`unassign` push option' do
+ let(:assigned) { { user2.id => 1, user3.id => 1 } }
+ let(:unassigned) { { user1.id => 1, user3.id => 1 } }
+ let(:push_options) { { assign: assigned, unassign: unassigned } }
+
+ it_behaves_like 'with a new branch', 1
+ it_behaves_like 'with an existing branch but no open MR', 1
+ it_behaves_like 'with an existing branch that has a merge request open in foss'
+
+ it_behaves_like 'with a deleted branch'
+ it_behaves_like 'with the project default branch'
end
describe 'multiple pushed branches' do
@@ -645,7 +585,9 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
end
describe 'no user' do
- let(:user) { nil }
+ let(:user1) { nil }
+ let(:user2) { nil }
+ let(:user3) { nil }
let(:push_options) { { create: true } }
let(:changes) { new_branch_changes }
@@ -661,7 +603,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
let(:changes) { new_branch_changes }
it 'records an error' do
- Members::DestroyService.new(user).execute(ProjectMember.find_by!(user_id: user.id))
+ Members::DestroyService.new(user1).execute(ProjectMember.find_by!(user_id: user1.id))
service.execute
@@ -707,7 +649,7 @@ RSpec.describe MergeRequests::PushOptionsHandlerService do
end
describe 'when MRs are not enabled' do
- let(:project) { create(:project, :public, :repository).tap { |pr| pr.add_developer(user) } }
+ let(:project) { create(:project, :public, :repository).tap { |pr| pr.add_developer(user1) } }
let(:push_options) { { create: true } }
let(:changes) { new_branch_changes }
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 2abe7a23bfe..f9b76db877b 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -198,7 +198,7 @@ RSpec.describe MergeRequests::RefreshService do
end
end
- describe 'Pipelines for merge requests' do
+ shared_examples 'Pipelines for merge requests' do
before do
stub_ci_pipeline_yaml_file(config)
end
@@ -256,7 +256,7 @@ RSpec.describe MergeRequests::RefreshService do
stub_feature_flags(ci_disallow_to_create_merge_request_pipelines_in_target_project: false)
end
- it 'creates detached merge request pipeline for fork merge request', :sidekiq_inline do
+ it 'creates detached merge request pipeline for fork merge request' do
expect { subject }
.to change { @fork_merge_request.pipelines_for_merge_request.count }.by(1)
@@ -364,6 +364,18 @@ RSpec.describe MergeRequests::RefreshService do
end
end
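+ # The pipeline expectations are shared so they run with async pipeline creation both enabled and disabled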
+ context 'when the code_review_async_pipeline_creation feature flag is on', :sidekiq_inline do
+ it_behaves_like 'Pipelines for merge requests'
+ end
+
+ context 'when the code_review_async_pipeline_creation feature flag is off', :sidekiq_inline do
+ before do
+ stub_feature_flags(code_review_async_pipeline_creation: false)
+ end
+
+ it_behaves_like 'Pipelines for merge requests'
+ end
+
context 'push to origin repo source branch' do
let(:refresh_service) { service.new(@project, @user) }
let(:notification_service) { spy('notification_service') }
diff --git a/spec/services/merge_requests/resolve_todos_service_spec.rb b/spec/services/merge_requests/resolve_todos_service_spec.rb
new file mode 100644
index 00000000000..3e6f2ea3f5d
--- /dev/null
+++ b/spec/services/merge_requests/resolve_todos_service_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::ResolveTodosService do
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:user) { create(:user) }
+
+ let(:service) { described_class.new(merge_request, user) }
+
+ describe '#async_execute' do
+ def async_execute
+ service.async_execute
+ end
+
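+ # The worker receives only the merge request and user ids, not the full records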
+ it 'performs MergeRequests::ResolveTodosWorker asynchronously' do
+ expect(MergeRequests::ResolveTodosWorker)
+ .to receive(:perform_async)
+ .with(
+ merge_request.id,
+ user.id
+ )
+
+ async_execute
+ end
+
+ context 'when resolve_merge_request_todos_async feature is disabled' do
+ before do
+ stub_feature_flags(resolve_merge_request_todos_async: false)
+ end
+
+ it 'calls #execute' do
+ expect(service).to receive(:execute)
+
+ async_execute
+ end
+ end
+ end
+
+ describe '#execute' do
+ it 'marks pending todo as done' do
+ pending_todo = create(:todo, :pending, user: user, project: merge_request.project, target: merge_request)
+
+ service.execute
+
+ expect(pending_todo.reload).to be_done
+ end
+ end
+end
diff --git a/spec/services/merge_requests/update_assignees_service_spec.rb b/spec/services/merge_requests/update_assignees_service_spec.rb
new file mode 100644
index 00000000000..de03aab5418
--- /dev/null
+++ b/spec/services/merge_requests/update_assignees_service_spec.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::UpdateAssigneesService do
+ include AfterNextHelpers
+
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :private, :repository, group: group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:user3) { create(:user) }
+
+ let_it_be_with_reload(:merge_request) do
+ create(:merge_request, :simple, :unique_branches,
+ title: 'Old title',
+ description: "FYI #{user2.to_reference}",
+ assignee_ids: [user3.id],
+ source_project: project,
+ author: create(:user))
+ end
+
+ before do
+ project.add_maintainer(user)
+ project.add_developer(user2)
+ project.add_developer(user3)
+ end
+
+ let(:service) { described_class.new(project, user, opts) }
+ let(:opts) { { assignee_ids: [user2.id] } }
+
+ describe 'execute' do
+ def update_merge_request
+ service.execute(merge_request)
+ merge_request.reload
+ end
+
+ context 'when the parameters are valid' do
+ it 'updates the MR, and queues the more expensive work for later' do
+ expect_next(MergeRequests::HandleAssigneesChangeService, project, user) do |service|
+ expect(service)
+ .to receive(:async_execute)
+ .with(merge_request, [user3], execute_hooks: true)
+ end
+
+ expect { update_merge_request }
+ .to change(merge_request, :assignees).to([user2])
+ .and change(merge_request, :updated_at)
+ .and change(merge_request, :updated_by).to(user)
+ end
+
+ it 'does not update the assignees if they do not have access' do
+ opts[:assignee_ids] = [create(:user).id]
+
+ expect { update_merge_request }.not_to change(merge_request, :assignee_ids)
+ end
+
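+ # Guard the optimisation by comparing query counts against the generic MergeRequests::UpdateService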
+ it 'is more efficient than using the full update-service' do
+ allow_next(MergeRequests::HandleAssigneesChangeService, project, user) do |service|
+ expect(service)
+ .to receive(:async_execute)
+ .with(merge_request, [user3], execute_hooks: true)
+ end
+
+ other_mr = create(:merge_request, :simple, :unique_branches,
+ title: merge_request.title,
+ description: merge_request.description,
+ assignee_ids: merge_request.assignee_ids,
+ source_project: merge_request.project,
+ author: merge_request.author)
+
+ update_service = ::MergeRequests::UpdateService.new(project, user, opts)
+
+ expect { service.execute(merge_request) }
+ .to issue_fewer_queries_than { update_service.execute(other_mr) }
+ end
+ end
+ end
+end
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index 7a7f684c6d0..8c010855eb2 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -205,30 +205,6 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
end
- context 'assignees' do
- context 'when assignees changed' do
- it 'tracks assignees changed event' do
- expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
- .to receive(:track_assignees_changed_action).once.with(user: user)
-
- opts[:assignees] = [user2]
-
- MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
- end
- end
-
- context 'when assignees did not change' do
- it 'does not track assignees changed event' do
- expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
- .not_to receive(:track_assignees_changed_action)
-
- opts[:assignees] = merge_request.assignees
-
- MergeRequests::UpdateService.new(project, user, opts).execute(merge_request)
- end
- end
- end
-
context 'reviewers' do
context 'when reviewers changed' do
it 'tracks reviewers changed event' do
@@ -272,6 +248,41 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
it_behaves_like 'updates milestone'
end
+
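+ # Setting, changing or removing a milestone should invalidate the cached merge request counts on every affected milestone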
+ context 'milestone counters cache reset' do
+ let(:milestone_old) { create(:milestone, project: project) }
+ let(:opts) { { milestone: milestone_old } }
+
+ it 'deletes milestone counters' do
+ expect_next_instance_of(Milestones::MergeRequestsCountService, milestone_old) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+
+ expect_next_instance_of(Milestones::MergeRequestsCountService, milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+
+ update_merge_request(milestone: milestone)
+ end
+
+ it 'deletes milestone counters when the milestone is removed' do
+ expect_next_instance_of(Milestones::MergeRequestsCountService, milestone_old) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+
+ update_merge_request(milestone: nil)
+ end
+
+ it 'deletes milestone counters when the milestone was not set' do
+ update_merge_request(milestone: nil)
+
+ expect_next_instance_of(Milestones::MergeRequestsCountService, milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
+
+ update_merge_request(milestone: milestone)
+ end
+ end
end
it 'executes hooks with update action' do
@@ -291,21 +302,6 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
)
end
- it 'sends email to user2 about assign of new merge request and email to user3 about merge request unassignment', :sidekiq_might_not_need_inline do
- deliveries = ActionMailer::Base.deliveries
- email = deliveries.last
- recipients = deliveries.last(2).flat_map(&:to)
- expect(recipients).to include(user2.email, user3.email)
- expect(email.subject).to include(merge_request.title)
- end
-
- it 'creates system note about merge_request reassign' do
- note = find_note('assigned to')
-
- expect(note).not_to be_nil
- expect(note.note).to include "assigned to #{user.to_reference} and unassigned #{user3.to_reference}"
- end
-
context 'with reviewers' do
let(:opts) { { reviewer_ids: [user2.id] } }
@@ -594,62 +590,54 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
let!(:pending_todo) { create(:todo, :assigned, user: user, project: project, target: merge_request, author: user2) }
context 'when the title change' do
- before do
- update_merge_request({ title: 'New title' })
- end
+ it 'calls MergeRequest::ResolveTodosService#async_execute' do
+ expect_next_instance_of(MergeRequests::ResolveTodosService, merge_request, user) do |service|
+ expect(service).to receive(:async_execute)
+ end
- it 'marks pending todos as done' do
- expect(pending_todo.reload).to be_done
+ update_merge_request({ title: 'New title' })
end
it 'does not create any new todos' do
+ update_merge_request({ title: 'New title' })
+
expect(Todo.count).to eq(1)
end
end
context 'when the description change' do
- before do
- update_merge_request({ description: "Also please fix #{user2.to_reference} #{user3.to_reference}" })
- end
+ it 'calls MergeRequest::ResolveTodosService#async_execute' do
+ expect_next_instance_of(MergeRequests::ResolveTodosService, merge_request, user) do |service|
+ expect(service).to receive(:async_execute)
+ end
- it 'marks pending todos as done' do
- expect(pending_todo.reload).to be_done
+ update_merge_request({ description: "Also please fix #{user2.to_reference} #{user3.to_reference}" })
end
it 'creates only 1 new todo' do
+ update_merge_request({ description: "Also please fix #{user2.to_reference} #{user3.to_reference}" })
+
expect(Todo.count).to eq(2)
end
end
context 'when is reassigned' do
- before do
- update_merge_request({ assignee_ids: [user2.id] })
- end
-
- it 'marks previous assignee pending todos as done' do
- expect(pending_todo.reload).to be_done
- end
-
- it 'creates a pending todo for new assignee' do
- attributes = {
- project: project,
- author: user,
- user: user2,
- target_id: merge_request.id,
- target_type: merge_request.class.name,
- action: Todo::ASSIGNED,
- state: :pending
- }
+ it 'calls MergeRequest::ResolveTodosService#async_execute' do
+ expect_next_instance_of(MergeRequests::ResolveTodosService, merge_request, user) do |service|
+ expect(service).to receive(:async_execute)
+ end
- expect(Todo.where(attributes).count).to eq 1
+ update_merge_request({ assignee_ids: [user2.id] })
end
end
context 'when reviewers gets changed' do
- it 'marks pending todo as done' do
- update_merge_request({ reviewer_ids: [user2.id] })
+ it 'calls MergeRequest::ResolveTodosService#async_execute' do
+ expect_next_instance_of(MergeRequests::ResolveTodosService, merge_request, user) do |service|
+ expect(service).to receive(:async_execute)
+ end
- expect(pending_todo.reload).to be_done
+ update_merge_request({ reviewer_ids: [user2.id] })
end
it 'creates a pending todo for new review request' do
@@ -727,10 +715,12 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
end
- it 'marks pending todos as done' do
- update_merge_request({ milestone: create(:milestone, project: project) })
+ it 'calls MergeRequests::ResolveTodosService#async_execute' do
+ expect_next_instance_of(MergeRequests::ResolveTodosService, merge_request, user) do |service|
+ expect(service).to receive(:async_execute)
+ end
- expect(pending_todo.reload).to be_done
+ update_merge_request({ milestone: create(:milestone, project: project) })
end
it 'sends notifications for subscribers of changed milestone', :sidekiq_might_not_need_inline do
@@ -744,17 +734,19 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
context 'when the labels change' do
- before do
- travel_to(1.minute.from_now) do
- update_merge_request({ label_ids: [label.id] })
+ it 'calls MergeRequests::ResolveTodosService#async_execute' do
+ expect_next_instance_of(MergeRequests::ResolveTodosService, merge_request, user) do |service|
+ expect(service).to receive(:async_execute)
end
- end
- it 'marks pending todos as done' do
- expect(pending_todo.reload).to be_done
+ update_merge_request({ label_ids: [label.id] })
end
it 'updates updated_at' do
+ travel_to(1.minute.from_now) do
+ update_merge_request({ label_ids: [label.id] })
+ end
+
expect(merge_request.reload.updated_at).to be > Time.current
end
end
@@ -769,24 +761,26 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
context 'when the target branch change' do
- before do
- update_merge_request({ target_branch: 'target' })
- end
+ it 'calls MergeRequests::ResolveTodosService#async_execute' do
+ expect_next_instance_of(MergeRequests::ResolveTodosService, merge_request, user) do |service|
+ expect(service).to receive(:async_execute)
+ end
- it 'marks pending todos as done' do
- expect(pending_todo.reload).to be_done
+ update_merge_request({ target_branch: 'target' })
end
end
context 'when auto merge is enabled and target branch changed' do
before do
AutoMergeService.new(project, user, { sha: merge_request.diff_head_sha }).execute(merge_request, AutoMergeService::STRATEGY_MERGE_WHEN_PIPELINE_SUCCEEDS)
-
- update_merge_request({ target_branch: 'target' })
end
- it 'marks pending todos as done' do
- expect(pending_todo.reload).to be_done
+ it 'calls MergeRequests::ResolveTodosService#async_execute' do
+ expect_next_instance_of(MergeRequests::ResolveTodosService, merge_request, user) do |service|
+ expect(service).to receive(:async_execute)
+ end
+
+ update_merge_request({ target_branch: 'target' })
end
end
end
@@ -948,18 +942,8 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
it 'removes `MergeRequestsClosingIssues` records when issues are not closed anymore' do
- opts = {
- title: 'Awesome merge_request',
- description: "Closes #{first_issue.to_reference} and #{second_issue.to_reference}",
- source_branch: 'feature',
- target_branch: 'master',
- force_remove_source_branch: '1'
- }
-
- merge_request = MergeRequests::CreateService.new(project, user, opts).execute
-
- issue_ids = MergeRequestsClosingIssues.where(merge_request: merge_request).pluck(:issue_id)
- expect(issue_ids).to match_array([first_issue.id, second_issue.id])
+ create(:merge_requests_closing_issues, issue: first_issue, merge_request: merge_request)
+ create(:merge_requests_closing_issues, issue: second_issue, merge_request: merge_request)
service = described_class.new(project, user, description: "not closing any issues")
allow(service).to receive(:execute_hooks)
@@ -971,9 +955,45 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
context 'updating assignee_ids' do
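+ # use_specialized_service routes the assignee change through MergeRequests::UpdateAssigneesService rather than the generic update path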
+ context ':use_specialized_service' do
+ context 'when true' do
+ it 'passes the update action to ::MergeRequests::UpdateAssigneesService' do
+ expect(::MergeRequests::UpdateAssigneesService)
+ .to receive(:new).and_call_original
+
+ update_merge_request({
+ assignee_ids: [user2.id],
+ use_specialized_service: true
+ })
+ end
+ end
+
+ context 'when false or nil' do
+ before do
+ expect(::MergeRequests::UpdateAssigneesService).not_to receive(:new)
+ end
+
+ it 'does not pass the update action to ::MergeRequests::UpdateAssigneesService when false' do
+ update_merge_request({
+ assignee_ids: [user2.id],
+ use_specialized_service: false
+ })
+ end
+
+ it 'does not pass the update action to ::MergeRequests::UpdateAssigneesService when nil' do
+ update_merge_request({
+ assignee_ids: [user2.id],
+ use_specialized_service: nil
+ })
+ end
+ end
+ end
+
it 'does not update assignee when assignee_id is invalid' do
merge_request.update!(assignee_ids: [user.id])
+ expect(MergeRequests::HandleAssigneesChangeService).not_to receive(:new)
+
update_merge_request(assignee_ids: [-1])
expect(merge_request.reload.assignees).to eq([user])
@@ -982,29 +1002,35 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
it 'unassigns assignee when user id is 0' do
merge_request.update!(assignee_ids: [user.id])
+ expect_next_instance_of(MergeRequests::HandleAssigneesChangeService, project, user) do |service|
+ expect(service)
+ .to receive(:async_execute)
+ .with(merge_request, [user])
+ end
+
update_merge_request(assignee_ids: [0])
expect(merge_request.assignee_ids).to be_empty
end
it 'saves assignee when user id is valid' do
+ expect_next_instance_of(MergeRequests::HandleAssigneesChangeService, project, user) do |service|
+ expect(service)
+ .to receive(:async_execute)
+ .with(merge_request, [user3])
+ end
+
update_merge_request(assignee_ids: [user.id])
expect(merge_request.assignee_ids).to eq([user.id])
end
- it 'updates the tracking when user ids are valid' do
- expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
- .to receive(:track_users_assigned_to_mr)
- .with(users: [user])
-
- update_merge_request(assignee_ids: [user.id])
- end
-
it 'does not update assignee_id when user cannot read issue' do
non_member = create(:user)
original_assignees = merge_request.assignees
+ expect(MergeRequests::HandleAssigneesChangeService).not_to receive(:new)
+
update_merge_request(assignee_ids: [non_member.id])
expect(merge_request.reload.assignees).to eq(original_assignees)
diff --git a/spec/services/milestones/destroy_service_spec.rb b/spec/services/milestones/destroy_service_spec.rb
index dd68471d927..6c08b7db43a 100644
--- a/spec/services/milestones/destroy_service_spec.rb
+++ b/spec/services/milestones/destroy_service_spec.rb
@@ -22,14 +22,16 @@ RSpec.describe Milestones::DestroyService do
expect { milestone.reload }.to raise_error ActiveRecord::RecordNotFound
end
- it 'deletes milestone id from issuables' do
- issue = create(:issue, project: project, milestone: milestone)
- merge_request = create(:merge_request, source_project: project, milestone: milestone)
+ context 'with an existing merge request' do
+ let!(:issue) { create(:issue, project: project, milestone: milestone) }
+ let!(:merge_request) { create(:merge_request, source_project: project, milestone: milestone) }
- service.execute(milestone)
+ it 'deletes milestone id from issuables' do
+ service.execute(milestone)
- expect(issue.reload.milestone).to be_nil
- expect(merge_request.reload.milestone).to be_nil
+ expect(issue.reload.milestone).to be_nil
+ expect(merge_request.reload.milestone).to be_nil
+ end
end
it 'logs destroy event' do
diff --git a/spec/services/milestones/merge_requests_count_service_spec.rb b/spec/services/milestones/merge_requests_count_service_spec.rb
new file mode 100644
index 00000000000..aecc7d5ef52
--- /dev/null
+++ b/spec/services/milestones/merge_requests_count_service_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Milestones::MergeRequestsCountService, :use_clean_rails_memory_store_caching do
+ let_it_be(:project) { create(:project, :empty_repo) }
+ let_it_be(:milestone) { create(:milestone, project: project) }
+
+ before_all do
+ create(:merge_request, milestone: milestone, source_project: project)
+ create(:merge_request, :closed, milestone: milestone, source_project: project)
+ end
+
+ subject { described_class.new(milestone) }
+
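+ # The shared examples cover the caching behaviour; the explicit count below includes both opened and closed MRs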
+ it_behaves_like 'a counter caching service'
+
+ it 'counts all merge requests' do
+ expect(subject.count).to eq(2)
+ end
+end
diff --git a/spec/services/milestones/transfer_service_spec.rb b/spec/services/milestones/transfer_service_spec.rb
index 6f4f55b2bd0..afbc9c7dca2 100644
--- a/spec/services/milestones/transfer_service_spec.rb
+++ b/spec/services/milestones/transfer_service_spec.rb
@@ -50,7 +50,7 @@ RSpec.describe Milestones::TransferService do
end
end
- it 'deletes milestone issue counters cache for both milestones' do
+ it 'deletes milestone counters cache for both milestones' do
new_milestone = create(:milestone, project: project, title: group_milestone.title)
expect_next_instance_of(Milestones::IssuesCountService, group_milestone) do |service|
@@ -59,12 +59,18 @@ RSpec.describe Milestones::TransferService do
expect_next_instance_of(Milestones::ClosedIssuesCountService, group_milestone) do |service|
expect(service).to receive(:delete_cache).and_call_original
end
+ expect_next_instance_of(Milestones::MergeRequestsCountService, group_milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
expect_next_instance_of(Milestones::IssuesCountService, new_milestone) do |service|
expect(service).to receive(:delete_cache).and_call_original
end
expect_next_instance_of(Milestones::ClosedIssuesCountService, new_milestone) do |service|
expect(service).to receive(:delete_cache).and_call_original
end
+ expect_next_instance_of(Milestones::MergeRequestsCountService, new_milestone) do |service|
+ expect(service).to receive(:delete_cache).and_call_original
+ end
service.execute
end
diff --git a/spec/services/namespace_settings/update_service_spec.rb b/spec/services/namespace_settings/update_service_spec.rb
index b588bf2034d..887d56df099 100644
--- a/spec/services/namespace_settings/update_service_spec.rb
+++ b/spec/services/namespace_settings/update_service_spec.rb
@@ -44,5 +44,36 @@ RSpec.describe NamespaceSettings::UpdateService do
.from(nil).to(example_branch_name)
end
end
+
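+ # Only group owners may toggle resource access token creation; other members see a validation error instead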
+ context "updating :resource_access_token_creation_allowed" do
+ let(:settings) { { resource_access_token_creation_allowed: false } }
+
+ context 'when user is a group owner' do
+ before do
+ group.add_owner(user)
+ end
+
+ it "changes settings" do
+ expect { service.execute }
+ .to change { group.namespace_settings.resource_access_token_creation_allowed }
+ .from(true).to(false)
+ end
+ end
+
+ context 'when user is not a group owner' do
+ before do
+ group.add_developer(user)
+ end
+
+ it "does not change settings" do
+ expect { service.execute }.not_to change { group.namespace_settings.resource_access_token_creation_allowed }
+ end
+
+ it 'returns the group owner error' do
+ service.execute
+ expect(group.namespace_settings.errors.messages[:resource_access_token_creation_allowed]).to include('can only be changed by a group admin.')
+ end
+ end
+ end
end
end
diff --git a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
index 28b2e699e5e..3094f574184 100644
--- a/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
+++ b/spec/services/namespaces/in_product_marketing_emails_service_spec.rb
@@ -3,14 +3,12 @@
require 'spec_helper'
RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
- subject(:execute_service) do
- travel_to(frozen_time) { described_class.new(track, interval).execute }
- end
+ subject(:execute_service) { described_class.new(track, interval).execute }
let(:track) { :create }
let(:interval) { 1 }
- let(:frozen_time) { Time.current }
+ let(:frozen_time) { Time.zone.parse('23 Mar 2021 10:14:40 UTC') }
let(:previous_action_completed_at) { frozen_time - 2.days }
let(:current_action_completed_at) { nil }
let(:experiment_enabled) { true }
@@ -21,6 +19,7 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
let_it_be(:user) { create(:user, email_opted_in: true) }
before do
+ travel_to(frozen_time)
create(:onboarding_progress, namespace: group, **actions_completed)
group.add_developer(user)
stub_experiment_for_subject(in_product_marketing_emails: experiment_enabled)
@@ -86,26 +85,46 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
end
describe 'experimentation' do
- context 'when the experiment is enabled' do
- it 'adds the group as an experiment subject in the experimental group' do
- expect(Experiment).to receive(:add_group)
- .with(:in_product_marketing_emails, variant: :experimental, group: group)
+ context 'when on dotcom' do
+ before do
+ allow(::Gitlab).to receive(:com?).and_return(true)
+ end
+
+ context 'when the experiment is enabled' do
+ it 'adds the group as an experiment subject in the experimental group' do
+ expect(Experiment).to receive(:add_group)
+ .with(:in_product_marketing_emails, variant: :experimental, group: group)
- execute_service
+ execute_service
+ end
end
- end
- context 'when the experiment is disabled' do
- let(:experiment_enabled) { false }
+ context 'when the experiment is disabled' do
+ let(:experiment_enabled) { false }
+
+ it 'adds the group as an experiment subject in the control group' do
+ expect(Experiment).to receive(:add_group)
+ .with(:in_product_marketing_emails, variant: :control, group: group)
- it 'adds the group as an experiment subject in the control group' do
- expect(Experiment).to receive(:add_group)
- .with(:in_product_marketing_emails, variant: :control, group: group)
+ execute_service
+ end
- execute_service
+ it { is_expected.not_to send_in_product_marketing_email }
end
- it { is_expected.not_to send_in_product_marketing_email }
+ context 'when not on dotcom' do
+ before do
+ allow(::Gitlab).to receive(:com?).and_return(false)
+ end
+
+ it 'does not add the group as an experiment subject' do
+ expect(Experiment).not_to receive(:add_group)
+
+ execute_service
+ end
+
+ it { is_expected.to send_in_product_marketing_email(user.id, group.id, :create, 0) }
+ end
end
end
@@ -139,25 +158,87 @@ RSpec.describe Namespaces::InProductMarketingEmailsService, '#execute' do
it { is_expected.not_to send_in_product_marketing_email }
end
- context 'when the user has already received a marketing email as part of another group' do
- before do
- other_group = create(:group)
- other_group.add_developer(user)
- create(:onboarding_progress, namespace: other_group, created_at: previous_action_completed_at, git_write_at: current_action_completed_at)
+ describe 'do not send emails twice' do
+ subject { described_class.send_for_all_tracks_and_intervals }
+
+ let(:user) { create(:user, email_opted_in: true) }
+
+ context 'when user already got a specific email' do
+ before do
+ create(:in_product_marketing_email, user: user, track: track, series: 0)
+ end
+
+ it { is_expected.not_to send_in_product_marketing_email(user.id, anything, track, 0) }
end
- # For any group Notify is called exactly once
- it { is_expected.to send_in_product_marketing_email(user.id, anything, :create, 0) }
+ context 'when user already got sent the whole track' do
+ before do
+ 0.upto(2) do |series|
+ create(:in_product_marketing_email, user: user, track: track, series: series)
+ end
+ end
+
+ it 'does not send any of the emails anymore', :aggregate_failures do
+ 0.upto(2) do |series|
+ expect(subject).not_to send_in_product_marketing_email(user.id, anything, track, series)
+ end
+ end
+ end
+
+ context 'when user is in two groups' do
+ let(:other_group) { create(:group) }
+
+ before do
+ other_group.add_developer(user)
+ end
+
+ context 'when both groups would get the same email' do
+ before do
+ create(:onboarding_progress, namespace: other_group, **actions_completed)
+ end
+
+ it 'does not send the same email twice' do
+ subject
+
+ expect(Notify).to have_received(:in_product_marketing_email).with(user.id, anything, :create, 0).once
+ end
+ end
+
+ context 'when other group gets a different email' do
+ before do
+ create(:onboarding_progress, namespace: other_group, created_at: previous_action_completed_at, git_write_at: frozen_time - 2.days)
+ end
+
+ it 'sends both emails' do
+ subject
+
+ expect(Notify).to have_received(:in_product_marketing_email).with(user.id, group.id, :create, 0)
+ expect(Notify).to have_received(:in_product_marketing_email).with(user.id, other_group.id, :verify, 0)
+ end
+ end
+ end
+ end
+
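+ # Delivery is tracked via Users::InProductMarketingEmail records per user, track and series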
+ it 'records sent emails' do
+ expect { subject }.to change { Users::InProductMarketingEmail.count }.by(1)
+
+ expect(
+ Users::InProductMarketingEmail.where(
+ user: user,
+ track: Users::InProductMarketingEmail.tracks[:create],
+ series: 0
+ )
+ ).to exist
end
context 'when invoked with a non existing track' do
let(:track) { :foo }
before do
- stub_const("#{described_class}::TRACKS", { foo: :git_write })
+ stub_const("#{described_class}::TRACKS", { bar: :git_write })
end
- it { expect { subject }.to raise_error(NotImplementedError, 'No ability defined for track foo') }
+ it { expect { subject }.to raise_error(ArgumentError, 'Track foo not defined') }
end
context 'when group is a sub-group' do
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index f59749f0b63..d28cb118529 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -345,6 +345,24 @@ RSpec.describe Notes::CreateService do
expect(note.errors[:commands_only]).to be_present
end
+
+ it 'adds a failed command message to the note errors' do
+ note_text = %(/reopen)
+ note = described_class.new(project, user, opts.merge(note: note_text)).execute
+
+ expect(note.errors[:commands_only]).to contain_exactly('Could not apply reopen command.')
+ end
+
+ it 'generates both success and failure messages' do
+ note_text = %(/close\n/reopen)
+ service = double(:service)
+ allow(Issues::UpdateService).to receive(:new).and_return(service)
+ expect(service).to receive(:execute)
+
+ note = described_class.new(project, user, opts.merge(note: note_text)).execute
+
+ expect(note.errors[:commands_only]).to contain_exactly('Closed this issue. Could not apply reopen command.')
+ end
end
end
diff --git a/spec/services/notification_recipients/builder/default_spec.rb b/spec/services/notification_recipients/builder/default_spec.rb
index d25410235c2..994138ea828 100644
--- a/spec/services/notification_recipients/builder/default_spec.rb
+++ b/spec/services/notification_recipients/builder/default_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe NotificationRecipients::Builder::Default do
describe '#build!' do
let_it_be(:group) { create(:group, :public) }
let_it_be(:project) { create(:project, :public, group: group).tap { |p| p.add_developer(project_watcher) } }
- let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:target) { create(:issue, project: project) }
let_it_be(:current_user) { create(:user) }
let_it_be(:other_user) { create(:user) }
@@ -17,11 +17,11 @@ RSpec.describe NotificationRecipients::Builder::Default do
let_it_be(:notification_setting_project_w) { create(:notification_setting, source: project, user: project_watcher, level: 2) }
let_it_be(:notification_setting_group_w) { create(:notification_setting, source: group, user: group_watcher, level: 2) }
- subject { described_class.new(issue, current_user, action: :new).tap { |s| s.build! } }
+ subject { described_class.new(target, current_user, action: :new).tap { |s| s.build! } }
context 'participants and project watchers' do
before do
- expect(issue).to receive(:participants).and_return([participant, current_user])
+ expect(target).to receive(:participants).and_return([participant, current_user])
end
it 'adds all participants and watchers' do
@@ -34,11 +34,147 @@ RSpec.describe NotificationRecipients::Builder::Default do
it 'adds all subscribers' do
subscriber = create(:user)
non_subscriber = create(:user)
- create(:subscription, project: project, user: subscriber, subscribable: issue, subscribed: true)
- create(:subscription, project: project, user: non_subscriber, subscribable: issue, subscribed: false)
+ create(:subscription, project: project, user: subscriber, subscribable: target, subscribed: true)
+ create(:subscription, project: project, user: non_subscriber, subscribable: target, subscribed: false)
expect(subject.recipients.map(&:user)).to include(subscriber)
end
end
+
+ context 'custom notifications' do
+ shared_examples 'custom notification recipients' do
+ let_it_be(:custom_notification_user) { create(:user) }
+ let_it_be(:another_group) { create(:group) }
+ let_it_be(:another_project) { create(:project, namespace: another_group) }
+
+ context 'with project custom notification setting' do
+ before do
+ create(:notification_setting, source: project, user: custom_notification_user, level: :custom)
+ end
+
+ it 'adds the user to the recipients' do
+ expect(subject.recipients.map(&:user)).to include(custom_notification_user)
+ end
+ end
+
+ context 'with the project custom notification setting in another project' do
+ before do
+ create(:notification_setting, source: another_project, user: custom_notification_user, level: :custom)
+ end
+
+ it 'does not add the user to the recipients' do
+ expect(subject.recipients.map(&:user)).not_to include(custom_notification_user)
+ end
+ end
+
+ context 'with group custom notification setting' do
+ before do
+ create(:notification_setting, source: group, user: custom_notification_user, level: :custom)
+ end
+
+ it 'adds the user to the recipients' do
+ expect(subject.recipients.map(&:user)).to include(custom_notification_user)
+ end
+ end
+
+ context 'with the group custom notification setting in another group' do
+ before do
+ create(:notification_setting, source: another_group, user: custom_notification_user, level: :custom)
+ end
+
+ it 'does not add the user to the recipients' do
+ expect(subject.recipients.map(&:user)).not_to include(custom_notification_user)
+ end
+ end
+
+ context 'with project global custom notification setting' do
+ before do
+ create(:notification_setting, source: project, user: custom_notification_user, level: :global)
+ end
+
+ context 'with global custom notification setting' do
+ before do
+ create(:notification_setting, source: nil, user: custom_notification_user, level: :custom)
+ end
+
+ it 'adds the user to the recipients' do
+ expect(subject.recipients.map(&:user)).to include(custom_notification_user)
+ end
+ end
+
+ context 'without global custom notification setting' do
+ it 'does not add the user to the recipients' do
+ expect(subject.recipients.map(&:user)).not_to include(custom_notification_user)
+ end
+ end
+ end
+
+ context 'with group global custom notification setting' do
+ before do
+ create(:notification_setting, source: group, user: custom_notification_user, level: :global)
+ end
+
+ context 'with global custom notification setting' do
+ before do
+ create(:notification_setting, source: nil, user: custom_notification_user, level: :custom)
+ end
+
+ it 'adds the user to the recipients' do
+ expect(subject.recipients.map(&:user)).to include(custom_notification_user)
+ end
+ end
+
+ context 'without global custom notification setting' do
+ it 'does not add the user to the recipients' do
+ expect(subject.recipients.map(&:user)).not_to include(custom_notification_user)
+ end
+ end
+ end
+
+ context 'with group custom notification setting in deeply nested parent group' do
+ let(:grand_parent_group) { create(:group, :public) }
+ let(:parent_group) { create(:group, :public, parent: grand_parent_group) }
+ let(:group) { create(:group, :public, parent: parent_group) }
+ let(:project) { create(:project, :public, group: group).tap { |p| p.add_developer(project_watcher) } }
+ let(:target) { create(:issue, project: project) }
+
+ before do
+ create(:notification_setting, source: grand_parent_group, user: custom_notification_user, level: :custom)
+ end
+
+ it 'adds the user to the recipients' do
+ expect(subject.recipients.map(&:user)).to include(custom_notification_user)
+ end
+ end
+
+ context 'without a project or group' do
+ let(:target) { create(:snippet) }
+
+ before do
+ create(:notification_setting, source: nil, user: custom_notification_user, level: :custom)
+ end
+
+ it 'does not add the user to the recipients' do
+ expect(subject.recipients.map(&:user)).not_to include(custom_notification_user)
+ end
+ end
+ end
+
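+ # The same recipient expectations must hold with the refactored and the legacy code paths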
+ before do
+ stub_feature_flags(notification_setting_recipient_refactor: enabled)
+ end
+
+ context 'with notification_setting_recipient_refactor enabled' do
+ let(:enabled) { true }
+
+ it_behaves_like 'custom notification recipients'
+ end
+
+ context 'with notification_setting_recipient_refactor disabled' do
+ let(:enabled) { false }
+
+ it_behaves_like 'custom notification recipients'
+ end
+ end
end
end
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index f3cd2776ce7..6eff768eac2 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -105,7 +105,7 @@ RSpec.describe NotificationService, :mailer do
recipient_1 = NotificationRecipient.new(user_1, :custom, custom_action: :new_release)
allow(NotificationRecipients::BuildService).to receive(:build_new_release_recipients).and_return([recipient_1])
- expect(Gitlab::AppLogger).to receive(:warn).with(message: 'Skipping sending notifications', user: current_user.id, klass: object.class, object_id: object.id)
+ expect(Gitlab::AppLogger).to receive(:warn).with(message: 'Skipping sending notifications', user: current_user.id, klass: object.class.to_s, object_id: object.id)
action
@@ -290,6 +290,49 @@ RSpec.describe NotificationService, :mailer do
end
end
+ describe 'SSH Keys' do
+ let_it_be_with_reload(:user) { create(:user) }
+ let_it_be(:fingerprints) { ["aa:bb:cc:dd:ee:zz"] }
+
+ shared_context 'block user' do
+ before do
+ user.block!
+ end
+ end
+
+ describe '#ssh_key_expired' do
+ subject { notification.ssh_key_expired(user, fingerprints) }
+
+ it 'sends email to the key owner' do
+ expect { subject }.to have_enqueued_email(user, fingerprints, mail: "ssh_key_expired_email")
+ end
+
+ context 'when user is not allowed to receive notifications' do
+ include_context 'block user'
+
+ it 'does not send email to the key owner' do
+ expect { subject }.not_to have_enqueued_email(user, fingerprints, mail: "ssh_key_expired_email")
+ end
+ end
+ end
+
+ describe '#ssh_key_expiring_soon' do
+ subject { notification.ssh_key_expiring_soon(user, fingerprints) }
+
+ it 'sends email to the key owner' do
+ expect { subject }.to have_enqueued_email(user, fingerprints, mail: "ssh_key_expiring_soon_email")
+ end
+
+ context 'when user is not allowed to receive notifications' do
+ include_context 'block user'
+
+ it 'does not send email to the key owner' do
+ expect { subject }.not_to have_enqueued_email(user, fingerprints, mail: "ssh_key_expiring_soon_email")
+ end
+ end
+ end
+ end
+
describe '#unknown_sign_in' do
let_it_be(:user) { create(:user) }
let_it_be(:ip) { '127.0.0.1' }
@@ -1662,7 +1705,7 @@ RSpec.describe NotificationService, :mailer do
notification.issue_due(issue)
email = find_email_for(@subscriber)
- expect(email.header[:from].display_names).to eq([issue.author.name])
+ expect(email.header[:from].display_names).to eq(["#{issue.author.name} (@#{issue.author.username})"])
end
it_behaves_like 'participating notifications' do
diff --git a/spec/services/packages/create_dependency_service_spec.rb b/spec/services/packages/create_dependency_service_spec.rb
index 3ad59b31b2c..3eae9f099f7 100644
--- a/spec/services/packages/create_dependency_service_spec.rb
+++ b/spec/services/packages/create_dependency_service_spec.rb
@@ -5,7 +5,7 @@ RSpec.describe Packages::CreateDependencyService do
describe '#execute' do
let_it_be(:namespace) {create(:namespace)}
let_it_be(:version) { '1.0.1' }
- let_it_be(:package_name) { "@#{namespace.path}/my-app".freeze }
+ let_it_be(:package_name) { "@#{namespace.path}/my-app" }
context 'when packages are published' do
let(:json_file) { 'packages/npm/payload.json' }
diff --git a/spec/services/packages/debian/extract_changes_metadata_service_spec.rb b/spec/services/packages/debian/extract_changes_metadata_service_spec.rb
new file mode 100644
index 00000000000..2a92b8ed26e
--- /dev/null
+++ b/spec/services/packages/debian/extract_changes_metadata_service_spec.rb
@@ -0,0 +1,160 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::ExtractChangesMetadataService do
+ describe '#execute' do
+ let_it_be(:distribution) { create(:debian_project_distribution, codename: 'unstable') }
+ let_it_be(:incoming) { create(:debian_incoming, project: distribution.project) }
+
+ let(:package_file) { incoming.package_files.last }
+ let(:service) { described_class.new(package_file) }
+
+ subject { service.execute }
+
+ context 'with valid package file' do
+ it 'extracts metadata', :aggregate_failures do
+ expected_fields = { 'Architecture' => 'source amd64', 'Binary' => 'libsample0 sample-dev sample-udeb' }
+
+ expect(subject[:file_type]).to eq(:changes)
+ expect(subject[:architecture]).to be_nil
+ expect(subject[:fields]).to include(expected_fields)
+ expect(subject[:files].count).to eq(6)
+ end
+ end
+
+ context 'with invalid package file' do
+ let(:package_file) { incoming.package_files.first }
+
+ it 'raises an ExtractionError', :aggregate_failures do
+ expect { subject }.to raise_error(described_class::ExtractionError, "is not a changes file")
+ end
+ end
+
+ context 'with invalid metadata' do
+ let(:md5_dsc) { '3b0817804f669e16cdefac583ad88f0e 671 libs optional sample_1.2.3~alpha2.dsc' }
+ let(:md5_source) { 'd79b34f58f61ff4ad696d9bd0b8daa68 864 libs optional sample_1.2.3~alpha2.tar.xz' }
+ let(:md5s) { "#{md5_dsc}\n#{md5_source}" }
+ let(:sha1_dsc) { '32ecbd674f0bfd310df68484d87752490685a8d6 671 sample_1.2.3~alpha2.dsc' }
+ let(:sha1_source) { '5f8bba5574eb01ac3b1f5e2988e8c29307788236 864 sample_1.2.3~alpha2.tar.xz' }
+ let(:sha1s) { "#{sha1_dsc}\n#{sha1_source}" }
+ let(:sha256_dsc) { '844f79825b7e8aaa191e514b58a81f9ac1e58e2180134b0c9512fa66d896d7ba 671 sample_1.2.3~alpha2.dsc' }
+ let(:sha256_source) { 'b5a599e88e7cbdda3bde808160a21ba1dd1ec76b2ec8d4912aae769648d68362 864 sample_1.2.3~alpha2.tar.xz' }
+ let(:sha256s) { "#{sha256_dsc}\n#{sha256_source}" }
+ let(:fields) { { 'Files' => md5s, 'Checksums-Sha1' => sha1s, 'Checksums-Sha256' => sha256s } }
+ let(:metadata) { { file_type: :changes, architecture: 'amd64', fields: fields } }
+
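+ # Stub the inner ExtractMetadataService so the contexts below can corrupt individual checksum entries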
+ before do
+ allow_next_instance_of(::Packages::Debian::ExtractMetadataService) do |extract_metadata_service|
+ allow(extract_metadata_service).to receive(:execute).and_return(metadata)
+ end
+ end
+
+ context 'without Files field' do
+ let(:md5s) { nil }
+
+        it 'raises ExtractionError', :aggregate_failures do
+ expect { subject }.to raise_error(described_class::ExtractionError, "Files field is missing")
+ end
+ end
+
+ context 'without Checksums-Sha1 field' do
+ let(:sha1s) { nil }
+
+        it 'raises ExtractionError', :aggregate_failures do
+ expect { subject }.to raise_error(described_class::ExtractionError, "Checksums-Sha1 field is missing")
+ end
+ end
+
+ context 'without Checksums-Sha256 field' do
+ let(:sha256s) { nil }
+
+        it 'raises ExtractionError', :aggregate_failures do
+ expect { subject }.to raise_error(described_class::ExtractionError, "Checksums-Sha256 field is missing")
+ end
+ end
+
+ context 'with file in Checksums-Sha1 but not in Files' do
+ let(:md5_dsc) { '' }
+
+        it 'raises ExtractionError', :aggregate_failures do
+ expect { subject }.to raise_error(described_class::ExtractionError, "sample_1.2.3~alpha2.dsc is listed in Checksums-Sha1 but not in Files")
+ end
+ end
+
+ context 'with different size in Checksums-Sha1' do
+ let(:sha1_dsc) { '32ecbd674f0bfd310df68484d87752490685a8d6 42 sample_1.2.3~alpha2.dsc' }
+
+        it 'raises ExtractionError', :aggregate_failures do
+ expect { subject }.to raise_error(described_class::ExtractionError, "Size for sample_1.2.3~alpha2.dsc in Files and Checksums-Sha1 differ")
+ end
+ end
+
+ context 'with file in Checksums-Sha256 but not in Files' do
+ let(:md5_dsc) { '' }
+ let(:sha1_dsc) { '' }
+
+        it 'raises ExtractionError', :aggregate_failures do
+ expect { subject }.to raise_error(described_class::ExtractionError, "sample_1.2.3~alpha2.dsc is listed in Checksums-Sha256 but not in Files")
+ end
+ end
+
+ context 'with different size in Checksums-Sha256' do
+ let(:sha256_dsc) { '844f79825b7e8aaa191e514b58a81f9ac1e58e2180134b0c9512fa66d896d7ba 42 sample_1.2.3~alpha2.dsc' }
+
+        it 'raises ExtractionError', :aggregate_failures do
+ expect { subject }.to raise_error(described_class::ExtractionError, "Size for sample_1.2.3~alpha2.dsc in Files and Checksums-Sha256 differ")
+ end
+ end
+
+ context 'with file in Files but not in Checksums-Sha1' do
+ let(:sha1_dsc) { '' }
+
+        it 'raises ExtractionError', :aggregate_failures do
+ expect { subject }.to raise_error(described_class::ExtractionError, "Validation failed: Sha1sum can't be blank")
+ end
+ end
+
+ context 'with file in Files but not in Checksums-Sha256' do
+ let(:sha256_dsc) { '' }
+
+        it 'raises ExtractionError', :aggregate_failures do
+ expect { subject }.to raise_error(described_class::ExtractionError, "Validation failed: Sha256sum can't be blank")
+ end
+ end
+
+ context 'with invalid MD5' do
+ let(:md5_dsc) { '1234567890123456789012345678012 671 libs optional sample_1.2.3~alpha2.dsc' }
+
+        it 'raises ExtractionError', :aggregate_failures do
+ expect { subject }.to raise_error(described_class::ExtractionError, "Validation failed: Md5sum mismatch for sample_1.2.3~alpha2.dsc: 3b0817804f669e16cdefac583ad88f0e != 1234567890123456789012345678012")
+ end
+ end
+
+ context 'with invalid SHA1' do
+ let(:sha1_dsc) { '1234567890123456789012345678901234567890 671 sample_1.2.3~alpha2.dsc' }
+
+        it 'raises ExtractionError', :aggregate_failures do
+ expect { subject }.to raise_error(described_class::ExtractionError, "Validation failed: Sha1sum mismatch for sample_1.2.3~alpha2.dsc: 32ecbd674f0bfd310df68484d87752490685a8d6 != 1234567890123456789012345678901234567890")
+ end
+ end
+
+ context 'with invalid SHA256' do
+ let(:sha256_dsc) { '1234567890123456789012345678901234567890123456789012345678901234 671 sample_1.2.3~alpha2.dsc' }
+
+        it 'raises ExtractionError', :aggregate_failures do
+ expect { subject }.to raise_error(described_class::ExtractionError, "Validation failed: Sha256sum mismatch for sample_1.2.3~alpha2.dsc: 844f79825b7e8aaa191e514b58a81f9ac1e58e2180134b0c9512fa66d896d7ba != 1234567890123456789012345678901234567890123456789012345678901234")
+ end
+ end
+ end
+
+    context 'with missing package file' do
+      before do
+        incoming.package_files.first.destroy!
+      end
+
+      it 'raises ExtractionError' do
+        expect { subject }.to raise_error(described_class::ExtractionError, "sample_1.2.3~alpha2.tar.xz is listed in Files but was not uploaded")
+      end
+    end
+  end
+end
diff --git a/spec/services/packages/debian/extract_deb_metadata_service_spec.rb b/spec/services/packages/debian/extract_deb_metadata_service_spec.rb
index 33059adf8a2..ee3f3d179dc 100644
--- a/spec/services/packages/debian/extract_deb_metadata_service_spec.rb
+++ b/spec/services/packages/debian/extract_deb_metadata_service_spec.rb
@@ -10,17 +10,17 @@ RSpec.describe Packages::Debian::ExtractDebMetadataService do
context 'with correct file' do
it 'return as expected' do
expected = {
- 'Package': 'libsample0',
- 'Source': 'sample',
- 'Version': '1.2.3~alpha2',
- 'Architecture': 'amd64',
- 'Maintainer': 'John Doe <john.doe@example.com>',
- 'Installed-Size': '7',
- 'Section': 'libs',
- 'Priority': 'optional',
- 'Multi-Arch': 'same',
- 'Homepage': 'https://gitlab.com/',
- 'Description': "Some mostly empty lib\nUsed in GitLab tests.\n\nTesting another paragraph."
+ 'Package' => 'libsample0',
+ 'Source' => 'sample',
+ 'Version' => '1.2.3~alpha2',
+ 'Architecture' => 'amd64',
+ 'Maintainer' => 'John Doe <john.doe@example.com>',
+ 'Installed-Size' => '7',
+ 'Section' => 'libs',
+ 'Priority' => 'optional',
+ 'Multi-Arch' => 'same',
+ 'Homepage' => 'https://gitlab.com/',
+ 'Description' => "Some mostly empty lib\nUsed in GitLab tests.\n\nTesting another paragraph."
}
expect(subject.execute).to eq expected
diff --git a/spec/services/packages/debian/extract_metadata_service_spec.rb b/spec/services/packages/debian/extract_metadata_service_spec.rb
index 0aa9a67b263..e3911dbbfe0 100644
--- a/spec/services/packages/debian/extract_metadata_service_spec.rb
+++ b/spec/services/packages/debian/extract_metadata_service_spec.rb
@@ -33,11 +33,11 @@ RSpec.describe Packages::Debian::ExtractMetadataService do
where(:case_name, :trait, :expected_file_type, :expected_architecture, :expected_fields) do
'with invalid' | :invalid | :unknown | nil | nil
'with source' | :source | :source | nil | nil
- 'with dsc' | :dsc | :dsc | nil | { 'Binary': 'sample-dev, libsample0, sample-udeb' }
- 'with deb' | :deb | :deb | 'amd64' | { 'Multi-Arch': 'same' }
- 'with udeb' | :udeb | :udeb | 'amd64' | { 'Package': 'sample-udeb' }
- 'with buildinfo' | :buildinfo | :buildinfo | nil | { 'Architecture': 'amd64 source', 'Build-Architecture': 'amd64' }
- 'with changes' | :changes | :changes | nil | { 'Architecture': 'source amd64', 'Binary': 'libsample0 sample-dev sample-udeb' }
+ 'with dsc' | :dsc | :dsc | nil | { 'Binary' => 'sample-dev, libsample0, sample-udeb' }
+ 'with deb' | :deb | :deb | 'amd64' | { 'Multi-Arch' => 'same' }
+ 'with udeb' | :udeb | :udeb | 'amd64' | { 'Package' => 'sample-udeb' }
+ 'with buildinfo' | :buildinfo | :buildinfo | nil | { 'Architecture' => 'amd64 source', 'Build-Architecture' => 'amd64' }
+ 'with changes' | :changes | :changes | nil | { 'Architecture' => 'source amd64', 'Binary' => 'libsample0 sample-dev sample-udeb' }
end
with_them do
diff --git a/spec/services/packages/debian/parse_debian822_service_spec.rb b/spec/services/packages/debian/parse_debian822_service_spec.rb
index b67daca89c4..f43e38991ce 100644
--- a/spec/services/packages/debian/parse_debian822_service_spec.rb
+++ b/spec/services/packages/debian/parse_debian822_service_spec.rb
@@ -27,17 +27,17 @@ RSpec.describe Packages::Debian::ParseDebian822Service do
it 'return as expected, preserving order' do
expected = {
'Package: libsample0' => {
- 'Package': 'libsample0',
- 'Source': 'sample',
- 'Version': '1.2.3~alpha2',
- 'Architecture': 'amd64',
- 'Maintainer': 'John Doe <john.doe@example.com>',
- 'Installed-Size': '9',
- 'Section': 'libs',
- 'Priority': 'optional',
- 'Multi-Arch': 'same',
- 'Homepage': 'https://gitlab.com/',
- 'Description': "Some mostly empty lib\nUsed in GitLab tests.\n\nTesting another paragraph."
+ 'Package' => 'libsample0',
+ 'Source' => 'sample',
+ 'Version' => '1.2.3~alpha2',
+ 'Architecture' => 'amd64',
+ 'Maintainer' => 'John Doe <john.doe@example.com>',
+ 'Installed-Size' => '9',
+ 'Section' => 'libs',
+ 'Priority' => 'optional',
+ 'Multi-Arch' => 'same',
+ 'Homepage' => 'https://gitlab.com/',
+ 'Description' => "Some mostly empty lib\nUsed in GitLab tests.\n\nTesting another paragraph."
}
}
@@ -51,38 +51,38 @@ RSpec.describe Packages::Debian::ParseDebian822Service do
it 'return as expected, preserving order' do
expected = {
'Source: sample' => {
- 'Source': 'sample',
- 'Priority': 'optional',
- 'Maintainer': 'John Doe <john.doe@example.com>',
- 'Build-Depends': 'debhelper-compat (= 13)',
- 'Standards-Version': '4.5.0',
- 'Section': 'libs',
- 'Homepage': 'https://gitlab.com/',
- # 'Vcs-Browser': 'https://salsa.debian.org/debian/sample-1.2.3',
- # '#Vcs-Git': 'https://salsa.debian.org/debian/sample-1.2.3.git',
- 'Rules-Requires-Root': 'no'
+ 'Source' => 'sample',
+ 'Priority' => 'optional',
+ 'Maintainer' => 'John Doe <john.doe@example.com>',
+ 'Build-Depends' => 'debhelper-compat (= 13)',
+ 'Standards-Version' => '4.5.0',
+ 'Section' => 'libs',
+ 'Homepage' => 'https://gitlab.com/',
+ # 'Vcs-Browser' => 'https://salsa.debian.org/debian/sample-1.2.3',
+ # '#Vcs-Git' => 'https://salsa.debian.org/debian/sample-1.2.3.git',
+ 'Rules-Requires-Root' => 'no'
},
'Package: sample-dev' => {
- 'Package': 'sample-dev',
- 'Section': 'libdevel',
- 'Architecture': 'any',
- 'Multi-Arch': 'same',
- 'Depends': 'libsample0 (= ${binary:Version}), ${misc:Depends}',
- 'Description': "Some mostly empty developpement files\nUsed in GitLab tests.\n\nTesting another paragraph."
+ 'Package' => 'sample-dev',
+ 'Section' => 'libdevel',
+ 'Architecture' => 'any',
+ 'Multi-Arch' => 'same',
+ 'Depends' => 'libsample0 (= ${binary:Version}), ${misc:Depends}',
+ 'Description' => "Some mostly empty developpement files\nUsed in GitLab tests.\n\nTesting another paragraph."
},
'Package: libsample0' => {
- 'Package': 'libsample0',
- 'Architecture': 'any',
- 'Multi-Arch': 'same',
- 'Depends': '${shlibs:Depends}, ${misc:Depends}',
- 'Description': "Some mostly empty lib\nUsed in GitLab tests.\n\nTesting another paragraph."
+ 'Package' => 'libsample0',
+ 'Architecture' => 'any',
+ 'Multi-Arch' => 'same',
+ 'Depends' => '${shlibs:Depends}, ${misc:Depends}',
+ 'Description' => "Some mostly empty lib\nUsed in GitLab tests.\n\nTesting another paragraph."
},
'Package: sample-udeb' => {
- 'Package': 'sample-udeb',
- 'Package-Type': 'udeb',
- 'Architecture': 'any',
- 'Depends': 'installed-base',
- 'Description': 'Some mostly empty udeb'
+ 'Package' => 'sample-udeb',
+ 'Package-Type' => 'udeb',
+ 'Architecture' => 'any',
+ 'Depends' => 'installed-base',
+ 'Description' => 'Some mostly empty udeb'
}
}
diff --git a/spec/services/packages/debian/process_changes_service_spec.rb b/spec/services/packages/debian/process_changes_service_spec.rb
new file mode 100644
index 00000000000..98b531bde10
--- /dev/null
+++ b/spec/services/packages/debian/process_changes_service_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Debian::ProcessChangesService do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+ let_it_be_with_reload(:distribution) { create(:debian_project_distribution, :with_file, codename: 'unstable') }
+ let_it_be(:incoming) { create(:debian_incoming, project: distribution.project) }
+
+ let(:package_file) { incoming.package_files.last }
+
+ subject { described_class.new(package_file, user) }
+
+ context 'with valid package file' do
+ it 'updates package and package file', :aggregate_failures do
+ expect { subject.execute }
+ .to change { Packages::Package.count }.from(1).to(2)
+ .and not_change { Packages::PackageFile.count }
+ .and change { incoming.package_files.count }.from(7).to(0)
+
+ created_package = Packages::Package.last
+ expect(created_package.name).to eq 'sample'
+ expect(created_package.version).to eq '1.2.3~alpha2'
+ expect(created_package.creator).to eq user
+ end
+ end
+
+ context 'with invalid package file' do
+ let(:package_file) { incoming.package_files.first }
+
+      it 'raises ExtractionError', :aggregate_failures do
+ expect { subject.execute }
+ .to not_change { Packages::Package.count }
+ .and not_change { Packages::PackageFile.count }
+ .and not_change { incoming.package_files.count }
+ .and not_change { distribution.reload.needs_update? }
+ .and raise_error(Packages::Debian::ExtractChangesMetadataService::ExtractionError, 'is not a changes file')
+ end
+ end
+
+ context 'when creating package fails' do
+ before do
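+        # Simulate a failure in package creation so we can assert that no records are created or removed.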
+ allow_next_instance_of(::Packages::Debian::FindOrCreatePackageService) do |find_or_create_package_service|
+ expect(find_or_create_package_service).to receive(:execute).and_raise(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
+ end
+ end
+
+      it 'removes the package file', :aggregate_failures do
+ expect { subject.execute }
+ .to not_change { Packages::Package.count }
+ .and not_change { Packages::PackageFile.count }
+ .and not_change { incoming.package_files.count }
+ .and not_change { distribution.reload.needs_update? }
+ .and raise_error(ActiveRecord::ConnectionTimeoutError, 'connect timeout')
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/go/create_package_service_spec.rb b/spec/services/packages/go/create_package_service_spec.rb
new file mode 100644
index 00000000000..5c5fec0aa3a
--- /dev/null
+++ b/spec/services/packages/go/create_package_service_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Go::CreatePackageService do
+ let_it_be(:project) { create :project_empty_repo, path: 'my-go-lib' }
+ let_it_be(:mod) { create :go_module, project: project }
+
+ before :all do
+ create :go_module_commit, :module, project: project, tag: 'v1.0.0'
+ end
+
+ shared_examples 'creates a package' do |files:|
+ it "returns a valid package with #{files ? files.to_s : 'no'} file(s)" do
+ expect(subject).to be_valid
+ expect(subject.name).to eq(version.mod.name)
+ expect(subject.version).to eq(version.name)
+ expect(subject.package_type).to eq('golang')
+ expect(subject.created_at).to eq(version.commit.committed_date)
+ expect(subject.package_files.count).to eq(files)
+ end
+ end
+
+ shared_examples 'creates a package file' do |type|
+ it "returns a package with a #{type} file" do
+ file_name = "#{version.name}.#{type}"
+ expect(subject.package_files.map { |f| f.file_name }).to include(file_name)
+
+ file = subject.package_files.with_file_name(file_name).first
+ expect(file).not_to be_nil
+ expect(file.file).not_to be_nil
+ expect(file.size).to eq(file.file.size)
+ expect(file.file_name).to eq(file_name)
+ expect(file.file_md5).not_to be_nil
+ expect(file.file_sha1).not_to be_nil
+ expect(file.file_sha256).not_to be_nil
+ end
+ end
+
+ describe '#execute' do
+ subject { described_class.new(project, nil, version: version).execute }
+
+ let(:version) { create :go_module_version, :tagged, mod: mod, name: 'v1.0.0' }
+
+ context 'with no existing package' do
+ it_behaves_like 'creates a package', files: 2
+ it_behaves_like 'creates a package file', :mod
+ it_behaves_like 'creates a package file', :zip
+
+ it 'creates a new package' do
+ expect { subject }
+ .to change { project.packages.count }.by(1)
+ .and change { Packages::PackageFile.count }.by(2)
+ end
+ end
+
+ context 'with an existing package' do
+ before do
+ described_class.new(project, version: version).execute
+ end
+
+ it_behaves_like 'creates a package', files: 2
+ it_behaves_like 'creates a package file', :mod
+ it_behaves_like 'creates a package file', :zip
+
+ it 'does not create a package or files' do
+ expect { subject }
+ .to not_change { project.packages.count }
+ .and not_change { Packages::PackageFile.count }
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/go/sync_packages_service_spec.rb b/spec/services/packages/go/sync_packages_service_spec.rb
new file mode 100644
index 00000000000..565b0f252ce
--- /dev/null
+++ b/spec/services/packages/go/sync_packages_service_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Go::SyncPackagesService do
+ include_context 'basic Go module'
+
+ let(:params) { { info: true, mod: true, zip: true } }
+
+ describe '#execute_async' do
+ it 'schedules a package refresh' do
+ expect(::Packages::Go::SyncPackagesWorker).to receive(:perform_async).once
+
+ described_class.new(project, 'master').execute_async
+ end
+ end
+
+ describe '#initialize' do
+ context 'without a project' do
+ it 'raises an error' do
+ expect { described_class.new(nil, 'master') }
+ .to raise_error(ArgumentError, 'project is required')
+ end
+ end
+
+ context 'without a ref' do
+ it 'raises an error' do
+ expect { described_class.new(project, nil) }
+ .to raise_error(ArgumentError, 'ref is required')
+ end
+ end
+
+ context 'with an invalid ref' do
+ it 'raises an error' do
+ expect { described_class.new(project, 'not-a-ref') }
+ .to raise_error(ArgumentError)
+ end
+ end
+ end
+end
diff --git a/spec/services/packages/maven/metadata/sync_service_spec.rb b/spec/services/packages/maven/metadata/sync_service_spec.rb
index f5634159e6d..30ddb48207a 100644
--- a/spec/services/packages/maven/metadata/sync_service_spec.rb
+++ b/spec/services/packages/maven/metadata/sync_service_spec.rb
@@ -131,7 +131,7 @@ RSpec.describe ::Packages::Maven::Metadata::SyncService do
expect(::Packages::Maven::Metadata::CreateVersionsXmlService).not_to receive(:new)
end
- it_behaves_like 'returning an error service response', message: 'Non existing versionless package'
+ it_behaves_like 'returning a success service response', message: 'Non existing versionless package(s). Nothing to do.'
end
context 'without a metadata package file for versions' do
@@ -141,7 +141,7 @@ RSpec.describe ::Packages::Maven::Metadata::SyncService do
expect(::Packages::Maven::Metadata::CreateVersionsXmlService).not_to receive(:new)
end
- it_behaves_like 'returning an error service response', message: 'Non existing metadata file for versions'
+ it_behaves_like 'returning a success service response', message: 'Non existing versionless package(s). Nothing to do.'
end
context 'without a project' do
@@ -205,7 +205,7 @@ RSpec.describe ::Packages::Maven::Metadata::SyncService do
it_behaves_like 'returning a success service response', message: 'Versionless package for versions destroyed'
end
- context 'with a too big maven metadata file for versions' do
+ context 'with a too big maven metadata file for plugins' do
before do
metadata_file_for_plugins.update!(size: 100.megabytes)
end
@@ -244,6 +244,15 @@ RSpec.describe ::Packages::Maven::Metadata::SyncService do
it_behaves_like 'returning a success service response', message: 'New metadata package files created'
end
+ context 'without a versionless package for versions' do
+ before do
+ versionless_package_for_versions.package_files.update_all(file_name: 'test.txt')
+ expect(::Packages::Maven::Metadata::CreateVersionsXmlService).not_to receive(:new)
+ end
+
+ it_behaves_like 'returning a success service response', message: 'No changes for plugins xml'
+ end
+
context 'without a metadata package file for plugins' do
before do
versionless_package_for_plugins.package_files.update_all(file_name: 'test.txt')
diff --git a/spec/services/packages/rubygems/create_dependencies_service_spec.rb b/spec/services/packages/rubygems/create_dependencies_service_spec.rb
new file mode 100644
index 00000000000..b6e12b1cc61
--- /dev/null
+++ b/spec/services/packages/rubygems/create_dependencies_service_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Rubygems::CreateDependenciesService do
+ include RubygemsHelpers
+
+ let_it_be(:package) { create(:rubygems_package) }
+ let_it_be(:package_file) { create(:package_file, :gem) }
+ let_it_be(:gem) { gem_from_file(package_file.file) }
+ let_it_be(:gemspec) { gem.spec }
+
+ let(:service) { described_class.new(package, gemspec) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ it 'creates dependencies', :aggregate_failures do
+ expect { subject }.to change { Packages::Dependency.count }.by(4)
+
+ gemspec.dependencies.each do |dependency|
+ persisted_dependency = Packages::Dependency.find_by(name: dependency.name)
+
+ expect(persisted_dependency.version_pattern).to eq dependency.requirement.to_s
+ end
+ end
+
+ it 'links dependencies to the package' do
+ expect { subject }.to change { package.dependency_links.count }.by(4)
+
+ expect(package.dependency_links.first).to be_dependencies
+ end
+ end
+end
diff --git a/spec/services/packages/rubygems/create_gemspec_service_spec.rb b/spec/services/packages/rubygems/create_gemspec_service_spec.rb
new file mode 100644
index 00000000000..198e978a47e
--- /dev/null
+++ b/spec/services/packages/rubygems/create_gemspec_service_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe Packages::Rubygems::CreateGemspecService do
+ include RubygemsHelpers
+
+ let_it_be(:package_file) { create(:package_file, :gem) }
+ let_it_be(:gem) { gem_from_file(package_file.file) }
+ let_it_be(:gemspec) { gem.spec }
+ let_it_be(:package) { package_file.package }
+
+ let(:service) { described_class.new(package, gemspec) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ it 'creates a new package file', :aggregate_failures do
+ expect { subject }.to change { package.package_files.count }.by(1)
+
+ gemspec_file = package.package_files.find_by(file_name: "#{gemspec.name}.gemspec")
+ expect(gemspec_file.file).not_to be_nil
+ expect(gemspec_file.size).not_to be_nil
+ expect(gemspec_file.file_md5).not_to be_nil
+ expect(gemspec_file.file_sha1).not_to be_nil
+ expect(gemspec_file.file_sha256).not_to be_nil
+ end
+ end
+end
diff --git a/spec/services/packages/rubygems/metadata_extraction_service_spec.rb b/spec/services/packages/rubygems/metadata_extraction_service_spec.rb
new file mode 100644
index 00000000000..b308daad8f5
--- /dev/null
+++ b/spec/services/packages/rubygems/metadata_extraction_service_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+require 'spec_helper'
+require 'rubygems/package'
+
+RSpec.describe Packages::Rubygems::MetadataExtractionService do
+ include RubygemsHelpers
+
+ let_it_be(:package) { create(:rubygems_package) }
+ let_it_be(:package_file) { create(:package_file, :gem) }
+ let_it_be(:gem) { gem_from_file(package_file.file) }
+ let_it_be(:gemspec) { gem.spec }
+
+ let(:service) { described_class.new(package, gemspec) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ it 'creates the metadata' do
+ expect { subject }.to change { Packages::Rubygems::Metadatum.count }.by(1)
+ end
+
+ it 'stores the metadata', :aggregate_failures do
+ subject
+
+ metadata = package.rubygems_metadatum
+
+ expect(metadata.authors).to eq(gemspec.authors.to_json)
+ expect(metadata.files).to eq(gemspec.files.to_json)
+ expect(metadata.summary).to eq(gemspec.summary)
+ expect(metadata.description).to eq(gemspec.description)
+ expect(metadata.email).to eq(gemspec.email)
+ expect(metadata.homepage).to eq(gemspec.homepage)
+ expect(metadata.licenses).to eq(gemspec.licenses.to_json)
+ expect(metadata.metadata).to eq(gemspec.metadata.to_json)
+ expect(metadata.author).to eq(gemspec.author)
+ expect(metadata.bindir).to eq(gemspec.bindir)
+ expect(metadata.executables).to eq(gemspec.executables.to_json)
+ expect(metadata.extensions).to eq(gemspec.extensions.to_json)
+ expect(metadata.extra_rdoc_files).to eq(gemspec.extra_rdoc_files.to_json)
+ expect(metadata.platform).to eq(gemspec.platform)
+ expect(metadata.post_install_message).to eq(gemspec.post_install_message)
+ expect(metadata.rdoc_options).to eq(gemspec.rdoc_options.to_json)
+ expect(metadata.require_paths).to eq(gemspec.require_paths.to_json)
+ expect(metadata.required_ruby_version).to eq(gemspec.required_ruby_version.to_s)
+ expect(metadata.required_rubygems_version).to eq(gemspec.required_rubygems_version.to_s)
+ expect(metadata.requirements).to eq(gemspec.requirements.to_json)
+ expect(metadata.rubygems_version).to eq(gemspec.rubygems_version)
+ end
+ end
+end
diff --git a/spec/services/packages/rubygems/process_gem_service_spec.rb b/spec/services/packages/rubygems/process_gem_service_spec.rb
new file mode 100644
index 00000000000..83e868d9579
--- /dev/null
+++ b/spec/services/packages/rubygems/process_gem_service_spec.rb
@@ -0,0 +1,134 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Rubygems::ProcessGemService do
+ include ExclusiveLeaseHelpers
+ include RubygemsHelpers
+
+ let_it_be_with_reload(:package) { create(:rubygems_package, :processing, name: 'temp_name', version: '0.0.0') }
+
+ let(:package_file) { create(:package_file, :unprocessed_gem, package: package) }
+ let(:gem) { gem_from_file(package_file.file) }
+ let(:gemspec) { gem.spec }
+ let(:service) { described_class.new(package_file) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ context 'no gem file', :aggregate_failures do
+ let(:package_file) { nil }
+
+ it 'returns an error' do
+ expect(subject.error?).to be(true)
+ expect(subject.message).to eq('Gem was not processed')
+ end
+ end
+
+ context 'success' do
+ let(:sub_service) { double }
+
+ before do
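+        # Replace the three sub-services with a shared double; these examples only verify that each one is invoked.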
+ expect(Packages::Rubygems::MetadataExtractionService).to receive(:new).with(package, gemspec).and_return(sub_service)
+ expect(Packages::Rubygems::CreateGemspecService).to receive(:new).with(package, gemspec).and_return(sub_service)
+ expect(Packages::Rubygems::CreateDependenciesService).to receive(:new).with(package, gemspec).and_return(sub_service)
+
+ expect(sub_service).to receive(:execute).exactly(3).times.and_return(true)
+ end
+
+ it 'returns successfully', :aggregate_failures do
+ result = subject
+
+ expect(result.success?).to be true
+ expect(result.payload[:package]).to eq(package)
+ end
+
+ it 'updates the package name and version', :aggregate_failures do
+ expect(package.name).to eq('temp_name')
+ expect(package.version).to eq('0.0.0')
+ expect(package).to be_processing
+
+ subject
+
+ expect(package.reload.name).to eq('package')
+ expect(package.version).to eq('0.0.1')
+ expect(package).to be_default
+ end
+
+ it 'updates the package file name', :aggregate_failures do
+ expect(package_file.file_name).to eq('package.gem')
+
+ subject
+
+ expect(package_file.reload.file_name).to eq('package-0.0.1.gem')
+ end
+ end
+
+ context 'when the package already exists' do
+ let_it_be(:existing_package) { create(:rubygems_package, name: 'package', version: '0.0.1', project: package.project) }
+
+ let(:sub_service) { double }
+
+ before do
+ expect(Packages::Rubygems::MetadataExtractionService).to receive(:new).with(existing_package, gemspec).and_return(sub_service)
+ expect(Packages::Rubygems::CreateGemspecService).to receive(:new).with(existing_package, gemspec).and_return(sub_service)
+ expect(Packages::Rubygems::CreateDependenciesService).to receive(:new).with(existing_package, gemspec).and_return(sub_service)
+
+ expect(sub_service).to receive(:execute).exactly(3).times.and_return(true)
+ end
+
+ it 'assigns the package_file to the existing package and deletes the temporary package', :aggregate_failures do
+ expect(package).to receive(:destroy)
+
+ expect { subject }.to change { existing_package.package_files.count }.by(1)
+
+ expect(package_file.reload.package).to eq(existing_package)
+ end
+ end
+
+ context 'sub-service failure' do
+ before do
+ expect(Packages::Rubygems::MetadataExtractionService).to receive(:new).with(package, gemspec).and_raise(::Packages::Rubygems::ProcessGemService::ExtractionError.new('failure'))
+ end
+
+ it 'returns an error' do
+ expect { subject }.to raise_error(::Packages::Rubygems::ProcessGemService::ExtractionError, 'failure')
+ end
+ end
+
+ context 'bad gem file' do
+ before do
+ expect(Gem::Package).to receive(:new).and_raise(ArgumentError)
+ end
+
+ it 'returns an error' do
+ expect { subject }.to raise_error(::Packages::Rubygems::ProcessGemService::ExtractionError, 'Unable to read gem file')
+ end
+ end
+
+ context 'without obtaining an exclusive lease' do
+ let(:lease_key) { "packages:rubygems:process_gem_service:package:#{package.id}" }
+
+ before do
+ stub_exclusive_lease_taken(lease_key, timeout: 1.hour)
+ end
+
+ it 'does not perform the services', :aggregate_failures do
+ # The #use_file call triggers a separate lease on the package file being opened
+ # for use with the gem. We don't want to test that here, so we allow the call to proceed
+ expect(Gitlab::ExclusiveLease).to receive(:new).with("object_storage_migrate:Packages::PackageFile:#{package_file.id}", anything).and_call_original
+
+ expect(Packages::Rubygems::MetadataExtractionService).not_to receive(:new)
+ expect(Packages::Rubygems::CreateGemspecService).not_to receive(:new)
+ expect(Packages::Rubygems::CreateDependenciesService).not_to receive(:new)
+
+ subject
+
+ expect(package.reload.name).to eq('temp_name')
+ expect(package.version).to eq('0.0.0')
+ expect(package).to be_processing
+ expect(package_file.reload.file_name).to eq('package.gem')
+ end
+ end
+ end
+end
diff --git a/spec/services/pages/delete_services_spec.rb b/spec/services/pages/delete_service_spec.rb
index f1edf93b0c1..a79c89a1c35 100644
--- a/spec/services/pages/delete_services_spec.rb
+++ b/spec/services/pages/delete_service_spec.rb
@@ -16,7 +16,10 @@ RSpec.describe Pages::DeleteService do
it 'deletes published pages', :sidekiq_inline do
expect(project.pages_deployed?).to be(true)
- expect_any_instance_of(Gitlab::PagesTransfer).to receive(:rename_project).and_return true
+ expect_next_instance_of(Gitlab::PagesTransfer) do |pages_transfer|
+ expect(pages_transfer).to receive(:rename_project).and_return true
+ end
+
expect(PagesWorker).to receive(:perform_in).with(5.minutes, :remove, project.namespace.full_path, anything)
service.execute
@@ -24,11 +27,10 @@ RSpec.describe Pages::DeleteService do
expect(project.pages_deployed?).to be(false)
end
- it "doesn't remove anything from the legacy storage if updates on it are disabled", :sidekiq_inline do
- stub_feature_flags(pages_update_legacy_storage: false)
+ it "doesn't remove anything from the legacy storage", :sidekiq_inline do
+ allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
expect(project.pages_deployed?).to be(true)
-
expect(PagesWorker).not_to receive(:perform_in)
service.execute
@@ -69,7 +71,9 @@ RSpec.describe Pages::DeleteService do
expect(project.pages_deployed?).to eq(false)
expect(project.pages_domains.count).to eq(0)
- expect_any_instance_of(Gitlab::PagesTransfer).to receive(:rename_project).and_return true
+ expect_next_instance_of(Gitlab::PagesTransfer) do |pages_transfer|
+ expect(pages_transfer).to receive(:rename_project).and_return true
+ end
Sidekiq::Worker.drain_all
end
diff --git a/spec/services/pages/migrate_from_legacy_storage_service_spec.rb b/spec/services/pages/migrate_from_legacy_storage_service_spec.rb
index 4ec57044912..d058324f3bb 100644
--- a/spec/services/pages/migrate_from_legacy_storage_service_spec.rb
+++ b/spec/services/pages/migrate_from_legacy_storage_service_spec.rb
@@ -3,90 +3,135 @@
require 'spec_helper'
RSpec.describe Pages::MigrateFromLegacyStorageService do
- let(:service) { described_class.new(Rails.logger, migration_threads: 3, batch_size: 10, ignore_invalid_entries: false) }
+ let(:batch_size) { 10 }
+ let(:mark_projects_as_not_deployed) { false }
+ let(:service) { described_class.new(Rails.logger, ignore_invalid_entries: false, mark_projects_as_not_deployed: mark_projects_as_not_deployed) }
- it 'does not try to migrate pages if pages are not deployed' do
- expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
+ shared_examples "migrates projects properly" do
+ it 'does not try to migrate pages if pages are not deployed' do
+ expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
- expect(service.execute).to eq(migrated: 0, errored: 0)
- end
+ is_expected.to eq(migrated: 0, errored: 0)
+ end
- it 'uses multiple threads' do
- projects = create_list(:project, 20)
- projects.each do |project|
- project.mark_pages_as_deployed
+ context 'when pages are marked as deployed' do
+ let(:project) { create(:project) }
- FileUtils.mkdir_p File.join(project.pages_path, "public")
- File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
- f.write("Hello!")
+ before do
+ project.mark_pages_as_deployed
end
- end
-
- service = described_class.new(Rails.logger, migration_threads: 3, batch_size: 2, ignore_invalid_entries: false)
- threads = Concurrent::Set.new
+ context 'when pages directory does not exist' do
+ context 'when mark_projects_as_not_deployed is set' do
+ let(:mark_projects_as_not_deployed) { true }
- expect(service).to receive(:migrate_project).exactly(20).times.and_wrap_original do |m, *args|
- threads.add(Thread.current)
+ it 'counts project as migrated' do
+ expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project, ignore_invalid_entries: false, mark_projects_as_not_deployed: true) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
- # sleep to be 100% certain that once thread can't consume all the queue
- # it works without it, but I want to avoid making this test flaky
- sleep(0.01)
+ is_expected.to eq(migrated: 1, errored: 0)
+ end
+ end
- m.call(*args)
- end
+ it 'counts project as errored' do
+ expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project, ignore_invalid_entries: false, mark_projects_as_not_deployed: false) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
- expect(service.execute).to eq(migrated: 20, errored: 0)
- expect(threads.length).to eq(3)
- end
+ is_expected.to eq(migrated: 0, errored: 1)
+ end
+ end
- context 'when pages are marked as deployed' do
- let(:project) { create(:project) }
+ context 'when pages directory exists on disk' do
+ before do
+ FileUtils.mkdir_p File.join(project.pages_path, "public")
+ File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
+ f.write("Hello!")
+ end
+ end
- before do
- project.mark_pages_as_deployed
- end
+ it 'migrates pages projects without deployments' do
+ expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project, ignore_invalid_entries: false, mark_projects_as_not_deployed: false) do |service|
+ expect(service).to receive(:execute).and_call_original
+ end
- context 'when pages directory does not exist' do
- it 'tries to migrate the project, but does not crash' do
- expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project, ignore_invalid_entries: false) do |service|
- expect(service).to receive(:execute).and_call_original
+ expect(project.pages_metadatum.reload.pages_deployment).to eq(nil)
+ expect(subject).to eq(migrated: 1, errored: 0)
+ expect(project.pages_metadatum.reload.pages_deployment).to be
end
- expect(service.execute).to eq(migrated: 0, errored: 1)
+ context 'when deployed already exists for the project' do
+ before do
+ deployment = create(:pages_deployment, project: project)
+ project.set_first_pages_deployment!(deployment)
+ end
+
+ it 'does not try to migrate project' do
+ expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
+
+ is_expected.to eq(migrated: 0, errored: 0)
+ end
+ end
end
end
+ end
- context 'when pages directory exists on disk' do
- before do
- FileUtils.mkdir_p File.join(project.pages_path, "public")
- File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
- f.write("Hello!")
+ describe '#execute_with_threads' do
+ subject { service.execute_with_threads(threads: 3, batch_size: batch_size) }
+
+ include_examples "migrates projects properly"
+
+ context 'when there is work for multiple threads' do
+ let(:batch_size) { 2 } # override to force usage of multiple threads
+
+ it 'uses multiple threads' do
+ projects = create_list(:project, 20)
+ projects.each do |project|
+ project.mark_pages_as_deployed
+
+ FileUtils.mkdir_p File.join(project.pages_path, "public")
+ File.open(File.join(project.pages_path, "public/index.html"), "w") do |f|
+ f.write("Hello!")
+ end
end
- end
- it 'migrates pages projects without deployments' do
- expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project, ignore_invalid_entries: false) do |service|
- expect(service).to receive(:execute).and_call_original
+ threads = Concurrent::Set.new
+
+ expect(service).to receive(:migrate_project).exactly(20).times.and_wrap_original do |m, *args|
+ threads.add(Thread.current)
+
+        # Sleep to be certain that one thread can't consume the whole queue.
+        # The test works without it, but the sleep avoids flakiness.
+ sleep(0.01)
+
+ m.call(*args)
end
- expect do
- expect(service.execute).to eq(migrated: 1, errored: 0)
- end.to change { project.pages_metadatum.reload.pages_deployment }.from(nil)
+ is_expected.to eq(migrated: 20, errored: 0)
+ expect(threads.length).to eq(3)
end
+ end
+ end
- context 'when deployed already exists for the project' do
- before do
- deployment = create(:pages_deployment, project: project)
- project.set_first_pages_deployment!(deployment)
- end
+ describe "#execute_for_batch" do
+ subject { service.execute_for_batch(Project.ids) }
- it 'does not try to migrate project' do
- expect(::Pages::MigrateLegacyStorageToDeploymentService).not_to receive(:new)
+ include_examples "migrates projects properly"
- expect(service.execute).to eq(migrated: 0, errored: 0)
+    it 'only tries to migrate projects with the passed ids' do
+ projects = create_list(:project, 5)
+
+ projects.each(&:mark_pages_as_deployed)
+ projects_to_migrate = projects.first(3)
+
+ projects_to_migrate.each do |project|
+ expect_next_instance_of(::Pages::MigrateLegacyStorageToDeploymentService, project, ignore_invalid_entries: false, mark_projects_as_not_deployed: false) do |service|
+ expect(service).to receive(:execute).and_call_original
end
end
+
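+      # Only the three selected projects are processed; they error because no pages directory exists on disk.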
+ expect(service.execute_for_batch(projects_to_migrate.pluck(:id))).to eq(migrated: 0, errored: 3)
end
end
end
diff --git a/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb b/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
index d95303c3e85..25f571a73d1 100644
--- a/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
+++ b/spec/services/pages/migrate_legacy_storage_to_deployment_service_spec.rb
@@ -14,44 +14,44 @@ RSpec.describe Pages::MigrateLegacyStorageToDeploymentService do
expect(described_class.new(project, ignore_invalid_entries: true).execute[:status]).to eq(:error)
end
- it 'marks pages as not deployed if public directory is absent' do
- project.mark_pages_as_deployed
-
- expect(project.pages_metadatum.reload.deployed).to eq(true)
+ context 'when mark_projects_as_not_deployed is passed' do
+ let(:service) { described_class.new(project, mark_projects_as_not_deployed: true) }
- expect(service.execute).to(
- eq(status: :error,
- message: "Can't create zip archive: Can not find valid public dir in #{project.pages_path}")
- )
+ it 'marks pages as not deployed if public directory is absent and invalid entries are ignored' do
+ project.mark_pages_as_deployed
+ expect(project.pages_metadatum.reload.deployed).to eq(true)
- expect(project.pages_metadatum.reload.deployed).to eq(false)
- end
+ expect(service.execute).to(
+ eq(status: :success,
+ message: "Archive not created. Missing public directory in #{project.pages_path}? Marked project as not deployed")
+ )
- it 'does not mark pages as not deployed if public directory is absent but pages_deployment exists' do
- deployment = create(:pages_deployment, project: project)
- project.update_pages_deployment!(deployment)
- project.mark_pages_as_deployed
+ expect(project.pages_metadatum.reload.deployed).to eq(false)
+ end
- expect(project.pages_metadatum.reload.deployed).to eq(true)
+ it 'does not mark pages as not deployed if public directory is absent but pages_deployment exists' do
+ deployment = create(:pages_deployment, project: project)
+ project.update_pages_deployment!(deployment)
+ project.mark_pages_as_deployed
+ expect(project.pages_metadatum.reload.deployed).to eq(true)
- expect(service.execute).to(
- eq(status: :error,
- message: "Can't create zip archive: Can not find valid public dir in #{project.pages_path}")
- )
+ expect(service.execute).to(
+ eq(status: :success,
+ message: "Archive not created. Missing public directory in #{project.pages_path}? Marked project as not deployed")
+ )
- expect(project.pages_metadatum.reload.deployed).to eq(true)
+ expect(project.pages_metadatum.reload.deployed).to eq(true)
+ end
end
- it 'does not mark pages as not deployed if public directory is absent but feature is disabled' do
- stub_feature_flags(pages_migration_mark_as_not_deployed: false)
-
+ it 'does not mark pages as not deployed if public directory is absent but invalid entries are not ignored' do
project.mark_pages_as_deployed
expect(project.pages_metadatum.reload.deployed).to eq(true)
expect(service.execute).to(
eq(status: :error,
- message: "Can't create zip archive: Can not find valid public dir in #{project.pages_path}")
+ message: "Archive not created. Missing public directory in #{project.pages_path}")
)
expect(project.pages_metadatum.reload.deployed).to eq(true)
diff --git a/spec/services/pages/zip_directory_service_spec.rb b/spec/services/pages/zip_directory_service_spec.rb
index 9de68dd62bb..9cce90c6c0d 100644
--- a/spec/services/pages/zip_directory_service_spec.rb
+++ b/spec/services/pages/zip_directory_service_spec.rb
@@ -12,8 +12,10 @@ RSpec.describe Pages::ZipDirectoryService do
let(:ignore_invalid_entries) { false }
+ let(:service_directory) { @work_dir }
+
let(:service) do
- described_class.new(@work_dir, ignore_invalid_entries: ignore_invalid_entries)
+ described_class.new(service_directory, ignore_invalid_entries: ignore_invalid_entries)
end
let(:result) do
@@ -25,32 +27,32 @@ RSpec.describe Pages::ZipDirectoryService do
let(:archive) { result[:archive_path] }
let(:entries_count) { result[:entries_count] }
- it 'returns error if project pages dir does not exist' do
- expect(Gitlab::ErrorTracking).not_to receive(:track_exception)
-
- expect(
- described_class.new("/tmp/not/existing/dir").execute
- ).to eq(status: :error, message: "Can not find valid public dir in /tmp/not/existing/dir")
+ shared_examples 'handles invalid public directory' do
+ it 'returns success' do
+ expect(status).to eq(:success)
+ expect(archive).to be_nil
+ expect(entries_count).to be_nil
+ end
end
- it 'returns nils if there is no public directory and does not leave archive' do
- expect(status).to eq(:error)
- expect(message).to eq("Can not find valid public dir in #{@work_dir}")
- expect(archive).to eq(nil)
- expect(entries_count).to eq(nil)
+ context "when work direcotry doesn't exist" do
+ let(:service_directory) { "/tmp/not/existing/dir" }
- expect(File.exist?(File.join(@work_dir, '@migrated.zip'))).to eq(false)
+ include_examples 'handles invalid public directory'
end
- it 'returns nils if public directory is a symlink' do
- create_dir('target')
- create_file('./target/index.html', 'hello')
- create_link("public", "./target")
+ context 'when public directory is absent' do
+ include_examples 'handles invalid public directory'
+ end
+
+ context 'when public directory is a symlink' do
+ before do
+ create_dir('target')
+ create_file('./target/index.html', 'hello')
+ create_link("public", "./target")
+ end
- expect(status).to eq(:error)
- expect(message).to eq("Can not find valid public dir in #{@work_dir}")
- expect(archive).to eq(nil)
- expect(entries_count).to eq(nil)
+ include_examples 'handles invalid public directory'
end
context 'when there is a public directory' do
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index 306d87eefb8..e0d6b9afcff 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -673,7 +673,17 @@ RSpec.describe Projects::CreateService, '#execute' do
expect(rugged.config['gitlab.fullpath']).to eq project.full_path
end
+ it 'triggers PostCreationWorker' do
+ expect(Projects::PostCreationWorker).to receive(:perform_async).with(a_kind_of(Integer))
+
+ create_project(user, opts)
+ end
+
context 'when project has access to shared service' do
+ before do
+ stub_feature_flags(projects_post_creation_worker: false)
+ end
+
context 'Prometheus application is shared via group cluster' do
let(:cluster) { create(:cluster, :group, groups: [group]) }
let(:group) do
@@ -714,9 +724,7 @@ RSpec.describe Projects::CreateService, '#execute' do
it 'cleans invalid record and logs warning', :aggregate_failures do
invalid_service_record = build(:prometheus_service, properties: { api_url: nil, manual_configuration: true }.to_json)
- allow_next_instance_of(Project) do |instance|
- allow(instance).to receive(:build_prometheus_service).and_return(invalid_service_record)
- end
+ allow(PrometheusService).to receive(:new).and_return(invalid_service_record)
expect(Gitlab::ErrorTracking).to receive(:track_exception).with(an_instance_of(ActiveRecord::RecordInvalid), include(extra: { project_id: a_kind_of(Integer) }))
project = create_project(user, opts)
diff --git a/spec/services/projects/destroy_service_spec.rb b/spec/services/projects/destroy_service_spec.rb
index 5410e784cc0..b2a68bbd0aa 100644
--- a/spec/services/projects/destroy_service_spec.rb
+++ b/spec/services/projects/destroy_service_spec.rb
@@ -93,10 +93,26 @@ RSpec.describe Projects::DestroyService, :aggregate_failures do
destroy_project(project, user, {})
end
- it 'performs cancel for project ci pipelines' do
- expect(::Ci::AbortProjectPipelinesService).to receive_message_chain(:new, :execute).with(project)
+ context 'with abort_deleted_project_pipelines feature disabled' do
+ it 'does not cancel project ci pipelines' do
+ stub_feature_flags(abort_deleted_project_pipelines: false)
- destroy_project(project, user, {})
+ expect(::Ci::AbortPipelinesService).not_to receive(:new)
+
+ destroy_project(project, user, {})
+ end
+ end
+
+ context 'with abort_deleted_project_pipelines feature enabled' do
+ it 'performs cancel for project ci pipelines' do
+ stub_feature_flags(abort_deleted_project_pipelines: true)
+ pipelines = build_list(:ci_pipeline, 3, :running)
+ allow(project).to receive(:all_pipelines).and_return(pipelines)
+
+ expect(::Ci::AbortPipelinesService).to receive_message_chain(:new, :execute).with(pipelines, :project_deleted)
+
+ destroy_project(project, user, {})
+ end
end
context 'when project has remote mirrors' do
diff --git a/spec/services/projects/update_pages_configuration_service_spec.rb b/spec/services/projects/update_pages_configuration_service_spec.rb
index 9ef66a10f0d..58939ef4ada 100644
--- a/spec/services/projects/update_pages_configuration_service_spec.rb
+++ b/spec/services/projects/update_pages_configuration_service_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe Projects::UpdatePagesConfigurationService do
end
it "doesn't update configuration files if updates on legacy storage are disabled" do
- stub_feature_flags(pages_update_legacy_storage: false)
+ allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
expect(service).not_to receive(:update_file)
diff --git a/spec/services/projects/update_pages_service_spec.rb b/spec/services/projects/update_pages_service_spec.rb
index b735f4b6bc2..b11607bc213 100644
--- a/spec/services/projects/update_pages_service_spec.rb
+++ b/spec/services/projects/update_pages_service_spec.rb
@@ -16,6 +16,7 @@ RSpec.describe Projects::UpdatePagesService do
subject { described_class.new(project, build) }
before do
+ stub_feature_flags(skip_pages_deploy_to_legacy_storage: false)
project.legacy_remove_pages
end
@@ -55,8 +56,24 @@ RSpec.describe Projects::UpdatePagesService do
end
end
+ it 'creates a temporary directory with the project and build ID' do
+ expect(Dir).to receive(:mktmpdir).with("project-#{project.id}-build-#{build.id}-", anything).and_call_original
+
+ subject.execute
+ end
+
it "doesn't deploy to legacy storage if it's disabled" do
- stub_feature_flags(pages_update_legacy_storage: false)
+ allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
+
+ expect(execute).to eq(:success)
+ expect(project.pages_deployed?).to be_truthy
+
+ expect(File.exist?(File.join(project.pages_path, 'public', 'index.html'))).to eq(false)
+ end
+
+ it "doesn't deploy to legacy storage if skip_pages_deploy_to_legacy_storage is enabled" do
+ allow(Settings.pages.local_store).to receive(:enabled).and_return(true)
+ stub_feature_flags(skip_pages_deploy_to_legacy_storage: true)
expect(execute).to eq(:success)
expect(project.pages_deployed?).to be_truthy
diff --git a/spec/services/projects/update_remote_mirror_service_spec.rb b/spec/services/projects/update_remote_mirror_service_spec.rb
index 30530da8013..96dbfe8e0b7 100644
--- a/spec/services/projects/update_remote_mirror_service_spec.rb
+++ b/spec/services/projects/update_remote_mirror_service_spec.rb
@@ -12,7 +12,9 @@ RSpec.describe Projects::UpdateRemoteMirrorService do
subject(:service) { described_class.new(project, project.creator) }
describe '#execute' do
- subject(:execute!) { service.execute(remote_mirror, 0) }
+ let(:retries) { 0 }
+
+ subject(:execute!) { service.execute(remote_mirror, retries) }
before do
project.repository.add_branch(project.owner, 'existing-branch', 'master')
@@ -62,8 +64,18 @@ RSpec.describe Projects::UpdateRemoteMirrorService do
allow(Gitlab::UrlBlocker).to receive(:blocked_url?).and_return(true)
end
- it 'fails and returns error status' do
+ it 'hard retries and returns error status' do
expect(execute!).to eq(status: :error, message: 'The remote mirror URL is invalid.')
+ expect(remote_mirror).to be_to_retry
+ end
+
+ context 'when retries are exceeded' do
+ let(:retries) { 4 }
+
+ it 'hard fails and returns error status' do
+ expect(execute!).to eq(status: :error, message: 'The remote mirror URL is invalid.')
+ expect(remote_mirror).to be_failed
+ end
end
end
diff --git a/spec/services/quick_actions/interpret_service_spec.rb b/spec/services/quick_actions/interpret_service_spec.rb
index bf35e72a037..9df238c6dac 100644
--- a/spec/services/quick_actions/interpret_service_spec.rb
+++ b/spec/services/quick_actions/interpret_service_spec.rb
@@ -368,24 +368,24 @@ RSpec.describe QuickActions::InterpretService do
spent_at: DateTime.current.to_date
})
end
-
- it 'returns the spend_time message including the formatted duration and verb' do
- _, _, message = service.execute('/spend -120m', issuable)
-
- expect(message).to eq('Subtracted 2h spent time.')
- end
end
shared_examples 'spend command with negative time' do
- it 'populates spend_time: -1800 if content contains /spend -30m' do
+ it 'populates spend_time: -7200 if content contains -120m' do
_, updates, _ = service.execute(content, issuable)
expect(updates).to eq(spend_time: {
- duration: -1800,
+ duration: -7200,
user_id: developer.id,
spent_at: DateTime.current.to_date
})
end
+
+ it 'returns the spend_time message including the formatted duration and verb' do
+ _, _, message = service.execute(content, issuable)
+
+ expect(message).to eq('Subtracted 2h spent time.')
+ end
end
shared_examples 'spend command with valid date' do
@@ -478,7 +478,7 @@ RSpec.describe QuickActions::InterpretService do
end
end
- shared_examples 'empty command' do |error_msg|
+ shared_examples 'failed command' do |error_msg|
it 'populates {} if content contains an unsupported command' do
_, updates, _ = service.execute(content, issuable)
@@ -607,10 +607,10 @@ RSpec.describe QuickActions::InterpretService do
issuable.update!(confidential: true)
end
- it 'does not return the success message' do
+ it 'returns an error message' do
_, _, message = service.execute(content, issuable)
- expect(message).to be_empty
+ expect(message).to eq('Could not apply confidential command.')
end
it 'is not part of the available commands' do
@@ -728,7 +728,7 @@ RSpec.describe QuickActions::InterpretService do
context 'can not be merged when logged user does not have permissions' do
let(:service) { described_class.new(project, create(:user)) }
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply merge command.' do
let(:content) { "/merge" }
let(:issuable) { merge_request }
end
@@ -737,7 +737,7 @@ RSpec.describe QuickActions::InterpretService do
context 'can not be merged when sha does not match' do
let(:service) { described_class.new(project, developer, { merge_request_diff_head_sha: 'othersha' }) }
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply merge command.' do
let(:content) { "/merge" }
let(:issuable) { merge_request }
end
@@ -755,21 +755,21 @@ RSpec.describe QuickActions::InterpretService do
end
context 'issue can not be merged' do
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply merge command.' do
let(:content) { "/merge" }
let(:issuable) { issue }
end
end
context 'non persisted merge request cant be merged' do
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply merge command.' do
let(:content) { "/merge" }
let(:issuable) { build(:merge_request) }
end
end
context 'not persisted merge request can not be merged' do
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply merge command.' do
let(:content) { "/merge" }
let(:issuable) { build(:merge_request, source_project: project) }
end
@@ -786,7 +786,7 @@ RSpec.describe QuickActions::InterpretService do
let(:issuable) { merge_request }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command' do
let(:content) { '/title' }
let(:issuable) { issue }
end
@@ -869,12 +869,12 @@ RSpec.describe QuickActions::InterpretService do
end
end
- it_behaves_like 'empty command', "Failed to assign a user because no user was found." do
+ it_behaves_like 'failed command', "Failed to assign a user because no user was found." do
let(:content) { '/assign @abcd1234' }
let(:issuable) { issue }
end
- it_behaves_like 'empty command', "Failed to assign a user because no user was found." do
+ it_behaves_like 'failed command', "Failed to assign a user because no user was found." do
let(:content) { '/assign' }
let(:issuable) { issue }
end
@@ -890,7 +890,7 @@ RSpec.describe QuickActions::InterpretService do
context 'with an issue instead of a merge request' do
let(:issuable) { issue }
- it_behaves_like 'empty command'
+ it_behaves_like 'failed command', 'Could not apply assign_reviewer command.'
end
# CE does not have multiple reviewers
@@ -935,7 +935,7 @@ RSpec.describe QuickActions::InterpretService do
context 'with an incorrect user' do
let(:content) { '/assign_reviewer @abcd1234' }
- it_behaves_like 'empty command', "Failed to assign a reviewer because no user was found."
+ it_behaves_like 'failed command', "Failed to assign a reviewer because no user was found."
end
context 'with the "reviewer" alias' do
@@ -953,7 +953,7 @@ RSpec.describe QuickActions::InterpretService do
context 'with no user' do
let(:content) { '/assign_reviewer' }
- it_behaves_like 'empty command', "Failed to assign a reviewer because no user was found."
+ it_behaves_like 'failed command', "Failed to assign a reviewer because no user was found."
end
context 'includes only the user reference with extra text' do
@@ -977,7 +977,7 @@ RSpec.describe QuickActions::InterpretService do
context 'with an issue instead of a merge request' do
let(:issuable) { issue }
- it_behaves_like 'empty command'
+ it_behaves_like 'failed command', 'Could not apply unassign_reviewer command.'
end
context 'with anything after the command' do
@@ -1035,14 +1035,20 @@ RSpec.describe QuickActions::InterpretService do
end
end
- it_behaves_like 'milestone command' do
- let(:content) { "/milestone %#{milestone.title}" }
- let(:issuable) { issue }
- end
+ context 'project milestones' do
+ before do
+ milestone
+ end
- it_behaves_like 'milestone command' do
- let(:content) { "/milestone %#{milestone.title}" }
- let(:issuable) { merge_request }
+ it_behaves_like 'milestone command' do
+ let(:content) { "/milestone %#{milestone.title}" }
+ let(:issuable) { issue }
+ end
+
+ it_behaves_like 'milestone command' do
+ let(:content) { "/milestone %#{milestone.title}" }
+ let(:issuable) { merge_request }
+ end
end
context 'only group milestones available' do
@@ -1181,7 +1187,7 @@ RSpec.describe QuickActions::InterpretService do
let(:issuable) { merge_request }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply due command.' do
let(:content) { '/due 2016-08-28' }
let(:issuable) { merge_request }
end
@@ -1211,7 +1217,7 @@ RSpec.describe QuickActions::InterpretService do
let(:issuable) { merge_request }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply remove_due_date command.' do
let(:content) { '/remove_due_date' }
let(:issuable) { merge_request }
end
@@ -1221,12 +1227,12 @@ RSpec.describe QuickActions::InterpretService do
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command' do
let(:content) { '/estimate' }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command' do
let(:content) { '/estimate abc' }
let(:issuable) { issue }
end
@@ -1236,8 +1242,18 @@ RSpec.describe QuickActions::InterpretService do
let(:issuable) { issue }
end
+ it_behaves_like 'spend command' do
+ let(:content) { '/spent 1h' }
+ let(:issuable) { issue }
+ end
+
it_behaves_like 'spend command with negative time' do
- let(:content) { '/spend -30m' }
+ let(:content) { '/spend -120m' }
+ let(:issuable) { issue }
+ end
+
+ it_behaves_like 'spend command with negative time' do
+ let(:content) { '/spent -120m' }
let(:issuable) { issue }
end
@@ -1247,26 +1263,52 @@ RSpec.describe QuickActions::InterpretService do
let(:issuable) { issue }
end
+ it_behaves_like 'spend command with valid date' do
+ let(:date) { '2016-02-02' }
+ let(:content) { "/spent 30m #{date}" }
+ let(:issuable) { issue }
+ end
+
it_behaves_like 'spend command with invalid date' do
let(:content) { '/spend 30m 17-99-99' }
let(:issuable) { issue }
end
+ it_behaves_like 'spend command with invalid date' do
+ let(:content) { '/spent 30m 17-99-99' }
+ let(:issuable) { issue }
+ end
+
it_behaves_like 'spend command with future date' do
let(:content) { '/spend 30m 6017-10-10' }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'spend command with future date' do
+ let(:content) { '/spent 30m 6017-10-10' }
+ let(:issuable) { issue }
+ end
+
+ it_behaves_like 'failed command' do
let(:content) { '/spend' }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command' do
+ let(:content) { '/spent' }
+ let(:issuable) { issue }
+ end
+
+ it_behaves_like 'failed command' do
let(:content) { '/spend abc' }
let(:issuable) { issue }
end
+ it_behaves_like 'failed command' do
+ let(:content) { '/spent abc' }
+ let(:issuable) { issue }
+ end
+
it_behaves_like 'remove_estimate command' do
let(:content) { '/remove_estimate' }
let(:issuable) { issue }
@@ -1323,7 +1365,7 @@ RSpec.describe QuickActions::InterpretService do
end
context 'if issuable is a Commit' do
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply todo command.' do
let(:issuable) { commit }
end
end
@@ -1379,7 +1421,7 @@ RSpec.describe QuickActions::InterpretService do
end
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command' do
let(:content) { '/copy_metadata' }
let(:issuable) { issue }
end
@@ -1419,19 +1461,19 @@ RSpec.describe QuickActions::InterpretService do
end
context 'cross project references' do
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command' do
let(:other_project) { create(:project, :public) }
let(:source_issuable) { create(:labeled_issue, project: other_project, labels: [todo_label, inreview_label]) }
let(:content) { "/copy_metadata #{source_issuable.to_reference(project)}" }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command' do
let(:content) { "/copy_metadata imaginary##{non_existing_record_iid}" }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command' do
let(:other_project) { create(:project, :private) }
let(:source_issuable) { create(:issue, project: other_project) }
@@ -1448,7 +1490,7 @@ RSpec.describe QuickActions::InterpretService do
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command' do
let(:content) { '/duplicate' }
let(:issuable) { issue }
end
@@ -1461,12 +1503,12 @@ RSpec.describe QuickActions::InterpretService do
let(:issuable) { issue }
end
- it_behaves_like 'empty command', _('Failed to mark this issue as a duplicate because referenced issue was not found.') do
+ it_behaves_like 'failed command', _('Failed to mark this issue as a duplicate because referenced issue was not found.') do
let(:content) { "/duplicate imaginary##{non_existing_record_iid}" }
let(:issuable) { issue }
end
- it_behaves_like 'empty command', _('Failed to mark this issue as a duplicate because referenced issue was not found.') do
+ it_behaves_like 'failed command', _('Failed to mark this issue as a duplicate because referenced issue was not found.') do
let(:other_project) { create(:project, :private) }
let(:issue_duplicate) { create(:issue, project: other_project) }
@@ -1481,62 +1523,62 @@ RSpec.describe QuickActions::InterpretService do
let(:issue) { create(:issue, project: project, author: visitor) }
let(:service) { described_class.new(project, visitor) }
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply assign command.' do
let(:content) { "/assign @#{developer.username}" }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply unassign command.' do
let(:content) { '/unassign' }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply milestone command.' do
let(:content) { "/milestone %#{milestone.title}" }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply remove_milestone command.' do
let(:content) { '/remove_milestone' }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply label command.' do
let(:content) { %(/label ~"#{inprogress.title}" ~#{bug.title} ~unknown) }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply unlabel command.' do
let(:content) { %(/unlabel ~"#{inprogress.title}") }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply relabel command.' do
let(:content) { %(/relabel ~"#{inprogress.title}") }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply due command.' do
let(:content) { '/due tomorrow' }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply remove_due_date command.' do
let(:content) { '/remove_due_date' }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply confidential command.' do
let(:content) { '/confidential' }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply lock command.' do
let(:content) { '/lock' }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply unlock command.' do
let(:content) { '/unlock' }
let(:issuable) { issue }
end
@@ -1554,19 +1596,19 @@ RSpec.describe QuickActions::InterpretService do
end
context 'ignores command with no argument' do
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command' do
let(:content) { '/award' }
let(:issuable) { issue }
end
end
context 'ignores non-existing / invalid emojis' do
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command' do
let(:content) { '/award noop' }
let(:issuable) { issue }
end
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command' do
let(:content) { '/award :lorem_ipsum:' }
let(:issuable) { issue }
end
@@ -1576,7 +1618,7 @@ RSpec.describe QuickActions::InterpretService do
let(:content) { '/award :100:' }
let(:issuable) { commit }
- it_behaves_like 'empty command'
+ it_behaves_like 'failed command', 'Could not apply award command.'
end
end
@@ -1622,14 +1664,14 @@ RSpec.describe QuickActions::InterpretService do
end
context 'ignores command with no argument' do
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply target_branch command.' do
let(:content) { '/target_branch' }
let(:issuable) { another_merge_request }
end
end
context 'ignores non-existing target branch' do
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command', 'Could not apply target_branch command.' do
let(:content) { '/target_branch totally_non_existing_branch' }
let(:issuable) { another_merge_request }
end
@@ -1697,34 +1739,34 @@ RSpec.describe QuickActions::InterpretService do
create(:board, project: project)
end
- it_behaves_like 'empty command'
+ it_behaves_like 'failed command', 'Could not apply board_move command.'
end
context 'if the given label does not exist' do
let(:issuable) { issue }
let(:content) { '/board_move ~"Fake Label"' }
- it_behaves_like 'empty command', 'Failed to move this issue because label was not found.'
+ it_behaves_like 'failed command', 'Failed to move this issue because label was not found.'
end
context 'if multiple labels are given' do
let(:issuable) { issue }
let(:content) { %{/board_move ~"#{inreview.title}" ~"#{todo.title}"} }
- it_behaves_like 'empty command', 'Failed to move this issue because only a single label can be provided.'
+ it_behaves_like 'failed command', 'Failed to move this issue because only a single label can be provided.'
end
context 'if the given label is not a list on the board' do
let(:issuable) { issue }
let(:content) { %{/board_move ~"#{bug.title}"} }
- it_behaves_like 'empty command', 'Failed to move this issue because label was not found.'
+ it_behaves_like 'failed command', 'Failed to move this issue because label was not found.'
end
context 'if issuable is not an Issue' do
let(:issuable) { merge_request }
- it_behaves_like 'empty command'
+ it_behaves_like 'failed command', 'Could not apply board_move command.'
end
end
@@ -1732,7 +1774,7 @@ RSpec.describe QuickActions::InterpretService do
let(:issuable) { commit }
context 'ignores command with no argument' do
- it_behaves_like 'empty command' do
+ it_behaves_like 'failed command' do
let(:content) { '/tag' }
end
end
@@ -1797,7 +1839,7 @@ RSpec.describe QuickActions::InterpretService do
context 'if issuable is not an Issue' do
let(:issuable) { merge_request }
- it_behaves_like 'empty command'
+ it_behaves_like 'failed command', 'Could not apply create_merge_request command.'
end
context "when logged user cannot create_merge_requests in the project" do
@@ -1807,14 +1849,14 @@ RSpec.describe QuickActions::InterpretService do
project.add_developer(developer)
end
- it_behaves_like 'empty command'
+ it_behaves_like 'failed command', 'Could not apply create_merge_request command.'
end
context 'when logged user cannot push code to the project' do
let(:project) { create(:project, :private) }
let(:service) { described_class.new(project, create(:user)) }
- it_behaves_like 'empty command'
+ it_behaves_like 'failed command', 'Could not apply create_merge_request command.'
end
it 'populates create_merge_request with branch_name and issue iid' do
@@ -1953,7 +1995,7 @@ RSpec.describe QuickActions::InterpretService do
context 'invite_email command' do
let_it_be(:issuable) { issue }
- it_behaves_like 'empty command', "No email participants were added. Either none were provided, or they already exist." do
+ it_behaves_like 'failed command', "No email participants were added. Either none were provided, or they already exist." do
let(:content) { '/invite_email' }
end
@@ -1964,7 +2006,7 @@ RSpec.describe QuickActions::InterpretService do
issuable.issue_email_participants.create!(email: "a@gitlab.com")
end
- it_behaves_like 'empty command', "No email participants were added. Either none were provided, or they already exist."
+ it_behaves_like 'failed command', "No email participants were added. Either none were provided, or they already exist."
end
context 'with new email participants' do
@@ -2241,10 +2283,14 @@ RSpec.describe QuickActions::InterpretService do
end
describe 'spend command' do
- let(:content) { '/spend -120m' }
+ it 'includes the formatted duration and proper verb when using /spend' do
+ _, explanations = service.explain('/spend -120m', issue)
- it 'includes the formatted duration and proper verb' do
- _, explanations = service.explain(content, issue)
+ expect(explanations).to eq(['Subtracts 2h spent time.'])
+ end
+
+ it 'includes the formatted duration and proper verb when using /spent' do
+ _, explanations = service.explain('/spent -120m', issue)
expect(explanations).to eq(['Subtracts 2h spent time.'])
end
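
The hunks above move many examples from the 'empty command' shared example to a parameterized 'failed command' one that can also assert an error message. A minimal, self-contained sketch of that pattern (the shared example and result hash below are hypothetical stand-ins, not GitLab's actual definitions), runnable with the rspec gem:

    # Sketch of a parameterized shared example; the result hash stands in
    # for what the real interpreter service would return.
    require 'rspec/autorun'

    RSpec.shared_examples 'failed command' do |error_message = nil|
      it 'applies no commands and surfaces the error message' do
        result = subject

        expect(result[:commands]).to be_empty
        expect(result[:error]).to eq(error_message) if error_message
      end
    end

    RSpec.describe 'a quick-action interpreter (sketch)' do
      subject do
        # Stand-in for service.execute(content, issuable)
        { commands: [], error: 'Could not apply merge command.' }
      end

      it_behaves_like 'failed command', 'Could not apply merge command.'
      it_behaves_like 'failed command'
    end
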
diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb
index dab38445ccf..02d60f076ca 100644
--- a/spec/services/repositories/changelog_service_spec.rb
+++ b/spec/services/repositories/changelog_service_spec.rb
@@ -130,13 +130,14 @@ RSpec.describe Repositories::ChangelogService do
describe '#start_of_commit_range' do
let(:project) { build_stubbed(:project) }
let(:user) { build_stubbed(:user) }
+ let(:config) { Gitlab::Changelog::Config.new(project) }
context 'when the "from" argument is specified' do
it 'returns the value of the argument' do
service = described_class
.new(project, user, version: '1.0.0', from: 'foo', to: 'bar')
- expect(service.start_of_commit_range).to eq('foo')
+ expect(service.start_of_commit_range(config)).to eq('foo')
end
end
@@ -145,12 +146,12 @@ RSpec.describe Repositories::ChangelogService do
service = described_class
.new(project, user, version: '1.0.0', to: 'bar')
- finder_spy = instance_spy(Repositories::PreviousTagFinder)
+ finder_spy = instance_spy(Repositories::ChangelogTagFinder)
tag = double(:tag, target_commit: double(:commit, id: '123'))
- allow(Repositories::PreviousTagFinder)
+ allow(Repositories::ChangelogTagFinder)
.to receive(:new)
- .with(project)
+ .with(project, regex: an_instance_of(String))
.and_return(finder_spy)
allow(finder_spy)
@@ -158,18 +159,18 @@ RSpec.describe Repositories::ChangelogService do
.with('1.0.0')
.and_return(tag)
- expect(service.start_of_commit_range).to eq('123')
+ expect(service.start_of_commit_range(config)).to eq('123')
end
it 'raises an error when no tag is found' do
service = described_class
.new(project, user, version: '1.0.0', to: 'bar')
- finder_spy = instance_spy(Repositories::PreviousTagFinder)
+ finder_spy = instance_spy(Repositories::ChangelogTagFinder)
- allow(Repositories::PreviousTagFinder)
+ allow(Repositories::ChangelogTagFinder)
.to receive(:new)
- .with(project)
+ .with(project, regex: an_instance_of(String))
.and_return(finder_spy)
allow(finder_spy)
@@ -177,7 +178,7 @@ RSpec.describe Repositories::ChangelogService do
.with('1.0.0')
.and_return(nil)
- expect { service.start_of_commit_range }
+ expect { service.start_of_commit_range(config) }
.to raise_error(Gitlab::Changelog::Error)
end
end
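
The ChangelogTagFinder stubbing above combines `instance_spy` with a constructor stub constrained by an argument matcher. A generic sketch of that technique, where TagFinder and its methods are made up purely for illustration:

    # Runnable with the rspec gem: stub a class's .new, match keyword
    # arguments loosely, and return a spy whose calls are pre-programmed.
    require 'rspec/autorun'

    class TagFinder
      def initialize(project, regex:); end

      def execute(version); end
    end

    RSpec.describe 'constructor stubbing (sketch)' do
      it 'returns the stubbed tag for any regex string' do
        finder = instance_spy(TagFinder)
        tag = double(:tag, target_commit: double(:commit, id: '123'))

        allow(TagFinder).to receive(:new)
          .with('project', regex: an_instance_of(String))
          .and_return(finder)
        allow(finder).to receive(:execute).with('1.0.0').and_return(tag)

        found = TagFinder.new('project', regex: 'v.*').execute('1.0.0')

        expect(found.target_commit.id).to eq('123')
      end
    end
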
diff --git a/spec/services/security/ci_configuration/sast_parser_service_spec.rb b/spec/services/security/ci_configuration/sast_parser_service_spec.rb
index 21490f993c7..4ebaddcfa4e 100644
--- a/spec/services/security/ci_configuration/sast_parser_service_spec.rb
+++ b/spec/services/security/ci_configuration/sast_parser_service_spec.rb
@@ -12,8 +12,8 @@ RSpec.describe Security::CiConfiguration::SastParserService do
let(:sast_analyzer_image_tag) { configuration['global'][2] }
let(:sast_pipeline_stage) { configuration['pipeline'][0] }
let(:sast_search_max_depth) { configuration['pipeline'][1] }
- let(:brakeman) { configuration['analyzers'][0] }
- let(:bandit) { configuration['analyzers'][1] }
+ let(:bandit) { configuration['analyzers'][0] }
+ let(:brakeman) { configuration['analyzers'][1] }
let(:sast_brakeman_level) { brakeman['variables'][0] }
it 'parses the configuration for SAST' do
diff --git a/spec/services/spam/spam_action_service_spec.rb b/spec/services/spam/spam_action_service_spec.rb
index 371923f1518..e8ac826df1c 100644
--- a/spec/services/spam/spam_action_service_spec.rb
+++ b/spec/services/spam/spam_action_service_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe Spam::SpamActionService do
include_context 'includes Spam constants'
+ let(:request) { double(:request, env: env, headers: {}) }
+ let(:issue) { create(:issue, project: project, author: user) }
let(:fake_ip) { '1.2.3.4' }
let(:fake_user_agent) { 'fake-user-agent' }
let(:fake_referrer) { 'fake-http-referrer' }
@@ -14,11 +16,8 @@ RSpec.describe Spam::SpamActionService do
'HTTP_REFERRER' => fake_referrer }
end
- let(:request) { double(:request, env: env) }
-
let_it_be(:project) { create(:project, :public) }
let_it_be(:user) { create(:user) }
- let(:issue) { create(:issue, project: project, author: user) }
before do
issue.spam = false
@@ -48,7 +47,7 @@ RSpec.describe Spam::SpamActionService do
shared_examples 'creates a spam log' do
it do
- expect { subject }.to change { SpamLog.count }.by(1)
+ expect { subject }.to change(SpamLog, :count).by(1)
new_spam_log = SpamLog.last
expect(new_spam_log.user_id).to eq(user.id)
@@ -62,7 +61,7 @@ RSpec.describe Spam::SpamActionService do
end
describe '#execute' do
- let(:request) { double(:request, env: env) }
+ let(:request) { double(:request, env: env, headers: nil) }
let(:fake_captcha_verification_service) { double(:captcha_verification_service) }
let(:fake_verdict_service) { double(:spam_verdict_service) }
let(:allowlisted) { false }
@@ -70,7 +69,7 @@ RSpec.describe Spam::SpamActionService do
let(:captcha_response) { 'abc123' }
let(:spam_log_id) { existing_spam_log.id }
let(:spam_params) do
- Spam::SpamActionService.filter_spam_params!(
+ ::Spam::SpamParams.new(
api: api,
captcha_response: captcha_response,
spam_log_id: spam_log_id
@@ -111,10 +110,30 @@ RSpec.describe Spam::SpamActionService do
allow(Spam::SpamVerdictService).to receive(:new).with(verdict_service_args).and_return(fake_verdict_service)
end
+ context 'when the captcha params are passed in the headers' do
+ let(:request) { double(:request, env: env, headers: headers) }
+ let(:spam_params) { Spam::SpamActionService.filter_spam_params!({ api: api }, request) }
+ let(:headers) do
+ {
+ 'X-GitLab-Captcha-Response' => captcha_response,
+ 'X-GitLab-Spam-Log-Id' => spam_log_id
+ }
+ end
+
+ it 'extracts the headers correctly' do
+ expect(fake_captcha_verification_service)
+ .to receive(:execute).with(captcha_response: captcha_response, request: request).and_return(true)
+ expect(SpamLog)
+ .to receive(:verify_recaptcha!).with(user_id: user.id, id: spam_log_id)
+
+ subject
+ end
+ end
+
context 'when captcha response verification returns true' do
before do
- expect(fake_captcha_verification_service)
- .to receive(:execute).with(captcha_response: captcha_response, request: request) { true }
+ allow(fake_captcha_verification_service)
+ .to receive(:execute).with(captcha_response: captcha_response, request: request).and_return(true)
end
it "doesn't check with the SpamVerdictService" do
@@ -136,8 +155,8 @@ RSpec.describe Spam::SpamActionService do
context 'when captcha response verification returns false' do
before do
- expect(fake_captcha_verification_service)
- .to receive(:execute).with(captcha_response: captcha_response, request: request) { false }
+ allow(fake_captcha_verification_service)
+ .to receive(:execute).with(captcha_response: captcha_response, request: request).and_return(false)
end
context 'when spammable attributes have not changed' do
@@ -146,21 +165,20 @@ RSpec.describe Spam::SpamActionService do
end
it 'does not create a spam log' do
- expect { subject }
- .not_to change { SpamLog.count }
+ expect { subject }.not_to change(SpamLog, :count)
end
end
context 'when spammable attributes have changed' do
let(:expected_service_check_response_message) do
- /check Issue spammable model for any errors or captcha requirement/
+ /Check Issue spammable model for any errors or CAPTCHA requirement/
end
before do
- issue.description = 'SPAM!'
+ issue.description = 'Lovely Spam! Wonderful Spam!'
end
- context 'if allowlisted' do
+ context 'when allowlisted' do
let(:allowlisted) { true }
it 'does not perform spam check' do
@@ -229,7 +247,7 @@ RSpec.describe Spam::SpamActionService do
response = subject
expect(response.message).to match(expected_service_check_response_message)
- expect(issue.needs_recaptcha?).to be_truthy
+ expect(issue).to be_needs_recaptcha
end
end
@@ -253,8 +271,7 @@ RSpec.describe Spam::SpamActionService do
end
it 'does not create a spam log' do
- expect { subject }
- .not_to change { SpamLog.count }
+ expect { subject }.not_to change(SpamLog, :count)
end
it 'clears spam flags' do
@@ -264,9 +281,9 @@ RSpec.describe Spam::SpamActionService do
end
end
- context 'spam verdict service options' do
+ context 'with spam verdict service options' do
before do
- allow(fake_verdict_service).to receive(:execute) { ALLOW }
+ allow(fake_verdict_service).to receive(:execute).and_return(ALLOW)
end
context 'when the request is nil' do
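
Several expectations in this file also move from the block form `change { SpamLog.count }` to the block-less `change(SpamLog, :count)` form. A tiny runnable sketch of that matcher style, with a hypothetical Counter class standing in for an ActiveRecord model's `.count`:

    # Runnable with the rspec gem; Counter is a made-up stand-in.
    require 'rspec/autorun'

    class Counter
      @count = 0

      class << self
        attr_accessor :count

        def create!
          self.count += 1
        end
      end
    end

    RSpec.describe Counter do
      it 'increments the count by one' do
        expect { Counter.create! }.to change(Counter, :count).by(1)
      end

      it 'does not change the count when nothing is created' do
        expect { nil }.not_to change(Counter, :count)
      end
    end
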
diff --git a/spec/services/submit_usage_ping_service_spec.rb b/spec/services/submit_usage_ping_service_spec.rb
index 24afa83ef2c..53cc33afcff 100644
--- a/spec/services/submit_usage_ping_service_spec.rb
+++ b/spec/services/submit_usage_ping_service_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe SubmitUsagePingService do
include StubRequests
include UsageDataHelpers
+ let(:usage_data_id) { 31643 }
let(:score_params) do
{
score: {
@@ -40,6 +41,8 @@ RSpec.describe SubmitUsagePingService do
leader_service_desk_issues: 15.8,
instance_service_desk_issues: 15.1,
+ usage_data_id: usage_data_id,
+
non_existing_column: 'value'
}
}
@@ -47,7 +50,6 @@ RSpec.describe SubmitUsagePingService do
let(:with_dev_ops_score_params) { { dev_ops_score: score_params[:score] } }
let(:with_conv_index_params) { { conv_index: score_params[:score] } }
- let(:without_dev_ops_score_params) { { dev_ops_score: {} } }
shared_examples 'does not run' do
it do
@@ -103,7 +105,7 @@ RSpec.describe SubmitUsagePingService do
end
it 'sends a POST request' do
- response = stub_response(body: without_dev_ops_score_params)
+ response = stub_response(body: with_dev_ops_score_params)
subject.execute
@@ -111,7 +113,7 @@ RSpec.describe SubmitUsagePingService do
end
it 'forces a refresh of usage data statistics before submitting' do
- stub_response(body: without_dev_ops_score_params)
+ stub_response(body: with_dev_ops_score_params)
expect(Gitlab::UsageData).to receive(:data).with(force_refresh: true).and_call_original
@@ -124,6 +126,33 @@ RSpec.describe SubmitUsagePingService do
end
it_behaves_like 'saves DevOps report data from the response'
+
+ it 'saves usage_data_id to version_usage_data_id_value' do
+ recorded_at = Time.current
+ usage_data = { uuid: 'uuid', recorded_at: recorded_at }
+
+ expect(Gitlab::UsageData).to receive(:data).with(force_refresh: true).and_return(usage_data)
+
+ subject.execute
+
+ raw_usage_data = RawUsageData.find_by(recorded_at: recorded_at)
+
+ expect(raw_usage_data.version_usage_data_id_value).to eq(31643)
+ end
+ end
+
+ context 'when version app usage_data_id is invalid' do
+ let(:usage_data_id) { -1000 }
+
+ before do
+ stub_response(body: with_conv_index_params)
+ end
+
+ it 'raises an exception' do
+ expect { subject.execute }.to raise_error(described_class::SubmissionError) do |error|
+ expect(error.message).to include('Invalid usage_data_id in response: -1000')
+ end
+ end
end
context 'when DevOps report data is passed' do
diff --git a/spec/services/submodules/update_service_spec.rb b/spec/services/submodules/update_service_spec.rb
index e7f92d5ba28..1a53da7b9fe 100644
--- a/spec/services/submodules/update_service_spec.rb
+++ b/spec/services/submodules/update_service_spec.rb
@@ -90,7 +90,7 @@ RSpec.describe Submodules::UpdateService do
let(:submodule) { '../six' }
it_behaves_like 'returns error result' do
- let(:error_message) { 'Invalid parameters' }
+ let(:error_message) { 'Invalid submodule path' }
end
end
end
diff --git a/spec/services/system_hooks_service_spec.rb b/spec/services/system_hooks_service_spec.rb
index 446325e5f71..d8435c72896 100644
--- a/spec/services/system_hooks_service_spec.rb
+++ b/spec/services/system_hooks_service_spec.rb
@@ -113,37 +113,9 @@ RSpec.describe SystemHooksService do
expect(data[:old_path]).to eq('old-path')
end
end
-
- context 'user_rename' do
- it 'contains old and new username' do
- allow(user).to receive(:username_before_last_save).and_return('old-username')
-
- data = event_data(user, :rename)
-
- expect(data).to include(:event_name, :name, :created_at, :updated_at, :email, :user_id, :username, :old_username)
- expect(data[:username]).to eq(user.username)
- expect(data[:old_username]).to eq(user.username_before_last_save)
- end
- end
-
- context 'user_failed_login' do
- it 'contains state of user' do
- user.ldap_block!
-
- data = event_data(user, :failed_login)
-
- expect(data).to include(:event_name, :name, :created_at, :updated_at, :email, :user_id, :username, :state)
- expect(data[:username]).to eq(user.username)
- expect(data[:state]).to eq('ldap_blocked')
- end
- end
end
context 'event names' do
- it { expect(event_name(user, :create)).to eq "user_create" }
- it { expect(event_name(user, :destroy)).to eq "user_destroy" }
- it { expect(event_name(user, :rename)).to eq 'user_rename' }
- it { expect(event_name(user, :failed_login)).to eq 'user_failed_login' }
it { expect(event_name(project, :create)).to eq "project_create" }
it { expect(event_name(project, :destroy)).to eq "project_destroy" }
it { expect(event_name(project, :rename)).to eq "project_rename" }
diff --git a/spec/services/system_notes/alert_management_service_spec.rb b/spec/services/system_notes/alert_management_service_spec.rb
index fc71799d8c5..1c36a4036cc 100644
--- a/spec/services/system_notes/alert_management_service_spec.rb
+++ b/spec/services/system_notes/alert_management_service_spec.rb
@@ -69,7 +69,7 @@ RSpec.describe ::SystemNotes::AlertManagementService do
end
it 'has the appropriate message' do
- expect(subject.note).to eq('logged a resolving alert from **Some Service**')
+ expect(subject.note).to eq('logged a recovery alert from **Some Service**')
end
end
end
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index 743dc080b06..59f936509df 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe TodoService do
+ include AfterNextHelpers
+
let_it_be(:project) { create(:project, :repository) }
let_it_be(:author) { create(:user) }
let_it_be(:assignee) { create(:user) }
@@ -343,19 +345,19 @@ RSpec.describe TodoService do
describe '#destroy_target' do
it 'refreshes the todos count cache for users with todos on the target' do
- create(:todo, target: issue, user: john_doe, author: john_doe, project: issue.project)
+ create(:todo, state: :pending, target: issue, user: john_doe, author: john_doe, project: issue.project)
- expect_any_instance_of(User).to receive(:update_todos_count_cache).and_call_original
+ expect_next(Users::UpdateTodoCountCacheService, [john_doe]).to receive(:execute)
- service.destroy_target(issue) { }
+ service.destroy_target(issue) { issue.destroy! }
end
it 'does not refresh the todos count cache for users with only done todos on the target' do
create(:todo, :done, target: issue, user: john_doe, author: john_doe, project: issue.project)
- expect_any_instance_of(User).not_to receive(:update_todos_count_cache)
+ expect(Users::UpdateTodoCountCacheService).not_to receive(:new)
- service.destroy_target(issue) { }
+ service.destroy_target(issue) { issue.destroy! }
end
it 'yields the target to the caller' do
@@ -1007,7 +1009,8 @@ RSpec.describe TodoService do
end
describe '#update_note' do
- let(:noteable) { create(:issue, project: project) }
+ let_it_be(:noteable) { create(:issue, project: project) }
+
let(:note) { create(:note, project: project, note: mentions, noteable: noteable) }
let(:addressed_note) { create(:note, project: project, note: "#{directly_addressed}", noteable: noteable) }
@@ -1044,12 +1047,34 @@ RSpec.describe TodoService do
should_not_create_todo(user: skipped, target: noteable, action: Todo::DIRECTLY_ADDRESSED)
end
- it 'does not create a todo if user was already mentioned and todo is pending' do
- stub_feature_flags(multiple_todos: false)
+ context 'users already have pending todos and the multiple_todos feature is off' do
+ before do
+ stub_feature_flags(multiple_todos: false)
+ end
+
+ let_it_be(:pending_todo_for_member) { create(:todo, :mentioned, user: member, project: project, target: noteable) }
+ let_it_be(:pending_todo_for_guest) { create(:todo, :mentioned, user: guest, project: project, target: noteable) }
+ let_it_be(:pending_todo_for_admin) { create(:todo, :mentioned, user: admin, project: project, target: noteable) }
+ let_it_be(:note_mentioning_1_user) do
+ create(:note, project: project, note: "FYI #{member.to_reference}", noteable: noteable)
+ end
- create(:todo, :mentioned, user: member, project: project, target: noteable, author: author)
+ let_it_be(:note_mentioning_3_users) do
+ create(:note, project: project, note: 'FYI: ' + [member, guest, admin].map(&:to_reference).join(' '), noteable: noteable)
+ end
+
+ it 'does not create a todo if user was already mentioned and todo is pending' do
+ expect { service.update_note(note_mentioning_1_user, author, skip_users) }.not_to change(member.todos, :count)
+ end
- expect { service.update_note(note, author, skip_users) }.not_to change(member.todos, :count)
+ it 'does not create N+1 queries for pending todos' do
+ # Excluding queries for user permissions because those do execute N+1 queries
+ allow_any_instance_of(User).to receive(:can?).and_return(true)
+
+ control_count = ActiveRecord::QueryRecorder.new { service.update_note(note_mentioning_1_user, author, skip_users) }.count
+
+ expect { service.update_note(note_mentioning_3_users, author, skip_users) }.not_to exceed_query_limit(control_count)
+ end
end
it 'does not create a todo if user was already mentioned and todo is done' do
@@ -1076,13 +1101,9 @@ RSpec.describe TodoService do
it 'updates cached counts when a todo is created' do
issue = create(:issue, project: project, assignees: [john_doe], author: author)
- expect(john_doe.todos_pending_count).to eq(0)
- expect(john_doe).to receive(:update_todos_count_cache).and_call_original
+ expect_next(Users::UpdateTodoCountCacheService, [john_doe]).to receive(:execute)
service.new_issue(issue, author)
-
- expect(Todo.where(user_id: john_doe.id, state: :pending).count).to eq 1
- expect(john_doe.todos_pending_count).to eq(1)
end
shared_examples 'updating todos state' do |state, new_state, new_resolved_by = nil|
diff --git a/spec/services/todos/destroy/destroyed_issuable_service_spec.rb b/spec/services/todos/destroy/destroyed_issuable_service_spec.rb
new file mode 100644
index 00000000000..24f74bae7c8
--- /dev/null
+++ b/spec/services/todos/destroy/destroyed_issuable_service_spec.rb
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Todos::Destroy::DestroyedIssuableService do
+ describe '#execute' do
+ let_it_be(:target) { create(:merge_request) }
+ let_it_be(:pending_todo) { create(:todo, :pending, project: target.project, target: target, user: create(:user)) }
+ let_it_be(:done_todo) { create(:todo, :done, project: target.project, target: target, user: create(:user)) }
+
+ def execute
+ described_class.new(target.id, target.class.name).execute
+ end
+
+ it 'deletes todos for specified target ID and type' do
+ control_count = ActiveRecord::QueryRecorder.new { execute }.count
+
+ # Create more todos for the target
+ create(:todo, :pending, project: target.project, target: target, user: create(:user))
+ create(:todo, :pending, project: target.project, target: target, user: create(:user))
+ create(:todo, :done, project: target.project, target: target, user: create(:user))
+ create(:todo, :done, project: target.project, target: target, user: create(:user))
+
+ expect { execute }.not_to exceed_query_limit(control_count)
+ expect(target.reload.todos.count).to eq(0)
+ end
+
+ it 'invalidates todos cache counts of todo users', :use_clean_rails_redis_caching do
+ expect { execute }
+ .to change { pending_todo.user.todos_pending_count }.from(1).to(0)
+ .and change { done_todo.user.todos_done_count }.from(1).to(0)
+ end
+ end
+end
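
The query-limit check above records a control count and asserts that later runs stay within it. The `QueryRecorder`/`exceed_query_limit` helpers are GitLab-specific, but (as the query_recorder.rb diff further down shows) they are built on ActiveSupport notifications. A generic sketch of that underlying counting mechanism, using a made-up event name instead of 'sql.active_record' and assuming the activesupport gem is available:

    require 'active_support'
    require 'active_support/notifications'

    event_count = 0
    counter = ->(*) { event_count += 1 }

    # Count how many instrumented events fire while the block runs.
    ActiveSupport::Notifications.subscribed(counter, 'query.sketch') do
      3.times { ActiveSupport::Notifications.instrument('query.sketch', sql: 'SELECT 1') }
    end

    puts event_count # => 3
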
diff --git a/spec/services/todos/destroy/entity_leave_service_spec.rb b/spec/services/todos/destroy/entity_leave_service_spec.rb
index 4126eb88b0b..03fa2482bbf 100644
--- a/spec/services/todos/destroy/entity_leave_service_spec.rb
+++ b/spec/services/todos/destroy/entity_leave_service_spec.rb
@@ -224,6 +224,8 @@ RSpec.describe Todos::Destroy::EntityLeaveService do
end
context 'with nested groups' do
+ let(:parent_group) { create(:group, :public) }
+ let(:parent_subgroup) { create(:group) }
let(:subgroup) { create(:group, :private, parent: group) }
let(:subgroup2) { create(:group, :private, parent: group) }
let(:subproject) { create(:project, group: subgroup) }
@@ -235,12 +237,17 @@ RSpec.describe Todos::Destroy::EntityLeaveService do
let!(:todo_subgroup2_user) { create(:todo, user: user, group: subgroup2) }
let!(:todo_subproject_user2) { create(:todo, user: user2, project: subproject) }
let!(:todo_subpgroup_user2) { create(:todo, user: user2, group: subgroup) }
+ let!(:todo_parent_group_user) { create(:todo, user: user, group: parent_group) }
+
+ before do
+ group.update!(parent: parent_group)
+ end
context 'when the user is not a member of any groups/projects' do
it 'removes todos for the user including subprojects todos' do
- expect { subject }.to change { Todo.count }.from(12).to(4)
+ expect { subject }.to change { Todo.count }.from(13).to(5)
- expect(user.todos).to be_empty
+ expect(user.todos).to eq([todo_parent_group_user])
expect(user2.todos)
.to match_array(
[todo_issue_c_user2, todo_group_user2, todo_subproject_user2, todo_subpgroup_user2]
@@ -250,8 +257,6 @@ RSpec.describe Todos::Destroy::EntityLeaveService do
context 'when the user is member of a parent group' do
before do
- parent_group = create(:group)
- group.update!(parent: parent_group)
parent_group.add_developer(user)
end
@@ -264,9 +269,12 @@ RSpec.describe Todos::Destroy::EntityLeaveService do
end
it 'does not remove group and subproject todos' do
- expect { subject }.to change { Todo.count }.from(12).to(7)
+ expect { subject }.to change { Todo.count }.from(13).to(8)
- expect(user.todos).to match_array([todo_group_user, todo_subgroup_user, todo_subproject_user])
+ expect(user.todos)
+ .to match_array(
+ [todo_group_user, todo_subgroup_user, todo_subproject_user, todo_parent_group_user]
+ )
expect(user2.todos)
.to match_array(
[todo_issue_c_user2, todo_group_user2, todo_subproject_user2, todo_subpgroup_user2]
@@ -280,9 +288,12 @@ RSpec.describe Todos::Destroy::EntityLeaveService do
end
it 'does not remove subproject and group todos' do
- expect { subject }.to change { Todo.count }.from(12).to(7)
+ expect { subject }.to change { Todo.count }.from(13).to(8)
- expect(user.todos).to match_array([todo_subgroup_user, todo_group_user, todo_subproject_user])
+ expect(user.todos)
+ .to match_array(
+ [todo_subgroup_user, todo_group_user, todo_subproject_user, todo_parent_group_user]
+ )
expect(user2.todos)
.to match_array(
[todo_issue_c_user2, todo_group_user2, todo_subproject_user2, todo_subpgroup_user2]
diff --git a/spec/services/upload_service_spec.rb b/spec/services/upload_service_spec.rb
index 89a28e6a098..48aa65451f3 100644
--- a/spec/services/upload_service_spec.rb
+++ b/spec/services/upload_service_spec.rb
@@ -67,6 +67,29 @@ RSpec.describe UploadService do
it { expect(@link_to_file).to eq({}) }
end
+
+ describe '#override_max_attachment_size' do
+ let(:txt) { fixture_file_upload('spec/fixtures/doc_sample.txt', 'text/plain') }
+ let(:service) { described_class.new(@project, txt, FileUploader) }
+
+ subject { service.execute.to_h }
+
+ before do
+ allow(txt).to receive(:size) { 100.megabytes.to_i }
+ end
+
+ it 'allows the upload' do
+ service.override_max_attachment_size = 101.megabytes
+
+ expect(subject.keys).to eq(%i(alt url markdown))
+ end
+
+ it 'disallows the upload' do
+ service.override_max_attachment_size = 99.megabytes
+
+ expect(subject).to eq({})
+ end
+ end
end
def upload_file(project, file)
diff --git a/spec/services/user_preferences/update_service_spec.rb b/spec/services/user_preferences/update_service_spec.rb
new file mode 100644
index 00000000000..59089a4a7af
--- /dev/null
+++ b/spec/services/user_preferences/update_service_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe UserPreferences::UpdateService do
+ let(:user) { create(:user) }
+ let(:params) { { view_diffs_file_by_file: false } }
+
+ describe '#execute' do
+ subject(:service) { described_class.new(user, params) }
+
+ context 'successfully updating the record' do
+ it 'updates the preference and returns a success' do
+ result = service.execute
+
+ expect(result.status).to eq(:success)
+ expect(result.payload[:preferences].view_diffs_file_by_file).to eq(params[:view_diffs_file_by_file])
+ end
+ end
+
+ context 'unsuccessfully updating the record' do
+ before do
+ allow(user.user_preference).to receive(:update).and_return(false)
+ end
+
+ it 'returns an error' do
+ result = service.execute
+
+ expect(result.status).to eq(:error)
+ end
+ end
+ end
+end
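
The service exercised above returns a result object exposing `status` and `payload`. A minimal, hypothetical Result class (not GitLab's actual ServiceResponse) illustrating that shape in plain Ruby:

    # Made-up success/error result object for illustration only.
    class Result
      attr_reader :status, :payload, :message

      def initialize(status:, payload: {}, message: nil)
        @status = status
        @payload = payload
        @message = message
      end

      def self.success(payload = {})
        new(status: :success, payload: payload)
      end

      def self.error(message)
        new(status: :error, message: message)
      end
    end

    result = Result.success(preferences: { view_diffs_file_by_file: false })
    puts result.status                                          # => success
    puts result.payload[:preferences][:view_diffs_file_by_file] # => false
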
diff --git a/spec/services/users/refresh_authorized_projects_service_spec.rb b/spec/services/users/refresh_authorized_projects_service_spec.rb
index 1e74ff3d9eb..a8ad0d02f60 100644
--- a/spec/services/users/refresh_authorized_projects_service_spec.rb
+++ b/spec/services/users/refresh_authorized_projects_service_spec.rb
@@ -163,168 +163,4 @@ RSpec.describe Users::RefreshAuthorizedProjectsService do
service.update_authorizations([], [[user.id, project.id, Gitlab::Access::MAINTAINER]])
end
end
-
- describe '#fresh_access_levels_per_project' do
- let(:hash) { service.fresh_access_levels_per_project }
-
- it 'returns a Hash' do
- expect(hash).to be_an_instance_of(Hash)
- end
-
- it 'sets the keys to the project IDs' do
- expect(hash.keys).to eq([project.id])
- end
-
- it 'sets the values to the access levels' do
- expect(hash.values).to eq([Gitlab::Access::MAINTAINER])
- end
-
- context 'personal projects' do
- it 'includes the project with the right access level' do
- expect(hash[project.id]).to eq(Gitlab::Access::MAINTAINER)
- end
- end
-
- context 'projects the user is a member of' do
- let!(:other_project) { create(:project) }
-
- before do
- other_project.team.add_reporter(user)
- end
-
- it 'includes the project with the right access level' do
- expect(hash[other_project.id]).to eq(Gitlab::Access::REPORTER)
- end
- end
-
- context 'projects of groups the user is a member of' do
- let(:group) { create(:group) }
- let!(:other_project) { create(:project, group: group) }
-
- before do
- group.add_owner(user)
- end
-
- it 'includes the project with the right access level' do
- expect(hash[other_project.id]).to eq(Gitlab::Access::OWNER)
- end
- end
-
- context 'projects of subgroups of groups the user is a member of' do
- let(:group) { create(:group) }
- let(:nested_group) { create(:group, parent: group) }
- let!(:other_project) { create(:project, group: nested_group) }
-
- before do
- group.add_maintainer(user)
- end
-
- it 'includes the project with the right access level' do
- expect(hash[other_project.id]).to eq(Gitlab::Access::MAINTAINER)
- end
- end
-
- context 'projects shared with groups the user is a member of' do
- let(:group) { create(:group) }
- let(:other_project) { create(:project) }
- let!(:project_group_link) { create(:project_group_link, project: other_project, group: group, group_access: Gitlab::Access::GUEST) }
-
- before do
- group.add_maintainer(user)
- end
-
- it 'includes the project with the right access level' do
- expect(hash[other_project.id]).to eq(Gitlab::Access::GUEST)
- end
- end
-
- context 'projects shared with subgroups of groups the user is a member of' do
- let(:group) { create(:group) }
- let(:nested_group) { create(:group, parent: group) }
- let(:other_project) { create(:project) }
- let!(:project_group_link) { create(:project_group_link, project: other_project, group: nested_group, group_access: Gitlab::Access::DEVELOPER) }
-
- before do
- group.add_maintainer(user)
- end
-
- it 'includes the project with the right access level' do
- expect(hash[other_project.id]).to eq(Gitlab::Access::DEVELOPER)
- end
- end
- end
-
- describe '#current_authorizations_per_project' do
- let(:hash) { service.current_authorizations_per_project }
-
- it 'returns a Hash' do
- expect(hash).to be_an_instance_of(Hash)
- end
-
- it 'sets the keys to the project IDs' do
- expect(hash.keys).to eq([project.id])
- end
-
- it 'sets the values to the project authorization rows' do
- expect(hash.values.length).to eq(1)
-
- value = hash.values[0]
-
- expect(value.project_id).to eq(project.id)
- expect(value.access_level).to eq(Gitlab::Access::MAINTAINER)
- end
- end
-
- describe '#current_authorizations' do
- context 'without authorizations' do
- it 'returns an empty list' do
- user.project_authorizations.delete_all
-
- expect(service.current_authorizations.empty?).to eq(true)
- end
- end
-
- context 'with an authorization' do
- let(:row) { service.current_authorizations.take }
-
- it 'returns the currently authorized projects' do
- expect(service.current_authorizations.length).to eq(1)
- end
-
- it 'includes the project ID for every row' do
- expect(row.project_id).to eq(project.id)
- end
-
- it 'includes the access level for every row' do
- expect(row.access_level).to eq(Gitlab::Access::MAINTAINER)
- end
- end
- end
-
- describe '#fresh_authorizations' do
- it 'returns the new authorized projects' do
- expect(service.fresh_authorizations.length).to eq(1)
- end
-
- it 'returns the highest access level' do
- project.team.add_guest(user)
-
- rows = service.fresh_authorizations.to_a
-
- expect(rows.length).to eq(1)
- expect(rows.first.access_level).to eq(Gitlab::Access::MAINTAINER)
- end
-
- context 'every returned row' do
- let(:row) { service.fresh_authorizations.take }
-
- it 'includes the project ID' do
- expect(row.project_id).to eq(project.id)
- end
-
- it 'includes the access level' do
- expect(row.access_level).to eq(Gitlab::Access::MAINTAINER)
- end
- end
- end
end
diff --git a/spec/services/users/update_todo_count_cache_service_spec.rb b/spec/services/users/update_todo_count_cache_service_spec.rb
new file mode 100644
index 00000000000..3e3618b1291
--- /dev/null
+++ b/spec/services/users/update_todo_count_cache_service_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::UpdateTodoCountCacheService do
+ describe '#execute' do
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+
+ let_it_be(:todo1) { create(:todo, user: user1, state: :done) }
+ let_it_be(:todo2) { create(:todo, user: user1, state: :done) }
+ let_it_be(:todo3) { create(:todo, user: user1, state: :pending) }
+ let_it_be(:todo4) { create(:todo, user: user2, state: :done) }
+ let_it_be(:todo5) { create(:todo, user: user2, state: :pending) }
+ let_it_be(:todo6) { create(:todo, user: user2, state: :pending) }
+
+ it 'updates the todos_counts for users', :use_clean_rails_memory_store_caching do
+ Rails.cache.write(['users', user1.id, 'todos_done_count'], 0)
+ Rails.cache.write(['users', user1.id, 'todos_pending_count'], 0)
+ Rails.cache.write(['users', user2.id, 'todos_done_count'], 0)
+ Rails.cache.write(['users', user2.id, 'todos_pending_count'], 0)
+
+ expect { described_class.new([user1, user2]).execute }
+ .to change(user1, :todos_done_count).from(0).to(2)
+ .and change(user1, :todos_pending_count).from(0).to(1)
+ .and change(user2, :todos_done_count).from(0).to(1)
+ .and change(user2, :todos_pending_count).from(0).to(2)
+
+ Todo.delete_all
+
+ expect { described_class.new([user1, user2]).execute }
+ .to change(user1, :todos_done_count).from(2).to(0)
+ .and change(user1, :todos_pending_count).from(1).to(0)
+ .and change(user2, :todos_done_count).from(1).to(0)
+ .and change(user2, :todos_pending_count).from(2).to(0)
+ end
+
+ it 'avoids N+1 queries' do
+ control_count = ActiveRecord::QueryRecorder.new { described_class.new([user1]).execute }.count
+
+ expect { described_class.new([user1, user2]).execute }.not_to exceed_query_limit(control_count)
+ end
+
+ it 'executes one query per batch of users' do
+ stub_const("#{described_class}::QUERY_BATCH_SIZE", 1)
+
+ expect(ActiveRecord::QueryRecorder.new { described_class.new([user1]).execute }.count).to eq(1)
+ expect(ActiveRecord::QueryRecorder.new { described_class.new([user1, user2]).execute }.count).to eq(2)
+ end
+
+ it 'sets the cache expire time to the users count_cache_validity_period' do
+ allow(user1).to receive(:count_cache_validity_period).and_return(1.minute)
+ allow(user2).to receive(:count_cache_validity_period).and_return(1.hour)
+
+ expect(Rails.cache).to receive(:write).with(['users', user1.id, anything], anything, expires_in: 1.minute).twice
+ expect(Rails.cache).to receive(:write).with(['users', user2.id, anything], anything, expires_in: 1.hour).twice
+
+ described_class.new([user1, user2]).execute
+ end
+ end
+end
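
The spec above expects one cache write per user and counter, each with an expiry taken from the user. A hedged sketch of that caching shape using ActiveSupport's in-memory store (assumes the activesupport gem; the `['users', id, counter]` key layout mirrors the spec, while the counts themselves are invented here rather than derived from Todo rows):

    require 'active_support'
    require 'active_support/cache'

    cache = ActiveSupport::Cache::MemoryStore.new

    # Hypothetical per-user counts; the real service computes these from todos.
    todo_counts = { 1 => { done: 2, pending: 1 }, 2 => { done: 1, pending: 2 } }
    validity_period = 20 * 60 # seconds

    todo_counts.each do |user_id, counts|
      cache.write(['users', user_id, 'todos_done_count'], counts[:done], expires_in: validity_period)
      cache.write(['users', user_id, 'todos_pending_count'], counts[:pending], expires_in: validity_period)
    end

    puts cache.read(['users', 1, 'todos_pending_count']) # => 1
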
diff --git a/spec/spam/concerns/has_spam_action_response_fields_spec.rb b/spec/spam/concerns/has_spam_action_response_fields_spec.rb
index 4d5f8d9d431..9752f6a0b69 100644
--- a/spec/spam/concerns/has_spam_action_response_fields_spec.rb
+++ b/spec/spam/concerns/has_spam_action_response_fields_spec.rb
@@ -19,16 +19,12 @@ RSpec.describe Spam::Concerns::HasSpamActionResponseFields do
end
it 'merges in spam action fields from spammable' do
- result = subject.send(:with_spam_action_response_fields, spammable) do
- { other_field: true }
- end
- expect(result)
+ expect(subject.spam_action_response_fields(spammable))
.to eq({
spam: true,
needs_captcha_response: true,
spam_log_id: 1,
- captcha_site_key: recaptcha_site_key,
- other_field: true
+ captcha_site_key: recaptcha_site_key
})
end
end
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 60a8fb8cb9f..4179e6f7e91 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -15,6 +15,9 @@ Warning[:deprecated] = true unless ENV.key?('SILENCE_DEPRECATIONS')
require './spec/deprecation_toolkit_env'
DeprecationToolkitEnv.configure!
+require './spec/knapsack_env'
+KnapsackEnv.configure!
+
require './spec/simplecov_env'
SimpleCovEnv.start!
@@ -25,7 +28,7 @@ ENV["RAILS_ENV"] = 'test'
ENV["IN_MEMORY_APPLICATION_SETTINGS"] = 'true'
ENV["RSPEC_ALLOW_INVALID_URLS"] = 'true'
-require File.expand_path('../config/environment', __dir__)
+require_relative '../config/environment'
require 'rspec/mocks'
require 'rspec/rails'
@@ -47,16 +50,12 @@ if rspec_profiling_is_configured && (!ENV.key?('CI') || branch_can_be_profiled)
require 'rspec_profiling/rspec'
end
-if ENV['CI'] && ENV['KNAPSACK_GENERATE_REPORT'] && !ENV['NO_KNAPSACK']
- require 'knapsack'
- Knapsack::Adapters::RSpecAdapter.bind
-end
-
# require rainbow gem String monkeypatch, so we can test SystemChecks
require 'rainbow/ext/string'
Rainbow.enabled = false
require_relative('../ee/spec/spec_helper') if Gitlab.ee?
+require_relative('../jh/spec/spec_helper') if Gitlab.jh?
# Requires supporting ruby files with custom matchers and macros, etc,
# in spec/support/ and its subdirectories.
@@ -72,6 +71,8 @@ Dir[Rails.root.join("spec/support/shared_contexts/*.rb")].sort.each { |f| requir
Dir[Rails.root.join("spec/support/shared_examples/*.rb")].sort.each { |f| require f }
Dir[Rails.root.join("spec/support/**/*.rb")].sort.each { |f| require f }
+require_relative '../tooling/quality/test_level'
+
quality_level = Quality::TestLevel.new
RSpec.configure do |config|
@@ -79,7 +80,7 @@ RSpec.configure do |config|
config.run_all_when_everything_filtered = true
config.use_transactional_fixtures = true
- config.use_instantiated_fixtures = false
+ config.use_instantiated_fixtures = false
config.fixture_path = Rails.root
config.verbose_retry = true
@@ -92,6 +93,25 @@ RSpec.configure do |config|
config.full_backtrace = true
end
+ # Attempt to troubleshoot https://gitlab.com/gitlab-org/gitlab/-/issues/297359
+ if ENV['CI']
+ config.after do |example|
+ if example.exception.is_a?(GRPC::Unavailable)
+ warn "=== gRPC unavailable detected, process list:"
+ processes = `ps -ef | grep toml`
+ warn processes
+ warn "=== free memory"
+ warn `free -m`
+ warn "=== uptime"
+ warn `uptime`
+ warn "=== Prometheus metrics:"
+ warn `curl -s -o log/gitaly-metrics.log http://localhost:9236/metrics`
+ warn "=== Taking goroutine dump in log/goroutines.log..."
+ warn `curl -s -o log/goroutines.log http://localhost:9236/debug/pprof/goroutine?debug=2`
+ end
+ end
+ end
+
unless ENV['CI']
# Re-run failures locally with `--only-failures`
config.example_status_persistence_file_path = './spec/examples.txt'
@@ -246,16 +266,15 @@ RSpec.configure do |config|
stub_feature_flags(unified_diff_components: false)
- # Disable this feature flag as we iterate and
- # refactor filtered search to use gitlab ui
- # components to meet feature parody. More details found
- # https://gitlab.com/groups/gitlab-org/-/epics/5501
- stub_feature_flags(boards_filtered_search: false)
-
# The following `vue_issues_list` stub can be removed once the
# Vue issues page has feature parity with the current Haml page
stub_feature_flags(vue_issues_list: false)
+ # Disable `refactor_blob_viewer` as we refactor
+ # the blob viewer. See the following epic for more:
+ # https://gitlab.com/groups/gitlab-org/-/epics/5531
+ stub_feature_flags(refactor_blob_viewer: false)
+
allow(Gitlab::GitalyClient).to receive(:can_use_disk?).and_return(enable_rugged)
else
unstub_all_feature_flags
@@ -278,7 +297,7 @@ RSpec.configure do |config|
Sidekiq::Worker.clear_all
# Administrators have to re-authenticate in order to access administrative
- # functionality when feature flag :user_mode_in_session is active. Any spec
+ # functionality when application setting admin_mode is active. Any spec
# that requires administrative access can use the tag :enable_admin_mode
# to avoid the second auth step (provided the user is already an admin):
#
@@ -295,6 +314,9 @@ RSpec.configure do |config|
end
end
+ # Make sure specs test with the admin_mode setting on by default, unless forced to the opposite
+ stub_application_setting(admin_mode: true) unless example.metadata[:do_not_mock_admin_mode_setting]
+
allow(Gitlab::CurrentSettings).to receive(:current_application_settings?).and_return(false)
end
@@ -317,7 +339,7 @@ RSpec.configure do |config|
config.around do |example|
# Wrap each example in it's own context to make sure the contexts don't
# leak
- Labkit::Context.with_context { example.run }
+ Gitlab::ApplicationContext.with_raw_context { example.run }
end
config.around do |example|
@@ -340,6 +362,9 @@ RSpec.configure do |config|
# Reset all feature flag stubs to default for testing
stub_all_feature_flags
+
+ # Re-enable query limiting in case it was disabled
+ Gitlab::QueryLimiting.enable!
end
config.before(:example, :mailer) do
diff --git a/spec/support/helpers/board_helpers.rb b/spec/support/helpers/board_helpers.rb
index 683ee3e4bf2..6e145fed733 100644
--- a/spec/support/helpers/board_helpers.rb
+++ b/spec/support/helpers/board_helpers.rb
@@ -5,14 +5,5 @@ module BoardHelpers
within card do
first('.board-card-number').click
end
-
- wait_for_sidebar
- end
-
- def wait_for_sidebar
- # loop until the CSS transition is complete
- Timeout.timeout(0.5) do
- loop until evaluate_script('$(".right-sidebar").outerWidth()') == 290
- end
end
end
diff --git a/spec/support/helpers/ci_artifact_metadata_generator.rb b/spec/support/helpers/ci_artifact_metadata_generator.rb
index e02501565a9..ae821d6582b 100644
--- a/spec/support/helpers/ci_artifact_metadata_generator.rb
+++ b/spec/support/helpers/ci_artifact_metadata_generator.rb
@@ -7,7 +7,7 @@
class CiArtifactMetadataGenerator
attr_accessor :entries, :output
- ARTIFACT_METADATA = "GitLab Build Artifacts Metadata 0.0.2\n".freeze
+ ARTIFACT_METADATA = "GitLab Build Artifacts Metadata 0.0.2\n"
def initialize(stream)
@entries = {}
diff --git a/spec/support/helpers/cycle_analytics_helpers.rb b/spec/support/helpers/cycle_analytics_helpers.rb
index 14041ad0ac6..9e62eef14de 100644
--- a/spec/support/helpers/cycle_analytics_helpers.rb
+++ b/spec/support/helpers/cycle_analytics_helpers.rb
@@ -3,15 +3,15 @@
module CycleAnalyticsHelpers
include GitHelpers
- def wait_for_stages_to_load
- expect(page).to have_selector '.js-stage-table'
+ def wait_for_stages_to_load(selector = '.js-path-navigation')
+ expect(page).to have_selector selector
wait_for_requests
end
- def select_group(target_group)
+ def select_group(target_group, ready_selector = '.js-path-navigation')
visit group_analytics_cycle_analytics_path(target_group)
- wait_for_stages_to_load
+ wait_for_stages_to_load(ready_selector)
end
def toggle_dropdown(field)
diff --git a/spec/support/helpers/graphql_helpers.rb b/spec/support/helpers/graphql_helpers.rb
index 75d9508f470..d714f04fbba 100644
--- a/spec/support/helpers/graphql_helpers.rb
+++ b/spec/support/helpers/graphql_helpers.rb
@@ -222,9 +222,12 @@ module GraphqlHelpers
lazy_vals.is_a?(Array) ? lazy_vals.map { |val| sync(val) } : sync(lazy_vals)
end
- def graphql_query_for(name, args = {}, selection = nil)
+ def graphql_query_for(name, args = {}, selection = nil, operation_name = nil)
type = GitlabSchema.types['Query'].fields[GraphqlHelpers.fieldnamerize(name)]&.type
- wrap_query(query_graphql_field(name, args, selection, type))
+ query = wrap_query(query_graphql_field(name, args, selection, type))
+ query = "query #{operation_name}#{query}" if operation_name
+
+ query
end
def wrap_query(query)
@@ -274,11 +277,11 @@ module GraphqlHelpers
# prepare_input_for_mutation({ 'my_key' => 1 })
# => { 'myKey' => 1}
def prepare_input_for_mutation(input)
- input.map do |name, value|
+ input.to_h do |name, value|
value = prepare_input_for_mutation(value) if value.is_a?(Hash)
[GraphqlHelpers.fieldnamerize(name), value]
- end.to_h
+ end
end
def input_variable_name_for_mutation(mutation_name)
@@ -304,7 +307,10 @@ module GraphqlHelpers
def query_graphql_field(name, attributes = {}, fields = nil, type = nil)
type ||= name.to_s.classify
- attributes, fields = [nil, attributes] if fields.nil? && !attributes.is_a?(Hash)
+ if fields.nil? && !attributes.is_a?(Hash)
+ fields = attributes
+ attributes = nil
+ end
field = field_with_params(name, attributes)
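
The `prepare_input_for_mutation` refactor above replaces `map { ... }.to_h` with `to_h` taking a block. A small standalone sketch of that idiom; the camelizing regex here is illustrative only and is not GitLab's `fieldnamerize`:

    # Plain Ruby (2.6+ for Hash#to_h with a block): transform keys while
    # recursing into nested hashes.
    def camelize_keys(input)
      input.to_h do |name, value|
        value = camelize_keys(value) if value.is_a?(Hash)

        [name.to_s.gsub(/_(\w)/) { Regexp.last_match(1).upcase }, value]
      end
    end

    p camelize_keys({ my_key: 1, nested_hash: { other_key: 2 } })
    # => {"myKey"=>1, "nestedHash"=>{"otherKey"=>2}}
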
diff --git a/spec/support/helpers/javascript_fixtures_helpers.rb b/spec/support/helpers/javascript_fixtures_helpers.rb
index 09425c3742a..28375c1d51e 100644
--- a/spec/support/helpers/javascript_fixtures_helpers.rb
+++ b/spec/support/helpers/javascript_fixtures_helpers.rb
@@ -31,7 +31,7 @@ module JavaScriptFixturesHelpers
#
def clean_frontend_fixtures(directory_name)
full_directory_name = File.expand_path(directory_name, fixture_root_path)
- Dir[File.expand_path('*.html', full_directory_name)].each do |file_name|
+ Dir[File.expand_path('*.{html,json,md}', full_directory_name)].each do |file_name|
FileUtils.rm(file_name)
end
end
diff --git a/spec/support/helpers/jira_service_helper.rb b/spec/support/helpers/jira_service_helper.rb
index 698490c8c92..ce908d53f88 100644
--- a/spec/support/helpers/jira_service_helper.rb
+++ b/spec/support/helpers/jira_service_helper.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module JiraServiceHelper
- JIRA_URL = "http://jira.example.net".freeze
+ JIRA_URL = "http://jira.example.net"
JIRA_API = JIRA_URL + "/rest/api/2"
def jira_service_settings
diff --git a/spec/support/helpers/key_generator_helper.rb b/spec/support/helpers/key_generator_helper.rb
index 59c8eeb3692..58bde80a31f 100644
--- a/spec/support/helpers/key_generator_helper.rb
+++ b/spec/support/helpers/key_generator_helper.rb
@@ -27,7 +27,8 @@ module Spec
# Encodes an openssh-mpi-encoded integer.
def encode_mpi(n) # rubocop:disable Naming/UncommunicativeMethodParamName
- chars, n = [], n.to_i
+ chars = []
+ n = n.to_i
chars << (n & 0xff) && n >>= 8 while n != 0
chars << 0 if chars.empty? || chars.last >= 0x80
chars.reverse.pack('C*')
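
The `encode_mpi` helper reformatted above is self-contained, so a worked example can show what the big-endian, sign-safe encoding produces (the method body is copied verbatim from the hunk; only the sample calls are added):

    def encode_mpi(n)
      chars = []
      n = n.to_i
      chars << (n & 0xff) && n >>= 8 while n != 0
      chars << 0 if chars.empty? || chars.last >= 0x80
      chars.reverse.pack('C*')
    end

    p encode_mpi(0x1234).bytes # => [18, 52]
    p encode_mpi(0x80).bytes   # => [0, 128] (leading zero keeps the high bit from reading as a sign)
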
diff --git a/spec/support/helpers/navbar_structure_helper.rb b/spec/support/helpers/navbar_structure_helper.rb
index e18a708e41c..826108a63a5 100644
--- a/spec/support/helpers/navbar_structure_helper.rb
+++ b/spec/support/helpers/navbar_structure_helper.rb
@@ -29,7 +29,7 @@ module NavbarStructureHelper
)
end
- def insert_container_nav(within)
+ def insert_container_nav
insert_after_sub_nav_item(
_('Package Registry'),
within: _('Packages & Registries'),
@@ -37,11 +37,19 @@ module NavbarStructureHelper
)
end
- def insert_dependency_proxy_nav(within)
+ def insert_dependency_proxy_nav
insert_after_sub_nav_item(
_('Package Registry'),
within: _('Packages & Registries'),
new_sub_nav_item_name: _('Dependency Proxy')
)
end
+
+ def insert_infrastructure_registry_nav
+ insert_after_sub_nav_item(
+ _('Package Registry'),
+ within: _('Packages & Registries'),
+ new_sub_nav_item_name: _('Infrastructure Registry')
+ )
+ end
end
diff --git a/spec/support/helpers/next_instance_of.rb b/spec/support/helpers/next_instance_of.rb
index a8e9ab2bafe..95d8936588c 100644
--- a/spec/support/helpers/next_instance_of.rb
+++ b/spec/support/helpers/next_instance_of.rb
@@ -2,25 +2,26 @@
module NextInstanceOf
def expect_next_instance_of(klass, *new_args, &blk)
- stub_new(expect(klass), nil, *new_args, &blk)
+ stub_new(expect(klass), nil, false, *new_args, &blk)
end
- def expect_next_instances_of(klass, number, *new_args, &blk)
- stub_new(expect(klass), number, *new_args, &blk)
+ def expect_next_instances_of(klass, number, ordered = false, *new_args, &blk)
+ stub_new(expect(klass), number, ordered, *new_args, &blk)
end
def allow_next_instance_of(klass, *new_args, &blk)
- stub_new(allow(klass), nil, *new_args, &blk)
+ stub_new(allow(klass), nil, false, *new_args, &blk)
end
- def allow_next_instances_of(klass, number, *new_args, &blk)
- stub_new(allow(klass), number, *new_args, &blk)
+ def allow_next_instances_of(klass, number, ordered = false, *new_args, &blk)
+ stub_new(allow(klass), number, ordered, *new_args, &blk)
end
private
- def stub_new(target, number, *new_args, &blk)
+ def stub_new(target, number, ordered = false, *new_args, &blk)
receive_new = receive(:new)
+ receive_new.ordered if ordered
receive_new.exactly(number).times if number
receive_new.with(*new_args) if new_args.any?
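
An illustrative sketch of the new ordered flag; the Issue model and the block body are assumptions:

    # Expect exactly two Issue instantiations, in this order, stubbing each new instance.
    expect_next_instances_of(Issue, 2, true) do |issue|
      allow(issue).to receive(:save).and_return(true)
    end
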
diff --git a/spec/support/helpers/query_recorder.rb b/spec/support/helpers/query_recorder.rb
index 61634813a1c..2d880c7a8fe 100644
--- a/spec/support/helpers/query_recorder.rb
+++ b/spec/support/helpers/query_recorder.rb
@@ -3,37 +3,53 @@
module ActiveRecord
class QueryRecorder
attr_reader :log, :skip_cached, :cached, :data
- UNKNOWN = %w(unknown unknown).freeze
- def initialize(skip_cached: true, query_recorder_debug: false, &block)
- @data = Hash.new { |h, k| h[k] = { count: 0, occurrences: [], backtrace: [] } }
+ UNKNOWN = %w[unknown unknown].freeze
+
+ def initialize(skip_cached: true, log_file: nil, query_recorder_debug: false, &block)
+ @data = Hash.new { |h, k| h[k] = { count: 0, occurrences: [], backtrace: [], durations: [] } }
@log = []
@cached = []
@skip_cached = skip_cached
- @query_recorder_debug = query_recorder_debug
+ @query_recorder_debug = ENV['QUERY_RECORDER_DEBUG'] || query_recorder_debug
+ @log_file = log_file
# force replacement of bind parameters to give tests the ability to check for ids
ActiveRecord::Base.connection.unprepared_statement do
ActiveSupport::Notifications.subscribed(method(:callback), 'sql.active_record', &block)
end
end
- def show_backtrace(values)
- Rails.logger.debug("QueryRecorder SQL: #{values[:sql]}")
+ def show_backtrace(values, duration)
+ values[:sql].lines.each do |line|
+ print_to_log(:SQL, line)
+ end
+ print_to_log(:DURATION, duration)
Gitlab::BacktraceCleaner.clean_backtrace(caller).each do |line|
- Rails.logger.debug("QueryRecorder backtrace: --> #{line}")
+ print_to_log(:backtrace, line)
+ end
+ end
+
+ def print_to_log(label, line)
+ msg = "QueryRecorder #{label}: --> #{line}"
+
+ if @log_file
+ @log_file.puts(msg)
+ else
+ Rails.logger.debug(msg)
end
end
def get_sql_source(sql)
- matches = sql.match(/,line:(?<line>.*):in\s+`(?<method>.*)'\*\//)
+ matches = sql.match(%r{,line:(?<line>.*):in\s+`(?<method>.*)'\*/})
matches ? [matches[:line], matches[:method]] : UNKNOWN
end
- def store_sql_by_source(values: {}, backtrace: nil)
+ def store_sql_by_source(values: {}, duration: nil, backtrace: nil)
full_name = get_sql_source(values[:sql]).join(':')
@data[full_name][:count] += 1
@data[full_name][:occurrences] << values[:sql]
@data[full_name][:backtrace] << backtrace
+ @data[full_name][:durations] << duration
end
def find_query(query_regexp, limit, first_only: false)
@@ -55,14 +71,14 @@ module ActiveRecord
end
def callback(name, start, finish, message_id, values)
- store_backtrace = ENV['QUERY_RECORDER_DEBUG'] || @query_recorder_debug
- backtrace = store_backtrace ? show_backtrace(values) : nil
+ duration = finish - start
if values[:cached] && skip_cached
@cached << values[:sql]
elsif !values[:name]&.include?("SCHEMA")
+ backtrace = @query_recorder_debug ? show_backtrace(values, duration) : nil
@log << values[:sql]
- store_sql_by_source(values: values, backtrace: backtrace)
+ store_sql_by_source(values: values, duration: duration, backtrace: backtrace)
end
end
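
A hypothetical use of the new log_file option together with the per-query durations now recorded; the file path and the measured block are assumptions:

    File.open('tmp/query_recorder.log', 'w') do |file|
      recorder = ActiveRecord::QueryRecorder.new(query_recorder_debug: true, log_file: file) do
        Project.count
      end

      # Each source entry now also carries the individual query durations.
      recorder.data.each { |source, info| puts "#{source}: #{info[:durations].sum.round(4)}s" }
    end
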
diff --git a/spec/support/helpers/reload_helpers.rb b/spec/support/helpers/reload_helpers.rb
new file mode 100644
index 00000000000..60811e4604f
--- /dev/null
+++ b/spec/support/helpers/reload_helpers.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+module ReloadHelpers
+ def reload_models(*models)
+ models.map(&:reload)
+ end
+
+ def subject_and_reload(*models)
+ subject
+ reload_models(*models)
+ end
+end
diff --git a/spec/support/helpers/rubygems_helpers.rb b/spec/support/helpers/rubygems_helpers.rb
new file mode 100644
index 00000000000..6a808f52e97
--- /dev/null
+++ b/spec/support/helpers/rubygems_helpers.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+module RubygemsHelpers
+ def gem_from_file(file)
+ full_path = File.expand_path(
+ Rails.root.join('spec', 'fixtures', 'packages', 'rubygems', file.filename)
+ )
+
+ Gem::Package.new(File.open(full_path))
+ end
+end
diff --git a/spec/support/helpers/seed_repo.rb b/spec/support/helpers/seed_repo.rb
index 20738b45129..74ac529a3de 100644
--- a/spec/support/helpers/seed_repo.rb
+++ b/spec/support/helpers/seed_repo.rb
@@ -31,64 +31,64 @@
module SeedRepo
module BigCommit
- ID = "913c66a37b4a45b9769037c55c2d238bd0942d2e".freeze
- PARENT_ID = "cfe32cf61b73a0d5e9f13e774abde7ff789b1660".freeze
- MESSAGE = "Files, encoding and much more".freeze
- AUTHOR_FULL_NAME = "Dmitriy Zaporozhets".freeze
+ ID = "913c66a37b4a45b9769037c55c2d238bd0942d2e"
+ PARENT_ID = "cfe32cf61b73a0d5e9f13e774abde7ff789b1660"
+ MESSAGE = "Files, encoding and much more"
+ AUTHOR_FULL_NAME = "Dmitriy Zaporozhets"
FILES_COUNT = 2
end
module Commit
- ID = "570e7b2abdd848b95f2f578043fc23bd6f6fd24d".freeze
- PARENT_ID = "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9".freeze
- MESSAGE = "Change some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n".freeze
- AUTHOR_FULL_NAME = "Dmitriy Zaporozhets".freeze
+ ID = "570e7b2abdd848b95f2f578043fc23bd6f6fd24d"
+ PARENT_ID = "6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9"
+ MESSAGE = "Change some files\n\nSigned-off-by: Dmitriy Zaporozhets <dmitriy.zaporozhets@gmail.com>\n"
+ AUTHOR_FULL_NAME = "Dmitriy Zaporozhets"
FILES = ["files/ruby/popen.rb", "files/ruby/regex.rb"].freeze
FILES_COUNT = 2
- C_FILE_PATH = "files/ruby".freeze
+ C_FILE_PATH = "files/ruby"
C_FILES = ["popen.rb", "regex.rb", "version_info.rb"].freeze
- BLOB_FILE = %{%h3= @key.title\n%hr\n%pre= @key.key\n.actions\n = link_to 'Remove', @key, :confirm => 'Are you sure?', :method => :delete, :class => \"btn danger delete-key\"\n\n\n}.freeze
- BLOB_FILE_PATH = "app/views/keys/show.html.haml".freeze
+ BLOB_FILE = %{%h3= @key.title\n%hr\n%pre= @key.key\n.actions\n = link_to 'Remove', @key, :confirm => 'Are you sure?', :method => :delete, :class => \"btn danger delete-key\"\n\n\n}
+ BLOB_FILE_PATH = "app/views/keys/show.html.haml"
end
module EmptyCommit
- ID = "b0e52af38d7ea43cf41d8a6f2471351ac036d6c9".freeze
- PARENT_ID = "40f4a7a617393735a95a0bb67b08385bc1e7c66d".freeze
- MESSAGE = "Empty commit".freeze
- AUTHOR_FULL_NAME = "Rémy Coutable".freeze
+ ID = "b0e52af38d7ea43cf41d8a6f2471351ac036d6c9"
+ PARENT_ID = "40f4a7a617393735a95a0bb67b08385bc1e7c66d"
+ MESSAGE = "Empty commit"
+ AUTHOR_FULL_NAME = "Rémy Coutable"
FILES = [].freeze
FILES_COUNT = FILES.count
end
module EncodingCommit
- ID = "40f4a7a617393735a95a0bb67b08385bc1e7c66d".freeze
- PARENT_ID = "66028349a123e695b589e09a36634d976edcc5e8".freeze
- MESSAGE = "Add ISO-8859-encoded file".freeze
- AUTHOR_FULL_NAME = "Stan Hu".freeze
+ ID = "40f4a7a617393735a95a0bb67b08385bc1e7c66d"
+ PARENT_ID = "66028349a123e695b589e09a36634d976edcc5e8"
+ MESSAGE = "Add ISO-8859-encoded file"
+ AUTHOR_FULL_NAME = "Stan Hu"
FILES = ["encoding/iso8859.txt"].freeze
FILES_COUNT = FILES.count
end
module FirstCommit
- ID = "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863".freeze
+ ID = "1a0b36b3cdad1d2ee32457c102a8c0b7056fa863"
PARENT_ID = nil
- MESSAGE = "Initial commit".freeze
- AUTHOR_FULL_NAME = "Dmitriy Zaporozhets".freeze
+ MESSAGE = "Initial commit"
+ AUTHOR_FULL_NAME = "Dmitriy Zaporozhets"
FILES = ["LICENSE", ".gitignore", "README.md"].freeze
FILES_COUNT = 3
end
module LastCommit
- ID = "4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6".freeze
- PARENT_ID = "0e1b353b348f8477bdbec1ef47087171c5032cd9".freeze
- MESSAGE = "Merge branch 'master' into 'master'".freeze
- AUTHOR_FULL_NAME = "Stan Hu".freeze
+ ID = "4b4918a572fa86f9771e5ba40fbd48e1eb03e2c6"
+ PARENT_ID = "0e1b353b348f8477bdbec1ef47087171c5032cd9"
+ MESSAGE = "Merge branch 'master' into 'master'"
+ AUTHOR_FULL_NAME = "Stan Hu"
FILES = ["bin/executable"].freeze
FILES_COUNT = FILES.count
end
module Repo
- HEAD = "master".freeze
+ HEAD = "master"
BRANCHES = %w[
feature
fix
@@ -111,9 +111,9 @@ module SeedRepo
end
module RubyBlob
- ID = "7e3e39ebb9b2bf433b4ad17313770fbe4051649c".freeze
- NAME = "popen.rb".freeze
- CONTENT = <<-eos.freeze
+ ID = "7e3e39ebb9b2bf433b4ad17313770fbe4051649c"
+ NAME = "popen.rb"
+ CONTENT = <<-eos
require 'fileutils'
require 'open3'
diff --git a/spec/support/helpers/stub_env.rb b/spec/support/helpers/stub_env.rb
index 8107ffc939f..5f344f8fb52 100644
--- a/spec/support/helpers/stub_env.rb
+++ b/spec/support/helpers/stub_env.rb
@@ -14,7 +14,7 @@ module StubENV
private
- STUBBED_KEY = '__STUBBED__'.freeze
+ STUBBED_KEY = '__STUBBED__'
def add_stubbed_value(key, value)
allow(ENV).to receive(:[]).with(key).and_return(value)
diff --git a/spec/support/helpers/stub_requests.rb b/spec/support/helpers/stub_requests.rb
index 473f07dd413..a3810323fee 100644
--- a/spec/support/helpers/stub_requests.rb
+++ b/spec/support/helpers/stub_requests.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
module StubRequests
- IP_ADDRESS_STUB = '8.8.8.9'.freeze
+ IP_ADDRESS_STUB = '8.8.8.9'
# Fully stubs a request using WebMock class. This class also
# stubs the IP address the URL is translated to (DNS lookup).
diff --git a/spec/support/helpers/test_env.rb b/spec/support/helpers/test_env.rb
index 266c0e18ccd..7ba15a9c00b 100644
--- a/spec/support/helpers/test_env.rb
+++ b/spec/support/helpers/test_env.rb
@@ -92,7 +92,7 @@ module TestEnv
}.freeze
TMP_TEST_PATH = Rails.root.join('tmp', 'tests').freeze
- REPOS_STORAGE = 'default'.freeze
+ REPOS_STORAGE = 'default'
SECOND_STORAGE_PATH = Rails.root.join('tmp', 'tests', 'second_storage')
# Test environment
@@ -170,7 +170,14 @@ module TestEnv
install_dir: gitaly_dir,
version: Gitlab::GitalyClient.expected_server_version,
task: "gitlab:gitaly:install[#{install_gitaly_args}]") do
- Gitlab::SetupHelper::Gitaly.create_configuration(gitaly_dir, { 'default' => repos_path }, force: true)
+ Gitlab::SetupHelper::Gitaly.create_configuration(
+ gitaly_dir,
+ { 'default' => repos_path },
+ force: true,
+ options: {
+ prometheus_listen_addr: 'localhost:9236'
+ }
+ )
Gitlab::SetupHelper::Gitaly.create_configuration(
gitaly_dir,
{ 'default' => repos_path },
diff --git a/spec/support/helpers/usage_data_helpers.rb b/spec/support/helpers/usage_data_helpers.rb
index df79049123d..d05676a649e 100644
--- a/spec/support/helpers/usage_data_helpers.rb
+++ b/spec/support/helpers/usage_data_helpers.rb
@@ -174,6 +174,22 @@ module UsageDataHelpers
allow(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(false)
end
+ def stub_prometheus_queries
+ stub_request(:get, %r{^https?://::1:9090/-/ready})
+ .to_return(
+ status: 200,
+ body: [{}].to_json,
+ headers: { 'Content-Type' => 'application/json' }
+ )
+
+ stub_request(:get, %r{^https?://::1:9090/api/v1/query\?query=.*})
+ .to_return(
+ status: 200,
+ body: [{}].to_json,
+ headers: { 'Content-Type' => 'application/json' }
+ )
+ end
+
def clear_memoized_values(values)
values.each { |v| described_class.clear_memoization(v) }
end
@@ -242,4 +258,12 @@ module UsageDataHelpers
end
end
end
+
+ def load_sample_metric_definition(filename: 'sample_metric.yml')
+ load_metric_yaml(fixture_file("lib/generators/gitlab/usage_metric_definition_generator/#{filename}"))
+ end
+
+ def load_metric_yaml(data)
+ ::Gitlab::Config::Loader::Yaml.new(data).load_raw!
+ end
end
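
A brief sketch of how these additions might be wired into a usage data spec; the described class and the expectation are assumptions:

    RSpec.describe Gitlab::UsageData do
      include UsageDataHelpers

      before do
        stub_prometheus_queries
      end

      it 'parses the generator fixture' do
        expect(load_sample_metric_definition).to be_a(Hash)
      end
    end
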
diff --git a/spec/support/import_export/project_tree_expectations.rb b/spec/support/import_export/project_tree_expectations.rb
index 966c977e8e9..2423a58a3e6 100644
--- a/spec/support/import_export/project_tree_expectations.rb
+++ b/spec/support/import_export/project_tree_expectations.rb
@@ -97,13 +97,13 @@ module ImportExport
def normalize_elements(elem)
case elem
when Hash
- elem.map do |key, value|
+ elem.to_h do |key, value|
if ignore_key?(key, value)
[key, :ignored]
else
[key, normalize_elements(value)]
end
- end.to_h
+ end
when Array
elem.map { |a| normalize_elements(a) }
else
diff --git a/spec/support/matchers/exceed_query_limit.rb b/spec/support/matchers/exceed_query_limit.rb
index 7a66eff3a41..b48c7f905b2 100644
--- a/spec/support/matchers/exceed_query_limit.rb
+++ b/spec/support/matchers/exceed_query_limit.rb
@@ -20,6 +20,11 @@ module ExceedQueryLimitHelpers
self
end
+ def for_model(model)
+ table = model.table_name if model < ActiveRecord::Base
+ for_query(/(FROM|UPDATE|INSERT INTO|DELETE FROM)\s+"#{table}"/)
+ end
+
def show_common_queries
@show_common_queries = true
self
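
An assumed usage of the new for_model chain on the query limit matcher; visit_page stands in for whatever triggers the queries:

    control = ActiveRecord::QueryRecorder.new { visit_page }

    # Only queries that touch the notes table count toward the limit.
    expect { visit_page }.not_to exceed_query_limit(control).for_model(Note)
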
diff --git a/spec/support/matchers/graphql_matchers.rb b/spec/support/matchers/graphql_matchers.rb
index 565c21e0f85..904b7efdd7f 100644
--- a/spec/support/matchers/graphql_matchers.rb
+++ b/spec/support/matchers/graphql_matchers.rb
@@ -30,11 +30,13 @@ RSpec::Matchers.define :have_graphql_fields do |*expected|
end
match do |kls|
- if @allow_extra
- expect(kls.fields.keys).to include(*expected_field_names)
- else
- expect(kls.fields.keys).to contain_exactly(*expected_field_names)
- end
+ keys = kls.fields.keys.to_set
+ fields = expected_field_names.to_set
+
+ next true if fields == keys
+ next true if @allow_extra && fields.proper_subset?(keys)
+
+ false
end
failure_message do |kls|
@@ -108,7 +110,7 @@ RSpec::Matchers.define :have_graphql_arguments do |*expected|
names = expected_names(field).inspect
args = field.arguments.keys.inspect
- "expected that #{field.name} would have the following arguments: #{names}, but it has #{args}."
+ "expected #{field.name} to have the following arguments: #{names}, but it has #{args}."
end
end
diff --git a/spec/support/matchers/track_self_describing_event_matcher.rb b/spec/support/matchers/track_self_describing_event_matcher.rb
deleted file mode 100644
index c3723d2418f..00000000000
--- a/spec/support/matchers/track_self_describing_event_matcher.rb
+++ /dev/null
@@ -1,12 +0,0 @@
-# frozen_string_literal: true
-
-RSpec::Matchers.define :track_self_describing_event do |schema, data|
- match do
- expect(Gitlab::Tracking).to have_received(:self_describing_event)
- .with(schema, data: data)
- end
-
- match_when_negated do
- expect(Gitlab::Tracking).not_to have_received(:self_describing_event)
- end
-end
diff --git a/spec/support/shared_contexts/email_shared_context.rb b/spec/support/shared_contexts/email_shared_context.rb
index 9dffea7c94e..14c6c85cc43 100644
--- a/spec/support/shared_contexts/email_shared_context.rb
+++ b/spec/support/shared_contexts/email_shared_context.rb
@@ -82,8 +82,8 @@ RSpec.shared_examples :note_handler_shared_examples do |forwardable|
let!(:email_raw) { update_commands_only }
context 'and current user cannot update noteable' do
- it 'raises a CommandsOnlyNoteError' do
- expect { receiver.execute }.to raise_error(Gitlab::Email::InvalidNoteError)
+ it 'does not raise an error' do
+ expect { receiver.execute }.not_to raise_error
end
end
@@ -92,15 +92,11 @@ RSpec.shared_examples :note_handler_shared_examples do |forwardable|
project.add_developer(user)
end
- it 'does not raise an error', unless: forwardable do
+ it 'does not raise an error' do
expect { receiver.execute }.to change { noteable.resource_state_events.count }.by(1)
expect(noteable.reload).to be_closed
end
-
- it 'raises an InvalidNoteError', if: forwardable do
- expect { receiver.execute }.to raise_error(Gitlab::Email::InvalidNoteError)
- end
end
end
end
@@ -189,6 +185,7 @@ RSpec.shared_examples :note_handler_shared_examples do |forwardable|
let(:email_raw) { with_quick_actions }
let!(:sent_notification) do
+ allow(Gitlab::ServiceDesk).to receive(:enabled?).with(project: project).and_return(true)
SentNotification.record_note(note, support_bot.id, mail_key)
end
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
new file mode 100644
index 00000000000..5a72b330707
--- /dev/null
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_logging/structured_logger_shared_context.rb
@@ -0,0 +1,87 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'structured_logger' do
+ let(:timestamp) { Time.iso8601('2018-01-01T12:00:00.000Z') }
+ let(:created_at) { timestamp - 1.second }
+ let(:scheduling_latency_s) { 1.0 }
+
+ let(:job) do
+ {
+ "class" => "TestWorker",
+ "args" => [1234, 'hello', { 'key' => 'value' }],
+ "retry" => false,
+ "queue" => "cronjob:test_queue",
+ "queue_namespace" => "cronjob",
+ "jid" => "da883554ee4fe414012f5f42",
+ "created_at" => created_at.to_f,
+ "enqueued_at" => created_at.to_f,
+ "correlation_id" => 'cid',
+ "error_message" => "wrong number of arguments (2 for 3)",
+ "error_class" => "ArgumentError",
+ "error_backtrace" => []
+ }
+ end
+
+ let(:logger) { double }
+ let(:clock_realtime_start) { 0.222222299 }
+ let(:clock_realtime_end) { 1.333333799 }
+ let(:clock_thread_cputime_start) { 0.222222299 }
+ let(:clock_thread_cputime_end) { 1.333333799 }
+ let(:start_payload) do
+ job.except('error_backtrace', 'error_class', 'error_message').merge(
+ 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: start',
+ 'job_status' => 'start',
+ 'pid' => Process.pid,
+ 'created_at' => created_at.to_f,
+ 'enqueued_at' => created_at.to_f,
+ 'scheduling_latency_s' => scheduling_latency_s,
+ 'job_size_bytes' => be > 0
+ )
+ end
+
+ let(:end_payload) do
+ start_payload.merge(
+ 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: done: 0.0 sec',
+ 'job_status' => 'done',
+ 'duration_s' => 0.0,
+ 'completed_at' => timestamp.to_f,
+ 'cpu_s' => 1.111112,
+ 'db_duration_s' => 0.0,
+ 'db_cached_count' => 0,
+ 'db_count' => 0,
+ 'db_write_count' => 0
+ )
+ end
+
+ let(:exception_payload) do
+ end_payload.merge(
+ 'message' => 'TestWorker JID-da883554ee4fe414012f5f42: fail: 0.0 sec',
+ 'job_status' => 'fail',
+ 'error_class' => 'ArgumentError',
+ 'error_message' => 'Something went wrong',
+ 'error_backtrace' => be_a(Array).and(be_present)
+ )
+ end
+
+ before do
+ allow(Sidekiq).to receive(:logger).and_return(logger)
+
+ allow(subject).to receive(:current_time).and_return(timestamp.to_f)
+
+ allow(Process).to receive(:clock_gettime).with(Process::CLOCK_REALTIME, :float_second)
+ .and_return(clock_realtime_start, clock_realtime_end)
+ allow(Process).to receive(:clock_gettime).with(Process::CLOCK_THREAD_CPUTIME_ID, :float_second)
+ .and_return(clock_thread_cputime_start, clock_thread_cputime_end)
+ end
+
+ subject { described_class.new }
+
+ def call_subject(job, queue)
+ # This structured logger strongly depends on execution of `InstrumentationLogger`
+ subject.call(job, queue) do
+ ::Gitlab::SidekiqMiddleware::InstrumentationLogger.new.call('worker', job, queue) do
+ yield
+ end
+ end
+ end
+end
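
One illustrative way a logger spec might consume this shared context; the expectations shown are assumptions, not the real spec:

    RSpec.describe Gitlab::SidekiqLogging::StructuredLogger do
      include_context 'structured_logger'

      it 'logs start and done payloads' do
        expect(logger).to receive(:info).with(start_payload).ordered
        expect(logger).to receive(:info).with(end_payload).ordered

        call_subject(job, 'test_queue') {}
      end
    end
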
diff --git a/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
new file mode 100644
index 00000000000..73de631e293
--- /dev/null
+++ b/spec/support/shared_contexts/lib/gitlab/sidekiq_middleware/server_metrics_shared_context.rb
@@ -0,0 +1,92 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'server metrics with mocked prometheus' do
+ let(:concurrency_metric) { double('concurrency metric') }
+
+ let(:queue_duration_seconds) { double('queue duration seconds metric') }
+ let(:completion_seconds_metric) { double('completion seconds metric') }
+ let(:user_execution_seconds_metric) { double('user execution seconds metric') }
+ let(:db_seconds_metric) { double('db seconds metric') }
+ let(:gitaly_seconds_metric) { double('gitaly seconds metric') }
+ let(:failed_total_metric) { double('failed total metric') }
+ let(:retried_total_metric) { double('retried total metric') }
+ let(:redis_requests_total) { double('redis calls total metric') }
+ let(:running_jobs_metric) { double('running jobs metric') }
+ let(:redis_seconds_metric) { double('redis seconds metric') }
+ let(:elasticsearch_seconds_metric) { double('elasticsearch seconds metric') }
+ let(:elasticsearch_requests_total) { double('elasticsearch calls total metric') }
+
+ before do
+ allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_queue_duration_seconds, anything, anything, anything).and_return(queue_duration_seconds)
+ allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_completion_seconds, anything, anything, anything).and_return(completion_seconds_metric)
+ allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_cpu_seconds, anything, anything, anything).and_return(user_execution_seconds_metric)
+ allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_db_seconds, anything, anything, anything).and_return(db_seconds_metric)
+ allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_jobs_gitaly_seconds, anything, anything, anything).and_return(gitaly_seconds_metric)
+ allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_redis_requests_duration_seconds, anything, anything, anything).and_return(redis_seconds_metric)
+ allow(Gitlab::Metrics).to receive(:histogram).with(:sidekiq_elasticsearch_requests_duration_seconds, anything, anything, anything).and_return(elasticsearch_seconds_metric)
+ allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_failed_total, anything).and_return(failed_total_metric)
+ allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_jobs_retried_total, anything).and_return(retried_total_metric)
+ allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_redis_requests_total, anything).and_return(redis_requests_total)
+ allow(Gitlab::Metrics).to receive(:counter).with(:sidekiq_elasticsearch_requests_total, anything).and_return(elasticsearch_requests_total)
+ allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_running_jobs, anything, {}, :all).and_return(running_jobs_metric)
+ allow(Gitlab::Metrics).to receive(:gauge).with(:sidekiq_concurrency, anything, {}, :all).and_return(concurrency_metric)
+
+ allow(concurrency_metric).to receive(:set)
+ end
+end
+
+RSpec.shared_context 'server metrics call' do
+ let(:thread_cputime_before) { 1 }
+ let(:thread_cputime_after) { 2 }
+ let(:thread_cputime_duration) { thread_cputime_after - thread_cputime_before }
+
+ let(:monotonic_time_before) { 11 }
+ let(:monotonic_time_after) { 20 }
+ let(:monotonic_time_duration) { monotonic_time_after - monotonic_time_before }
+
+ let(:queue_duration_for_job) { 0.01 }
+
+ let(:db_duration) { 3 }
+ let(:gitaly_duration) { 4 }
+
+ let(:redis_calls) { 2 }
+ let(:redis_duration) { 0.01 }
+
+ let(:elasticsearch_calls) { 8 }
+ let(:elasticsearch_duration) { 0.54 }
+ let(:instrumentation) do
+ {
+ gitaly_duration_s: gitaly_duration,
+ redis_calls: redis_calls,
+ redis_duration_s: redis_duration,
+ elasticsearch_calls: elasticsearch_calls,
+ elasticsearch_duration_s: elasticsearch_duration
+ }
+ end
+
+ before do
+ allow(subject).to receive(:get_thread_cputime).and_return(thread_cputime_before, thread_cputime_after)
+ allow(Gitlab::Metrics::System).to receive(:monotonic_time).and_return(monotonic_time_before, monotonic_time_after)
+ allow(Gitlab::InstrumentationHelper).to receive(:queue_duration_for_job).with(job).and_return(queue_duration_for_job)
+ allow(ActiveRecord::LogSubscriber).to receive(:runtime).and_return(db_duration * 1000)
+
+ job[:instrumentation] = instrumentation
+ job[:gitaly_duration_s] = gitaly_duration
+ job[:redis_calls] = redis_calls
+ job[:redis_duration_s] = redis_duration
+
+ job[:elasticsearch_calls] = elasticsearch_calls
+ job[:elasticsearch_duration_s] = elasticsearch_duration
+
+ allow(running_jobs_metric).to receive(:increment)
+ allow(redis_requests_total).to receive(:increment)
+ allow(elasticsearch_requests_total).to receive(:increment)
+ allow(queue_duration_seconds).to receive(:observe)
+ allow(user_execution_seconds_metric).to receive(:observe)
+ allow(db_seconds_metric).to receive(:observe)
+ allow(gitaly_seconds_metric).to receive(:observe)
+ allow(completion_seconds_metric).to receive(:observe)
+ allow(redis_seconds_metric).to receive(:observe)
+ allow(elasticsearch_seconds_metric).to receive(:observe)
+ end
+end
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index 671c0cdf79c..78d14ecb880 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -59,7 +59,7 @@ RSpec.shared_context 'project navbar structure' do
]
},
{
- nav_item: _('Merge Requests'),
+ nav_item: _('Merge requests'),
nav_sub_items: []
},
{
@@ -139,6 +139,7 @@ RSpec.shared_context 'group navbar structure' do
_('Projects'),
_('Repository'),
_('CI/CD'),
+ _('Applications'),
_('Packages & Registries'),
_('Webhooks')
]
@@ -189,7 +190,7 @@ RSpec.shared_context 'group navbar structure' do
]
},
{
- nav_item: _('Merge Requests'),
+ nav_item: _('Merge requests'),
nav_sub_items: []
},
security_and_compliance_nav_item,
diff --git a/spec/support/shared_contexts/project_service_jira_context.rb b/spec/support/shared_contexts/project_service_jira_context.rb
index 8e01de70846..54bb9fd108e 100644
--- a/spec/support/shared_contexts/project_service_jira_context.rb
+++ b/spec/support/shared_contexts/project_service_jira_context.rb
@@ -1,8 +1,8 @@
# frozen_string_literal: true
RSpec.shared_context 'project service Jira context' do
- let(:url) { 'http://jira.example.com' }
- let(:test_url) { 'http://jira.example.com/rest/api/2/serverInfo' }
+ let(:url) { 'https://jira.example.com' }
+ let(:test_url) { 'https://jira.example.com/rest/api/2/serverInfo' }
def fill_form(disable: false)
click_active_checkbox if disable
@@ -10,6 +10,5 @@ RSpec.shared_context 'project service Jira context' do
fill_in 'service_url', with: url
fill_in 'service_username', with: 'username'
fill_in 'service_password', with: 'password'
- fill_in 'service_jira_issue_transition_id', with: '25'
end
end
diff --git a/spec/support/shared_contexts/project_service_shared_context.rb b/spec/support/shared_contexts/project_service_shared_context.rb
index b4b9ab456e0..a8e75c624e8 100644
--- a/spec/support/shared_contexts/project_service_shared_context.rb
+++ b/spec/support/shared_contexts/project_service_shared_context.rb
@@ -15,7 +15,10 @@ RSpec.shared_context 'project service activation' do
def visit_project_integration(name)
visit_project_integrations
- click_link(name)
+
+ within('#content-body') do
+ click_link(name)
+ end
end
def click_active_checkbox
diff --git a/spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb b/spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb
index f3bbb325475..ac53be1a1cb 100644
--- a/spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb
+++ b/spec/support/shared_contexts/requests/api/conan_packages_shared_context.rb
@@ -41,13 +41,6 @@ RSpec.shared_context 'conan recipe endpoints' do
let(:jwt) { build_jwt(personal_access_token) }
let(:headers) { build_token_auth_header(jwt.encoded) }
let(:conan_package_reference) { '123456789' }
- let(:presenter) { double('::Packages::Conan::PackagePresenter') }
-
- before do
- allow(::Packages::Conan::PackagePresenter).to receive(:new)
- .with(package, user, package.project, any_args)
- .and_return(presenter)
- end
end
RSpec.shared_context 'conan file download endpoints' do
diff --git a/spec/support/shared_contexts/requests/api/go_modules_shared_context.rb b/spec/support/shared_contexts/requests/api/go_modules_shared_context.rb
new file mode 100644
index 00000000000..5a90c3076b1
--- /dev/null
+++ b/spec/support/shared_contexts/requests/api/go_modules_shared_context.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+RSpec.shared_context 'basic Go module' do
+ let_it_be(:user) { create :user }
+ let_it_be(:project) { create :project_empty_repo, creator: user, path: 'my-go-lib' }
+
+ let_it_be(:commit_v1_0_0) { create :go_module_commit, :files, project: project, tag: 'v1.0.0', files: { 'README.md' => 'Hi' } }
+ let_it_be(:commit_v1_0_1) { create :go_module_commit, :module, project: project, tag: 'v1.0.1' }
+ let_it_be(:commit_v1_0_2) { create :go_module_commit, :package, project: project, tag: 'v1.0.2', path: 'pkg' }
+ let_it_be(:commit_v1_0_3) { create :go_module_commit, :module, project: project, tag: 'v1.0.3', name: 'mod' }
+ let_it_be(:commit_file_y) { create :go_module_commit, :files, project: project, files: { 'y.go' => "package a\n" } }
+ let_it_be(:commit_mod_v2) { create :go_module_commit, :module, project: project, name: 'v2' }
+ let_it_be(:commit_v2_0_0) { create :go_module_commit, :files, project: project, tag: 'v2.0.0', files: { 'v2/x.go' => "package a\n" } }
+end
diff --git a/spec/support/shared_contexts/services/projects/container_repository/delete_tags_service_shared_context.rb b/spec/support/shared_contexts/services/projects/container_repository/delete_tags_service_shared_context.rb
index bcc98cf6416..80f011f622b 100644
--- a/spec/support/shared_contexts/services/projects/container_repository/delete_tags_service_shared_context.rb
+++ b/spec/support/shared_contexts/services/projects/container_repository/delete_tags_service_shared_context.rb
@@ -23,7 +23,7 @@ RSpec.shared_context 'container repository delete tags service shared context' d
end
def stub_delete_reference_requests(tags)
- tags = Hash[Array.wrap(tags).map { |tag| [tag, 200] }] unless tags.is_a?(Hash)
+ tags = Array.wrap(tags).to_h { |tag| [tag, 200] } unless tags.is_a?(Hash)
tags.each do |tag, status|
stub_request(:delete, "http://registry.gitlab/v2/#{repository.path}/tags/reference/#{tag}")
diff --git a/spec/support/shared_contexts/services_shared_context.rb b/spec/support/shared_contexts/services_shared_context.rb
index 3322c6ef01a..f250632ff51 100644
--- a/spec/support/shared_contexts/services_shared_context.rb
+++ b/spec/support/shared_contexts/services_shared_context.rb
@@ -28,6 +28,8 @@ Service.available_services_names.each do |service|
hash.merge!(k => 1234)
elsif service == 'jira' && k == :jira_issue_transition_id
hash.merge!(k => '1,2,3')
+ elsif service == 'emails_on_push' && k == :recipients
+ hash.merge!(k => 'foo@bar.com')
else
hash.merge!(k => "someword")
end
diff --git a/spec/support/shared_examples/boards/destroy_service_shared_examples.rb b/spec/support/shared_examples/boards/destroy_service_shared_examples.rb
new file mode 100644
index 00000000000..33bae3da44b
--- /dev/null
+++ b/spec/support/shared_examples/boards/destroy_service_shared_examples.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'board destroy service' do
+ describe '#execute' do
+ let(:parent_type) { parent.is_a?(Project) ? :project : :group }
+ let!(:board) { create(board_factory, parent_type => parent) }
+
+ subject(:service) { described_class.new(parent, double) }
+
+ context 'when there is more than one board' do
+ let!(:board2) { create(board_factory, parent_type => parent) }
+
+ it 'destroys the board' do
+ create(board_factory, parent_type => parent)
+
+ expect do
+ expect(service.execute(board)).to be_success
+ end.to change(boards, :count).by(-1)
+ end
+ end
+
+ context 'when there is only one board' do
+ it 'does not remove board' do
+ expect do
+ expect(service.execute(board)).to be_error
+ end.not_to change(boards, :count)
+ end
+ end
+ end
+end
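
A sketch of a spec consuming this shared example; the lets it relies on (parent, board_factory, boards) are shown with assumed values:

    RSpec.describe Boards::DestroyService do
      let_it_be(:parent) { create(:project) }

      let(:board_factory) { :board }
      let(:boards) { parent.boards }

      it_behaves_like 'board destroy service'
    end
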
diff --git a/spec/support/shared_examples/boards/lists/update_service_shared_examples.rb b/spec/support/shared_examples/boards/lists/update_service_shared_examples.rb
new file mode 100644
index 00000000000..d8a74f2582d
--- /dev/null
+++ b/spec/support/shared_examples/boards/lists/update_service_shared_examples.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'moving list' do
+ context 'when user can admin list' do
+ it 'calls Lists::MoveService to update list position' do
+ board.resource_parent.add_developer(user)
+
+ expect_next_instance_of(Boards::Lists::MoveService, board.resource_parent, user, params) do |move_service|
+ expect(move_service).to receive(:execute).with(list).and_call_original
+ end
+
+ service.execute(list)
+ end
+ end
+
+ context 'when user cannot admin list' do
+ it 'does not call Lists::MoveService to update list position' do
+ expect(Boards::Lists::MoveService).not_to receive(:new)
+
+ service.execute(list)
+ end
+ end
+end
+
+RSpec.shared_examples 'updating list preferences' do
+ context 'when user can read list' do
+ it 'updates list preference for user' do
+ board.resource_parent.add_guest(user)
+
+ service.execute(list)
+
+ expect(list.preferences_for(user).collapsed).to eq(true)
+ end
+ end
+
+ context 'when user cannot read list' do
+ it 'does not update list preference for user' do
+ service.execute(list)
+
+ expect(list.preferences_for(user).collapsed).to be_falsy
+ end
+ end
+end
diff --git a/spec/support/shared_examples/controllers/snippet_blob_shared_examples.rb b/spec/support/shared_examples/controllers/snippet_blob_shared_examples.rb
index 62aaec85162..c939c306d93 100644
--- a/spec/support/shared_examples/controllers/snippet_blob_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/snippet_blob_shared_examples.rb
@@ -36,16 +36,6 @@ RSpec.shared_examples 'raw snippet blob' do
expect(response.header['Content-Disposition']).to match "attachment; filename=\"#{filepath}\""
end
-
- context 'when the feature flag attachment_with_filename is disabled' do
- it 'returns just attachment in the disposition header' do
- stub_feature_flags(attachment_with_filename: false)
-
- subject
-
- expect(response.header['Content-Disposition']).to eq 'attachment'
- end
- end
end
end
diff --git a/spec/support/shared_examples/controllers/snippet_shared_examples.rb b/spec/support/shared_examples/controllers/snippet_shared_examples.rb
new file mode 100644
index 00000000000..f49cc979368
--- /dev/null
+++ b/spec/support/shared_examples/controllers/snippet_shared_examples.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'snippets views' do
+ let(:params) { {} }
+
+ before do
+ sign_in(user)
+ end
+
+ context 'when rendered' do
+ render_views
+
+ it 'avoids N+1 database queries' do
+    # Warming call to load everything non-snippet-related
+ get(:index, params: params)
+
+ project = create(:project, namespace: user.namespace)
+ create(:project_snippet, project: project, author: user)
+
+ control_count = ActiveRecord::QueryRecorder.new { get(:index, params: params) }.count
+
+ project = create(:project, namespace: user.namespace)
+ create(:project_snippet, project: project, author: user)
+
+ expect { get(:index, params: params) }.not_to exceed_query_limit(control_count)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/controllers/trackable_shared_examples.rb b/spec/support/shared_examples/controllers/trackable_shared_examples.rb
deleted file mode 100644
index dac7d8c94ff..00000000000
--- a/spec/support/shared_examples/controllers/trackable_shared_examples.rb
+++ /dev/null
@@ -1,39 +0,0 @@
-# frozen_string_literal: true
-
-RSpec.shared_examples 'a Trackable Controller' do
- describe '#track_event', :snowplow do
- before do
- sign_in user
- end
-
- context 'with no params' do
- controller(described_class) do
- def index
- track_event
- head :ok
- end
- end
-
- it 'tracks the action name', :snowplow do
- get :index
-
- expect_snowplow_event(category: 'AnonymousController', action: 'index')
- end
- end
-
- context 'with params' do
- controller(described_class) do
- def index
- track_event('some_event', category: 'SomeCategory', label: 'errorlabel')
- head :ok
- end
- end
-
- it 'tracks with the specified param' do
- get :index
-
- expect_snowplow_event(category: 'SomeCategory', action: 'some_event', label: 'errorlabel')
- end
- end
- end
-end
diff --git a/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb b/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb
index 428389a9a01..3f97c031e27 100644
--- a/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/unique_visits_shared_examples.rb
@@ -4,27 +4,30 @@ RSpec.shared_examples 'tracking unique visits' do |method|
let(:request_params) { {} }
it 'tracks unique visit if the format is HTML' do
- expect_any_instance_of(Gitlab::Analytics::UniqueVisits).to receive(:track_visit).with(instance_of(String), target_id)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .to receive(:track_event).with(target_id, values: kind_of(String))
get method, params: request_params, format: :html
end
it 'tracks unique visit if DNT is not enabled' do
- expect_any_instance_of(Gitlab::Analytics::UniqueVisits).to receive(:track_visit).with(instance_of(String), target_id)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter)
+ .to receive(:track_event).with(target_id, values: kind_of(String))
+
request.headers['DNT'] = '0'
get method, params: request_params, format: :html
end
it 'does not track unique visit if DNT is enabled' do
- expect_any_instance_of(Gitlab::Analytics::UniqueVisits).not_to receive(:track_visit)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
request.headers['DNT'] = '1'
get method, params: request_params, format: :html
end
it 'does not track unique visit if the format is JSON' do
- expect_any_instance_of(Gitlab::Analytics::UniqueVisits).not_to receive(:track_visit)
+ expect(Gitlab::UsageDataCounters::HLLRedisCounter).not_to receive(:track_event)
get method, params: request_params, format: :json
end
diff --git a/spec/support/shared_examples/features/cascading_settings_shared_examples.rb b/spec/support/shared_examples/features/cascading_settings_shared_examples.rb
new file mode 100644
index 00000000000..29ef3da9a85
--- /dev/null
+++ b/spec/support/shared_examples/features/cascading_settings_shared_examples.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'a cascading setting' do
+ context 'when setting is enforced by an ancestor group' do
+ before do
+ visit group_path
+
+ page.within form_group_selector do
+ find(setting_field_selector).check
+ find('[data-testid="enforce-for-all-subgroups-checkbox"]').check
+ end
+
+ click_save_button
+ end
+
+ it 'disables setting in subgroups' do
+ visit subgroup_path
+
+ expect(find("#{setting_field_selector}[disabled]")).to be_checked
+ end
+
+ it 'does not show enforcement checkbox in subgroups' do
+ visit subgroup_path
+
+ expect(page).not_to have_selector '[data-testid="enforce-for-all-subgroups-checkbox"]'
+ end
+
+ it 'displays lock icon with popover', :js do
+ visit subgroup_path
+
+ page.within form_group_selector do
+ find('[data-testid="cascading-settings-lock-icon"]').click
+ end
+
+ page.within '[data-testid="cascading-settings-lock-popover"]' do
+ expect(page).to have_text 'This setting has been enforced by an owner of Foo bar.'
+ expect(page).to have_link 'Foo bar', href: setting_path
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/features/creatable_merge_request_shared_examples.rb b/spec/support/shared_examples/features/creatable_merge_request_shared_examples.rb
index da966fd2200..1c816ee4b0a 100644
--- a/spec/support/shared_examples/features/creatable_merge_request_shared_examples.rb
+++ b/spec/support/shared_examples/features/creatable_merge_request_shared_examples.rb
@@ -43,7 +43,7 @@ RSpec.shared_examples 'a creatable merge request' do
expect(page.all('input[name="merge_request[label_ids][]"]', visible: false)[1].value).to match(label.id.to_s)
expect(page.all('input[name="merge_request[label_ids][]"]', visible: false)[2].value).to match(label2.id.to_s)
- click_button 'Submit merge request'
+ click_button 'Create merge request'
page.within '.issuable-sidebar' do
page.within '.assignee' do
diff --git a/spec/support/shared_examples/features/discussion_comments_shared_example.rb b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
index 86ba2821c78..808e0be6be2 100644
--- a/spec/support/shared_examples/features/discussion_comments_shared_example.rb
+++ b/spec/support/shared_examples/features/discussion_comments_shared_example.rb
@@ -304,7 +304,7 @@ RSpec.shared_examples 'thread comments for issue, epic and merge request' do |re
let(:reply_id) { find("#{comments_selector} .note:last-of-type", match: :first)['data-note-id'] }
it 'can be replied to after resolving' do
- click_button "Resolve thread"
+ find('button[data-qa-selector="resolve_discussion_button"]').click
wait_for_requests
refresh
@@ -316,7 +316,7 @@ RSpec.shared_examples 'thread comments for issue, epic and merge request' do |re
it 'shows resolved thread when toggled' do
submit_reply('a')
- click_button "Resolve thread"
+ find('button[data-qa-selector="resolve_discussion_button"]').click
wait_for_requests
expect(page).to have_selector(".note-row-#{note_id}", visible: true)
diff --git a/spec/support/shared_examples/features/error_tracking_shared_example.rb b/spec/support/shared_examples/features/error_tracking_shared_example.rb
index 92fc54ce0b0..1bdc5355408 100644
--- a/spec/support/shared_examples/features/error_tracking_shared_example.rb
+++ b/spec/support/shared_examples/features/error_tracking_shared_example.rb
@@ -2,7 +2,7 @@
RSpec.shared_examples 'error tracking index page' do
it 'renders the error index page', quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/217810' } do
- within('div.js-title-container') do
+ within('[data-testid="breadcrumb-links"]') do
expect(page).to have_content(project.namespace.name)
expect(page).to have_content(project.name)
end
diff --git a/spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb b/spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb
index 7a32f61d4fa..49c3674277d 100644
--- a/spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb
+++ b/spec/support/shared_examples/features/issuable_invite_members_shared_examples.rb
@@ -2,7 +2,7 @@
RSpec.shared_examples 'issuable invite members experiments' do
context 'when a privileged user can invite' do
- it 'shows a link for inviting members and follows through to the members page' do
+ it 'shows a link for inviting members and launches invite modal' do
project.add_maintainer(user)
visit issuable_path
@@ -11,14 +11,14 @@ RSpec.shared_examples 'issuable invite members experiments' do
wait_for_requests
page.within '.dropdown-menu-user' do
- expect(page).to have_link('Invite Members', href: project_project_members_path(project))
+ expect(page).to have_link('Invite Members')
expect(page).to have_selector('[data-track-event="click_invite_members"]')
expect(page).to have_selector('[data-track-label="edit_assignee"]')
end
click_link 'Invite Members'
- expect(current_path).to eq project_project_members_path(project)
+ expect(page).to have_content("You're inviting members to the")
end
end
diff --git a/spec/support/shared_examples/features/project_upload_files_shared_examples.rb b/spec/support/shared_examples/features/project_upload_files_shared_examples.rb
index 00d3bd08218..7adf303bde4 100644
--- a/spec/support/shared_examples/features/project_upload_files_shared_examples.rb
+++ b/spec/support/shared_examples/features/project_upload_files_shared_examples.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
-RSpec.shared_examples 'it uploads and commit a new text file' do
- it 'uploads and commit a new text file', :js do
+RSpec.shared_examples 'it uploads and commits a new text file' do
+ it 'uploads and commits a new text file', :js do
find('.add-to-tree').click
page.within('.dropdown-menu') do
@@ -10,7 +10,7 @@ RSpec.shared_examples 'it uploads and commit a new text file' do
wait_for_requests
end
- drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
+ attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'), make_visible: true)
page.within('#modal-upload-blob') do
fill_in(:commit_message, with: 'New commit message')
@@ -32,8 +32,8 @@ RSpec.shared_examples 'it uploads and commit a new text file' do
end
end
-RSpec.shared_examples 'it uploads and commit a new image file' do
- it 'uploads and commit a new image file', :js do
+RSpec.shared_examples 'it uploads and commits a new image file' do
+ it 'uploads and commits a new image file', :js do
find('.add-to-tree').click
page.within('.dropdown-menu') do
@@ -42,7 +42,7 @@ RSpec.shared_examples 'it uploads and commit a new image file' do
wait_for_requests
end
- drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'logo_sample.svg'))
+ attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'logo_sample.svg'), make_visible: true)
page.within('#modal-upload-blob') do
fill_in(:commit_message, with: 'New commit message')
@@ -58,21 +58,49 @@ RSpec.shared_examples 'it uploads and commit a new image file' do
end
end
-RSpec.shared_examples 'it uploads and commit a new file to a forked project' do
+RSpec.shared_examples 'it uploads and commits a new pdf file' do
+ it 'uploads and commits a new pdf file', :js do
+ find('.add-to-tree').click
+
+ page.within('.dropdown-menu') do
+ click_link('Upload file')
+
+ wait_for_requests
+ end
+
+ attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'git-cheat-sheet.pdf'), make_visible: true)
+
+ page.within('#modal-upload-blob') do
+ fill_in(:commit_message, with: 'New commit message')
+ fill_in(:branch_name, with: 'upload_image', visible: true)
+ click_button('Upload file')
+ end
+
+ wait_for_all_requests
+
+ visit(project_blob_path(project, 'upload_image/git-cheat-sheet.pdf'))
+
+ expect(page).to have_css('.js-pdf-viewer')
+ end
+end
+
+RSpec.shared_examples 'it uploads and commits a new file to a forked project' do
let(:fork_message) do
"You're not allowed to make changes to this project directly. "\
"A fork of this project has been created that you can make changes in, so you can submit a merge request."
end
- it 'uploads and commit a new file to a forked project', :js, :sidekiq_might_not_need_inline do
+ it 'uploads and commits a new file to a forked project', :js, :sidekiq_might_not_need_inline do
find('.add-to-tree').click
click_link('Upload file')
expect(page).to have_content(fork_message)
+ wait_for_all_requests
+
find('.add-to-tree').click
click_link('Upload file')
- drop_in_dropzone(File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'))
+ attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'), make_visible: true)
page.within('#modal-upload-blob') do
fill_in(:commit_message, with: 'New commit message')
@@ -95,6 +123,33 @@ RSpec.shared_examples 'it uploads and commit a new file to a forked project' do
end
end
+RSpec.shared_examples 'it uploads a file to a sub-directory' do
+ it 'uploads a file to a sub-directory', :js do
+ click_link 'files'
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_content('files')
+ end
+
+ find('.add-to-tree').click
+ click_link('Upload file')
+ attach_file('upload_file', File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt'), make_visible: true)
+
+ page.within('#modal-upload-blob') do
+ fill_in(:commit_message, with: 'New commit message')
+ end
+
+ click_button('Upload file')
+
+ expect(page).to have_content('New commit message')
+
+ page.within('.repo-breadcrumb') do
+ expect(page).to have_content('files')
+ expect(page).to have_content('doc_sample.txt')
+ end
+ end
+end
+
RSpec.shared_examples 'uploads and commits a new text file via "upload file" button' do
it 'uploads and commits a new text file via "upload file" button', :js do
find('[data-testid="upload-file-button"]').click
diff --git a/spec/support/shared_examples/features/resolving_discussions_in_issues_shared_examples.rb b/spec/support/shared_examples/features/resolving_discussions_in_issues_shared_examples.rb
index 06127f2ed8c..6d44a6fde85 100644
--- a/spec/support/shared_examples/features/resolving_discussions_in_issues_shared_examples.rb
+++ b/spec/support/shared_examples/features/resolving_discussions_in_issues_shared_examples.rb
@@ -14,11 +14,11 @@ RSpec.shared_examples 'creating an issue for a thread' do
end
it 'can create a new issue for the project' do
- expect { click_button 'Submit issue' }.to change { project.issues.reload.size }.by(1)
+ expect { click_button 'Create issue' }.to change { project.issues.reload.size }.by(1)
end
it 'resolves the discussion in the merge request' do
- click_button 'Submit issue'
+ click_button 'Create issue'
discussion.first_note.reload
@@ -26,7 +26,7 @@ RSpec.shared_examples 'creating an issue for a thread' do
end
it 'shows a flash message after resolving a discussion' do
- click_button 'Submit issue'
+ click_button 'Create issue'
page.within '.flash-notice' do
# Only check for the word 'Resolved' since the spec might have resolved
diff --git a/spec/support/shared_examples/features/search_settings_shared_examples.rb b/spec/support/shared_examples/features/search_settings_shared_examples.rb
index 6a507c4be56..dda780690b2 100644
--- a/spec/support/shared_examples/features/search_settings_shared_examples.rb
+++ b/spec/support/shared_examples/features/search_settings_shared_examples.rb
@@ -7,9 +7,7 @@ RSpec.shared_examples 'cannot search settings' do
end
RSpec.shared_examples 'can search settings' do |search_term, non_match_section|
- it 'has search settings field' do
- expect(page).to have_field(placeholder: SearchHelpers::INPUT_PLACEHOLDER)
- end
+ it_behaves_like 'can highlight results', search_term
it 'hides unmatching sections on search' do
expect(page).to have_content(non_match_section)
@@ -21,22 +19,19 @@ RSpec.shared_examples 'can search settings' do |search_term, non_match_section|
end
end
-RSpec.shared_examples 'can search settings with feature flag check' do |search_term, non_match_section|
- let(:flag) { true }
-
- before do
- stub_feature_flags(search_settings_in_page: flag)
-
- visit(visit_path)
+RSpec.shared_examples 'can highlight results' do |search_term|
+ it 'has search settings field' do
+ expect(page).to have_field(placeholder: SearchHelpers::INPUT_PLACEHOLDER)
end
- context 'with feature flag on' do
- it_behaves_like 'can search settings', search_term, non_match_section
- end
+ it 'highlights the search terms' do
+ selector = '.gl-bg-orange-100'
+ fill_in SearchHelpers::INPUT_PLACEHOLDER, with: search_term
- context 'with feature flag off' do
- let(:flag) { false }
+ expect(page).to have_css(selector)
- it_behaves_like 'cannot search settings'
+ page.find_all(selector) do |element|
+ expect(element).to have_content(search_term)
+ end
end
end
diff --git a/spec/support/shared_examples/features/sidebar_shared_examples.rb b/spec/support/shared_examples/features/sidebar_shared_examples.rb
new file mode 100644
index 00000000000..429efbe6ba0
--- /dev/null
+++ b/spec/support/shared_examples/features/sidebar_shared_examples.rb
@@ -0,0 +1,165 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'issue boards sidebar' do
+ include MobileHelpers
+
+ before do
+ first_card.click
+ end
+
+ it 'shows sidebar when clicking issue' do
+ expect(page).to have_selector('[data-testid="issue-boards-sidebar"]')
+ end
+
+ it 'closes sidebar when clicking issue' do
+ expect(page).to have_selector('[data-testid="issue-boards-sidebar"]')
+
+ first_card.click
+
+ expect(page).not_to have_selector('[data-testid="issue-boards-sidebar"]')
+ end
+
+ it 'shows issue details when sidebar is open', :aggregate_failures do
+ page.within('[data-testid="issue-boards-sidebar"]') do
+ expect(page).to have_content(issue.title)
+ expect(page).to have_content(issue.to_reference)
+ end
+ end
+
+ context 'when clicking close button' do
+ before do
+ find('[data-testid="issue-boards-sidebar"] .gl-drawer-close-button').click
+ end
+
+ it 'unhighlights the active issue card' do
+ expect(first_card[:class]).not_to include('is-active')
+ expect(first_card[:class]).not_to include('multi-select')
+ end
+
+ it 'closes sidebar when clicking close button' do
+ expect(page).not_to have_selector('[data-testid="issue-boards-sidebar"]')
+ end
+ end
+
+ context 'in notifications subscription' do
+ it 'displays notifications toggle', :aggregate_failures do
+ page.within('[data-testid="sidebar-notifications"]') do
+ expect(page).to have_selector('[data-testid="notification-subscribe-toggle"]')
+ expect(page).to have_content('Notifications')
+ expect(page).not_to have_content('Notifications have been disabled by the project or group owner')
+ end
+ end
+
+ it 'shows toggle as on then as off as user toggles to subscribe and unsubscribe', :aggregate_failures do
+ toggle = find('[data-testid="notification-subscribe-toggle"]')
+
+ toggle.click
+
+ expect(toggle).to have_css("button.is-checked")
+
+ toggle.click
+
+ expect(toggle).not_to have_css("button.is-checked")
+ end
+
+ context 'when notifications have been disabled' do
+ before do
+ project.update_attribute(:emails_disabled, true)
+
+ refresh_and_click_first_card
+ end
+
+ it 'displays a message that notifications have been disabled' do
+ page.within('[data-testid="sidebar-notifications"]') do
+ expect(page).not_to have_selector('[data-testid="notification-subscribe-toggle"]')
+ expect(page).to have_content('Notifications have been disabled by the project or group owner')
+ end
+ end
+ end
+ end
+
+ context 'in time tracking' do
+ it 'displays time tracking feature with default message' do
+ page.within('[data-testid="time-tracker"]') do
+ expect(page).to have_content('Time tracking')
+ expect(page).to have_content('No estimate or time spent')
+ end
+ end
+
+ context 'when only spent time is recorded' do
+ before do
+ issue.timelogs.create!(time_spent: 3600, user: user)
+
+ refresh_and_click_first_card
+ end
+
+ it 'shows the total time spent only' do
+ page.within('[data-testid="time-tracker"]') do
+ expect(page).to have_content('Spent: 1h')
+ expect(page).not_to have_content('Estimated')
+ end
+ end
+ end
+
+ context 'when only estimated time is recorded' do
+ before do
+ issue.update!(time_estimate: 3600)
+
+ refresh_and_click_first_card
+ end
+
+ it 'shows the estimated time only', :aggregate_failures do
+ page.within('[data-testid="time-tracker"]') do
+ expect(page).to have_content('Estimated: 1h')
+ expect(page).not_to have_content('Spent')
+ end
+ end
+ end
+
+ context 'when estimated and spent times are available' do
+ before do
+ issue.timelogs.create!(time_spent: 1800, user: user)
+ issue.update!(time_estimate: 3600)
+
+ refresh_and_click_first_card
+ end
+
+ it 'shows time tracking progress bar' do
+ page.within('[data-testid="time-tracker"]') do
+ expect(page).to have_selector('[data-testid="timeTrackingComparisonPane"]')
+ end
+ end
+
+ it 'shows both estimated and spent time text', :aggregate_failures do
+ page.within('[data-testid="time-tracker"]') do
+ expect(page).to have_content('Spent 30m')
+ expect(page).to have_content('Est 1h')
+ end
+ end
+ end
+
+ context 'when limitedToHours instance option is turned on' do
+ before do
+ # 3600+3600*24 = 1d 1h or 25h
+ issue.timelogs.create!(time_spent: 3600 + 3600 * 24, user: user)
+ stub_application_setting(time_tracking_limit_to_hours: true)
+
+ refresh_and_click_first_card
+ end
+
+ it 'shows the total time spent only' do
+ page.within('[data-testid="time-tracker"]') do
+ expect(page).to have_content('Spent: 25h')
+ end
+ end
+ end
+ end
+
+ def refresh_and_click_first_card
+ page.refresh
+
+ wait_for_requests
+
+ first_card.click
+ end
+end
diff --git a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
index 2f8ebd0d264..8a6d5d88ca6 100644
--- a/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_creates_wiki_page_shared_examples.rb
@@ -240,7 +240,7 @@ RSpec.shared_examples 'User creates wiki page' do
end
end
- it "shows the emoji autocompletion dropdown" do
+ it "shows the emoji autocompletion dropdown", :js do
click_link("New page")
page.within(".wiki-form") do
diff --git a/spec/support/shared_examples/features/wiki/user_git_access_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_git_access_wiki_page_shared_examples.rb
index d3d2a36147d..4fea450bd64 100644
--- a/spec/support/shared_examples/features/wiki/user_git_access_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_git_access_wiki_page_shared_examples.rb
@@ -13,7 +13,7 @@ RSpec.shared_examples 'User views Git access wiki page' do
expect(page).to have_text("Clone repository #{wiki.full_path}")
- within('.git-clone-holder') do
+ within('.js-git-clone-holder') do
expect(page).to have_css('#clone-dropdown', text: 'HTTP')
expect(page).to have_field('clone_url', with: wiki.http_url_to_repo)
diff --git a/spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb
index a22d98f20c4..1a981f42086 100644
--- a/spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_previews_wiki_changes_shared_examples.rb
@@ -38,19 +38,19 @@ RSpec.shared_examples 'User previews wiki changes' do
end
end
- context "when there are no spaces or hyphens in the page name" do
+ context "when there are no spaces or hyphens in the page name", :js do
let(:wiki_page) { build(:wiki_page, wiki: wiki, title: 'a/b/c/d', content: page_content) }
it_behaves_like 'rewrites relative links'
end
- context "when there are spaces in the page name" do
+ context "when there are spaces in the page name", :js do
let(:wiki_page) { build(:wiki_page, wiki: wiki, title: 'a page/b page/c page/d page', content: page_content) }
it_behaves_like 'rewrites relative links'
end
- context "when there are hyphens in the page name" do
+ context "when there are hyphens in the page name", :js do
let(:wiki_page) { build(:wiki_page, wiki: wiki, title: 'a-page/b-page/c-page/d-page', content: page_content) }
it_behaves_like 'rewrites relative links'
diff --git a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
index 1e325535e81..d185e9dd81c 100644
--- a/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_updates_wiki_page_shared_examples.rb
@@ -11,7 +11,7 @@ RSpec.shared_examples 'User updates wiki page' do
sign_in(user)
end
- context 'when wiki is empty' do
+ context 'when wiki is empty', :js do
before do |example|
visit(wiki_path(wiki))
@@ -57,7 +57,7 @@ RSpec.shared_examples 'User updates wiki page' do
it_behaves_like 'wiki file attachments'
end
- context 'when wiki is not empty' do
+ context 'when wiki is not empty', :js do
let!(:wiki_page) { create(:wiki_page, wiki: wiki, title: 'home', content: 'Home page') }
before do
@@ -147,7 +147,7 @@ RSpec.shared_examples 'User updates wiki page' do
it_behaves_like 'wiki file attachments'
end
- context 'when the page is in a subdir' do
+ context 'when the page is in a subdir', :js do
let(:page_name) { 'page_name' }
let(:page_dir) { "foo/bar/#{page_name}" }
let!(:wiki_page) { create(:wiki_page, wiki: wiki, title: page_dir, content: 'Home page') }
diff --git a/spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb b/spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb
index 14180d503df..3514ce286d6 100644
--- a/spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb
+++ b/spec/support/shared_examples/features/wiki/user_views_wiki_empty_shared_examples.rb
@@ -20,11 +20,11 @@ RSpec.shared_examples 'User views empty wiki' do
end
end
- shared_examples 'empty wiki message' do |writable: false, issuable: false, confluence: false|
+ shared_examples 'empty wiki message' do |writable: false, issuable: false, confluence: false, expect_button: true|
# This mirrors the logic in:
# - app/views/shared/empty_states/_wikis.html.haml
# - WikiHelper#wiki_empty_state_messages
- it 'shows the empty state message with the expected elements' do
+ it 'shows the empty state message with the expected elements', :js do
visit wiki_path(wiki)
if writable
@@ -37,7 +37,7 @@ RSpec.shared_examples 'User views empty wiki' do
if issuable && !writable
expect(element).to have_content("improve the wiki for this #{container_name}")
expect(element).to have_link("issue tracker", href: project_issues_path(project))
- expect(element).to have_link("Suggest wiki improvement", href: new_project_issue_path(project))
+ expect(element.has_link?("Suggest wiki improvement", href: new_project_issue_path(project))).to be(expect_button)
else
expect(element).not_to have_content("improve the wiki for this #{container_name}")
expect(element).not_to have_link("issue tracker")
diff --git a/spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb b/spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb
index bb4270d7db6..fc795012ce7 100644
--- a/spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb
+++ b/spec/support/shared_examples/graphql/mutations/can_mutate_spammable_examples.rb
@@ -21,13 +21,13 @@ RSpec.shared_examples 'a mutation which can mutate a spammable' do
end
end
- describe "#with_spam_action_response_fields" do
+ describe "#spam_action_response_fields" do
it 'resolves with spam action fields' do
subject
# NOTE: We do not need to assert on the specific values of spam action fields here, we only need
- # to verify that #with_spam_action_response_fields was invoked and that the fields are present in the
- # response. The specific behavior of #with_spam_action_response_fields is covered in the
+ # to verify that #spam_action_response_fields was invoked and that the fields are present in the
+ # response. The specific behavior of #spam_action_response_fields is covered in the
# HasSpamActionResponseFields unit tests.
expect(mutation_response.keys)
.to include('spam', 'spamLogId', 'needsCaptchaResponse', 'captchaSiteKey')
diff --git a/spec/support/shared_examples/graphql/mutations/set_assignees_shared_examples.rb b/spec/support/shared_examples/graphql/mutations/set_assignees_shared_examples.rb
index cfa12171b7e..022e2308517 100644
--- a/spec/support/shared_examples/graphql/mutations/set_assignees_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/mutations/set_assignees_shared_examples.rb
@@ -10,22 +10,40 @@ RSpec.shared_examples 'an assignable resource' do
describe '#resolve' do
let_it_be(:assignee) { create(:user) }
let_it_be(:assignee2) { create(:user) }
+
let(:assignee_usernames) { [assignee.username] }
let(:mutated_resource) { subject[resource.class.name.underscore.to_sym] }
+ let(:mode) { described_class.arguments['operationMode'].default_value }
- subject { mutation.resolve(project_path: resource.project.full_path, iid: resource.iid, assignee_usernames: assignee_usernames) }
-
- before do
- resource.project.add_developer(assignee)
- resource.project.add_developer(assignee2)
+ subject do
+ mutation.resolve(project_path: resource.project.full_path,
+ iid: resource.iid,
+ operation_mode: mode,
+ assignee_usernames: assignee_usernames)
end
it 'raises an error if the resource is not accessible to the user' do
expect { subject }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
end
+ it 'does not change assignees if the resource is not accessible to the assignees' do
+ resource.project.add_developer(user)
+
+ expect { subject }.not_to change { resource.reload.assignee_ids }
+ end
+
+ it 'returns an operational error if the resource is not accessible to the assignees' do
+ resource.project.add_developer(user)
+
+ result = subject
+
+ expect(result[:errors]).to include a_string_matching(/Cannot assign/)
+ end
+
context 'when the user can update the resource' do
before do
+ resource.project.add_developer(assignee)
+ resource.project.add_developer(assignee2)
resource.project.add_developer(user)
end
diff --git a/spec/support/shared_examples/graphql/sorted_paginated_query_shared_examples.rb b/spec/support/shared_examples/graphql/sorted_paginated_query_shared_examples.rb
index f78ea364147..eaeb5faee3b 100644
--- a/spec/support/shared_examples/graphql/sorted_paginated_query_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/sorted_paginated_query_shared_examples.rb
@@ -44,7 +44,7 @@
# end
# end
#
-RSpec.shared_examples 'sorted paginated query' do
+RSpec.shared_examples 'sorted paginated query' do |conditions = {}|
# Provided as a convenience when constructing queries using string concatenation
let(:page_info) { 'pageInfo { startCursor endCursor }' }
# Convenience for using default implementation of pagination_results_data
@@ -123,6 +123,16 @@ RSpec.shared_examples 'sorted paginated query' do
expect(results).to eq first_page
end
end
+
+ context 'when last and sort params are present', if: conditions[:is_reversible] do
+ let(:params) { sort_argument.merge(last: 1) }
+
+ it 'fetches last elements without error' do
+ post_graphql(pagination_query(params), current_user: current_user)
+
+ expect(results.first).to eq(expected_results.last)
+ end
+ end
end
end
end
diff --git a/spec/support/shared_examples/graphql/spam_protection_shared_examples.rb b/spec/support/shared_examples/graphql/spam_protection_shared_examples.rb
new file mode 100644
index 00000000000..8fb89a4f80e
--- /dev/null
+++ b/spec/support/shared_examples/graphql/spam_protection_shared_examples.rb
@@ -0,0 +1,85 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
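+# The including spec is expected to define `mutation` (the GraphQL mutation
+# under test), `mutation_class` (the class whose #spam_action_response_fields
+# is stubbed below), and `current_user`; all three are referenced in the
+# example bodies that follow.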
+RSpec.shared_examples 'has spam protection' do
+ include AfterNextHelpers
+
+ describe '#check_spam_action_response!' do
+ let(:variables) { nil }
+ let(:headers) { {} }
+ let(:spam_log_id) { 123 }
+ let(:captcha_site_key) { 'abc123' }
+
+ def send_request
+ post_graphql_mutation(mutation, current_user: current_user)
+ end
+
+ before do
+ allow_next(mutation_class).to receive(:spam_action_response_fields).and_return(
+ spam: spam,
+ needs_captcha_response: render_captcha,
+ spam_log_id: spam_log_id,
+ captcha_site_key: captcha_site_key
+ )
+ end
+
+ context 'when the object is spam (DISALLOW)' do
+ shared_examples 'disallow response' do
+ it 'informs the client that the request was denied as spam' do
+ send_request
+
+ expect(graphql_errors)
+ .to contain_exactly a_hash_including('message' => ::Mutations::SpamProtection::SPAM_DISALLOWED_MESSAGE)
+ expect(graphql_errors)
+ .to contain_exactly a_hash_including('extensions' => { "spam" => true })
+ end
+ end
+
+ let(:spam) { true }
+
+ context 'and no CAPTCHA is available' do
+ let(:render_captcha) { false }
+
+ it_behaves_like 'disallow response'
+ end
+
+ context 'and a CAPTCHA is required' do
+ let(:render_captcha) { true }
+
+ it_behaves_like 'disallow response'
+ end
+ end
+
+ context 'when the object is not spam (CONDITIONAL ALLOW)' do
+ let(:spam) { false }
+
+ context 'and no CAPTCHA is required' do
+ let(:render_captcha) { false }
+
+      it 'does not return a top-level error' do
+ send_request
+
+ expect(graphql_errors).to be_blank
+ end
+ end
+
+ context 'and a CAPTCHA is required' do
+ let(:render_captcha) { true }
+
+ it 'informs the client that the request may be retried after solving the CAPTCHA' do
+ send_request
+
+ expect(graphql_errors)
+ .to contain_exactly a_hash_including('message' => ::Mutations::SpamProtection::NEEDS_CAPTCHA_RESPONSE_MESSAGE)
+ expect(graphql_errors)
+ .to contain_exactly a_hash_including('extensions' => {
+ "captcha_site_key" => captcha_site_key,
+ "needs_captcha_response" => true,
+ "spam_log_id" => spam_log_id
+ })
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb b/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb
index bc091a678e2..efb2c466f70 100644
--- a/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/types/gitlab_style_deprecations_shared_examples.rb
@@ -13,18 +13,18 @@ RSpec.shared_examples 'Gitlab-style deprecations' do
it 'raises an error if a required property is missing', :aggregate_failures do
expect { subject(deprecated: { milestone: '1.10' }) }.to raise_error(
ArgumentError,
- 'Please provide a `reason` within `deprecated`'
+ include("Reason can't be blank")
)
expect { subject(deprecated: { reason: 'Deprecation reason' }) }.to raise_error(
ArgumentError,
- 'Please provide a `milestone` within `deprecated`'
+ include("Milestone can't be blank")
)
end
it 'raises an error if milestone is not a String', :aggregate_failures do
expect { subject(deprecated: { milestone: 1.10, reason: 'Deprecation reason' }) }.to raise_error(
ArgumentError,
- '`milestone` must be a `String`'
+ include("Milestone must be a string")
)
end
end
@@ -49,4 +49,22 @@ RSpec.shared_examples 'Gitlab-style deprecations' do
expect(deprecable.description).to be_nil
end
+
+ it 'adds information about the replacement if provided' do
+ deprecable = subject(deprecated: { milestone: '1.10', reason: :renamed, replacement: 'Foo.bar' })
+
+ expect(deprecable.deprecation_reason).to include 'Please use `Foo.bar`'
+ end
+
+ it 'supports named reasons: renamed' do
+ deprecable = subject(deprecated: { milestone: '1.10', reason: :renamed })
+
+ expect(deprecable.deprecation_reason).to include 'This was renamed.'
+ end
+
+ it 'supports named reasons: discouraged' do
+ deprecable = subject(deprecated: { milestone: '1.10', reason: :discouraged })
+
+ expect(deprecable.deprecation_reason).to include 'Use of this is not recommended.'
+ end
end
diff --git a/spec/support/shared_examples/helpers/groups_shared_examples.rb b/spec/support/shared_examples/helpers/groups_shared_examples.rb
new file mode 100644
index 00000000000..9c74d25b31f
--- /dev/null
+++ b/spec/support/shared_examples/helpers/groups_shared_examples.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+# This shared_example requires the following variables:
+# - current_user
+# - group
+# - type, the issuable type (i.e. :issues, :merge_requests)
+# - count_service, the Service used by the specified issuable type
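+#
+# A minimal, purely illustrative including spec could look like the sketch
+# below; the class names are assumptions, not taken from a real spec file:
+#
+#   RSpec.describe GroupsHelper do
+#     it_behaves_like 'cached issuables count' do
+#       let_it_be(:current_user) { create(:user) }
+#       let_it_be(:group) { create(:group) }
+#       let(:type) { :issues }
+#       let(:count_service) { ::Groups::OpenIssuesCountService }
+#     end
+#   end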
+
+RSpec.shared_examples 'cached issuables count' do
+ subject { helper.cached_issuables_count(group, type: type) }
+
+ before do
+ allow(helper).to receive(:current_user) { current_user }
+ allow(count_service).to receive(:new).and_call_original
+ end
+
+ it 'calls the correct service class' do
+ subject
+ expect(count_service).to have_received(:new).with(group, current_user)
+ end
+
+ it 'returns all digits for count value under 1000' do
+ allow_next_instance_of(count_service) do |service|
+ allow(service).to receive(:count).and_return(999)
+ end
+
+ expect(subject).to eq('999')
+ end
+
+ it 'returns truncated digits for count value over 1000' do
+ allow_next_instance_of(count_service) do |service|
+ allow(service).to receive(:count).and_return(2300)
+ end
+
+ expect(subject).to eq('2.3k')
+ end
+
+ it 'returns truncated digits for count value over 10000' do
+ allow_next_instance_of(count_service) do |service|
+ allow(service).to receive(:count).and_return(12560)
+ end
+
+ expect(subject).to eq('12.6k')
+ end
+
+ it 'returns truncated digits for count value over 100000' do
+ allow_next_instance_of(count_service) do |service|
+ allow(service).to receive(:count).and_return(112560)
+ end
+
+ expect(subject).to eq('112.6k')
+ end
+end
diff --git a/spec/support/shared_examples/lib/api/ci/runner_shared_examples.rb b/spec/support/shared_examples/lib/api/ci/runner_shared_examples.rb
index bdb0316bf5a..d5ebda28f0a 100644
--- a/spec/support/shared_examples/lib/api/ci/runner_shared_examples.rb
+++ b/spec/support/shared_examples/lib/api/ci/runner_shared_examples.rb
@@ -8,7 +8,7 @@ RSpec.shared_examples 'API::CI::Runner application context metadata' do |api_rou
send_request
- Labkit::Context.with_context do |context|
+ Gitlab::ApplicationContext.with_raw_context do |context|
expected_context = {
'meta.caller_id' => api_route,
'meta.user' => job.user.username,
diff --git a/spec/support/shared_examples/lib/api/internal_base_shared_examples.rb b/spec/support/shared_examples/lib/api/internal_base_shared_examples.rb
index dfa1388e0bb..ef08537dfe9 100644
--- a/spec/support/shared_examples/lib/api/internal_base_shared_examples.rb
+++ b/spec/support/shared_examples/lib/api/internal_base_shared_examples.rb
@@ -1,17 +1,6 @@
# frozen_string_literal: true
RSpec.shared_examples 'actor key validations' do
- context 'key id is not provided' do
- let(:key_id) { nil }
-
- it 'returns an error message' do
- subject
-
- expect(json_response['success']).to be_falsey
- expect(json_response['message']).to eq('Could not find a user without a key')
- end
- end
-
context 'key does not exist' do
let(:key_id) { non_existing_record_id }
diff --git a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
index 0df1af3b10a..9c95d1ff9d9 100644
--- a/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/ci/ci_trace_shared_examples.rb
@@ -843,6 +843,17 @@ RSpec.shared_examples 'trace with enabled live trace feature' do
expect { subject }.to raise_error(Gitlab::Ci::Trace::AlreadyArchivedError)
expect(build.job_artifacts_trace.file.exists?).to be_truthy
end
+
+ context 'when live trace chunks still exist' do
+ before do
+ create(:ci_build_trace_chunk, build: build)
+ end
+
+ it 'removes the traces' do
+ expect { subject }.to raise_error(Gitlab::Ci::Trace::AlreadyArchivedError)
+ expect(build.trace_chunks).to be_empty
+ end
+ end
end
context 'when job is not finished yet' do
diff --git a/spec/support/shared_examples/lib/gitlab/database/cte_materialized_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/database/cte_materialized_shared_examples.rb
new file mode 100644
index 00000000000..88e6ffd15a8
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/database/cte_materialized_shared_examples.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
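+# The including spec is expected to define `query` (the generated SQL),
+# `expected_query_block_with_materialized` and
+# `expected_query_block_without_materialized`. The `query` definition should
+# take `options` into account so that the `materialized: false` context below
+# has an effect.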
+RSpec.shared_examples 'CTE with MATERIALIZED keyword examples' do
+  describe 'adding the MATERIALIZED keyword to the CTE' do
+ let(:options) { {} }
+
+ before do
+ # Clear the cached value before the test
+ Gitlab::Database::AsWithMaterialized.clear_memoization(:materialized_supported)
+ end
+
+ context 'when PG version is <12' do
+      it 'does not add the MATERIALIZED keyword' do
+ allow(Gitlab::Database).to receive(:version).and_return('11.1')
+
+ expect(query).to include(expected_query_block_without_materialized)
+ end
+ end
+
+ context 'when PG version is >=12' do
+      it 'adds the MATERIALIZED keyword' do
+ allow(Gitlab::Database).to receive(:version).and_return('12.1')
+
+ expect(query).to include(expected_query_block_with_materialized)
+ end
+
+ context 'when version is higher than 12' do
+        it 'adds the MATERIALIZED keyword' do
+ allow(Gitlab::Database).to receive(:version).and_return('15.1')
+
+ expect(query).to include(expected_query_block_with_materialized)
+ end
+ end
+
+ context 'when materialized is disabled' do
+ let(:options) { { materialized: false } }
+
+      it 'does not add the MATERIALIZED keyword' do
+ expect(query).to include(expected_query_block_without_materialized)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/sidekiq_middleware/metrics_middleware_with_worker_attribution_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/sidekiq_middleware/metrics_middleware_with_worker_attribution_shared_examples.rb
new file mode 100644
index 00000000000..48dc47e8e9b
--- /dev/null
+++ b/spec/support/shared_examples/lib/gitlab/sidekiq_middleware/metrics_middleware_with_worker_attribution_shared_examples.rb
@@ -0,0 +1,132 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'metrics middleware with worker attribution' do
+ subject { described_class.new }
+
+ let(:queue) { :test }
+ let(:worker_class) { worker.class }
+ let(:job) { {} }
+ let(:default_labels) do
+ { queue: queue.to_s,
+ worker: worker_class.to_s,
+ boundary: "",
+ external_dependencies: "no",
+ feature_category: "",
+ urgency: "low" }
+ end
+
+ context "when workers are not attributed" do
+ before do
+ stub_const('TestNonAttributedWorker', Class.new)
+ TestNonAttributedWorker.class_eval do
+ include Sidekiq::Worker
+ end
+ end
+
+ it_behaves_like "a metrics middleware" do
+ let(:worker) { TestNonAttributedWorker.new }
+ let(:labels) { default_labels.merge(urgency: "") }
+ end
+ end
+
+ context "when a worker is wrapped into ActiveJob" do
+ before do
+ stub_const('TestWrappedWorker', Class.new)
+ TestWrappedWorker.class_eval do
+ include Sidekiq::Worker
+ end
+ end
+
+ it_behaves_like "a metrics middleware" do
+ let(:job) do
+ {
+ "class" => ActiveJob::QueueAdapters::SidekiqAdapter::JobWrapper,
+ "wrapped" => TestWrappedWorker
+ }
+ end
+
+ let(:worker) { TestWrappedWorker.new }
+ let(:labels) { default_labels.merge(urgency: "") }
+ end
+ end
+
+ context "when workers are attributed" do
+ def create_attributed_worker_class(urgency, external_dependencies, resource_boundary, category)
+ klass = Class.new do
+ include Sidekiq::Worker
+ include WorkerAttributes
+
+ urgency urgency if urgency
+ worker_has_external_dependencies! if external_dependencies
+ worker_resource_boundary resource_boundary unless resource_boundary == :unknown
+ feature_category category unless category.nil?
+ end
+ stub_const("TestAttributedWorker", klass)
+ end
+
+ let(:urgency) { nil }
+ let(:external_dependencies) { false }
+ let(:resource_boundary) { :unknown }
+ let(:feature_category) { nil }
+ let(:worker_class) { create_attributed_worker_class(urgency, external_dependencies, resource_boundary, feature_category) }
+ let(:worker) { worker_class.new }
+
+ context "high urgency" do
+ it_behaves_like "a metrics middleware" do
+ let(:urgency) { :high }
+ let(:labels) { default_labels.merge(urgency: "high") }
+ end
+ end
+
+ context "no urgency" do
+ it_behaves_like "a metrics middleware" do
+ let(:urgency) { :throttled }
+ let(:labels) { default_labels.merge(urgency: "throttled") }
+ end
+ end
+
+ context "external dependencies" do
+ it_behaves_like "a metrics middleware" do
+ let(:external_dependencies) { true }
+ let(:labels) { default_labels.merge(external_dependencies: "yes") }
+ end
+ end
+
+ context "cpu boundary" do
+ it_behaves_like "a metrics middleware" do
+ let(:resource_boundary) { :cpu }
+ let(:labels) { default_labels.merge(boundary: "cpu") }
+ end
+ end
+
+ context "memory boundary" do
+ it_behaves_like "a metrics middleware" do
+ let(:resource_boundary) { :memory }
+ let(:labels) { default_labels.merge(boundary: "memory") }
+ end
+ end
+
+ context "feature category" do
+ it_behaves_like "a metrics middleware" do
+ let(:feature_category) { :authentication }
+ let(:labels) { default_labels.merge(feature_category: "authentication") }
+ end
+ end
+
+ context "combined" do
+ it_behaves_like "a metrics middleware" do
+ let(:urgency) { :high }
+ let(:external_dependencies) { true }
+ let(:resource_boundary) { :cpu }
+ let(:feature_category) { :authentication }
+ let(:labels) do
+ default_labels.merge(
+ urgency: "high",
+ external_dependencies: "yes",
+ boundary: "cpu",
+ feature_category: "authentication")
+ end
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/lib/gitlab/sql/set_operator_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/sql/set_operator_shared_examples.rb
index 73beef06855..aa6a51c3646 100644
--- a/spec/support/shared_examples/lib/gitlab/sql/set_operator_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/sql/set_operator_shared_examples.rb
@@ -43,4 +43,33 @@ RSpec.shared_examples 'SQL set operator' do |operator_keyword|
expect(set_operator.to_sql).to eq('NULL')
end
end
+
+ describe 'remove_order parameter' do
+ let(:scopes) do
+ [
+ User.where(id: 1).order(id: :desc).limit(1),
+ User.where(id: 2).order(id: :asc).limit(1)
+ ]
+ end
+
+ subject(:union_query) { described_class.new(scopes, remove_order: remove_order).to_sql }
+
+ context 'when remove_order: true' do
+ let(:remove_order) { true }
+
+ it 'removes the ORDER BY from the query' do
+ expect(union_query).not_to include('ORDER BY "users"."id" DESC')
+ expect(union_query).not_to include('ORDER BY "users"."id" ASC')
+ end
+ end
+
+ context 'when remove_order: false' do
+ let(:remove_order) { false }
+
+ it 'does not remove the ORDER BY from the query' do
+ expect(union_query).to include('ORDER BY "users"."id" DESC')
+ expect(union_query).to include('ORDER BY "users"."id" ASC')
+ end
+ end
+ end
end
diff --git a/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb b/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb
index aa6e64a3820..4b956c2b566 100644
--- a/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb
+++ b/spec/support/shared_examples/lib/gitlab/usage_data_counters/issuable_activity_shared_examples.rb
@@ -14,10 +14,6 @@ RSpec.shared_examples 'a daily tracked issuable event' do
expect(track_action(author: user1)).to be_truthy
expect(track_action(author: user1)).to be_truthy
expect(track_action(author: user2)).to be_truthy
- expect(track_action(author: user3, time: time - 3.days)).to be_truthy
-
- expect(count_unique(date_from: time, date_to: time)).to eq(2)
- expect(count_unique(date_from: time - 5.days, date_to: 1.day.since(time))).to eq(3)
end
end
diff --git a/spec/support/shared_examples/mailers/notify_shared_examples.rb b/spec/support/shared_examples/mailers/notify_shared_examples.rb
index 0143bf693c7..b10ebb4d2a3 100644
--- a/spec/support/shared_examples/mailers/notify_shared_examples.rb
+++ b/spec/support/shared_examples/mailers/notify_shared_examples.rb
@@ -225,7 +225,7 @@ RSpec.shared_examples 'a note email' do
sender = subject.header[:from].addrs[0]
aggregate_failures do
- expect(sender.display_name).to eq(note_author.name)
+ expect(sender.display_name).to eq("#{note_author.name} (@#{note_author.username})")
expect(sender.address).to eq(gitlab_sender)
expect(subject).to deliver_to(recipient.notification_email)
end
diff --git a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
index 7bf2456c548..1b110ab02b5 100644
--- a/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
+++ b/spec/support/shared_examples/metrics/active_record_subscriber_shared_examples.rb
@@ -16,7 +16,9 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role|
db_primary_duration_s: record_query ? 0.002 : 0,
db_replica_cached_count: 0,
db_replica_count: 0,
- db_replica_duration_s: 0.0
+ db_replica_duration_s: 0.0,
+ db_primary_wal_count: record_wal_query ? 1 : 0,
+ db_replica_wal_count: 0
)
elsif db_role == :replica
expect(described_class.db_counter_payload).to eq(
@@ -28,7 +30,9 @@ RSpec.shared_examples 'store ActiveRecord info in RequestStore' do |db_role|
db_primary_duration_s: 0.0,
db_replica_cached_count: record_cached_query ? 1 : 0,
db_replica_count: record_query ? 1 : 0,
- db_replica_duration_s: record_query ? 0.002 : 0
+ db_replica_duration_s: record_query ? 0.002 : 0,
+ db_replica_wal_count: record_wal_query ? 1 : 0,
+ db_primary_wal_count: 0
)
else
expect(described_class.db_counter_payload).to eq(
@@ -66,6 +70,12 @@ RSpec.shared_examples 'record ActiveRecord metrics in a metrics transaction' do
expect(transaction).not_to receive(:increment).with("gitlab_transaction_db_#{db_role}_cached_count_total".to_sym, 1) if db_role
end
+ if record_wal_query
+ expect(transaction).to receive(:increment).with("gitlab_transaction_db_#{db_role}_wal_count_total".to_sym, 1) if db_role
+ else
+ expect(transaction).not_to receive(:increment).with("gitlab_transaction_db_#{db_role}_wal_count_total".to_sym, 1) if db_role
+ end
+
subscriber.sql(event)
end
diff --git a/spec/support/shared_examples/models/boards/listable_shared_examples.rb b/spec/support/shared_examples/models/boards/listable_shared_examples.rb
index e733a5488fb..250a4c1b1bd 100644
--- a/spec/support/shared_examples/models/boards/listable_shared_examples.rb
+++ b/spec/support/shared_examples/models/boards/listable_shared_examples.rb
@@ -16,18 +16,23 @@ RSpec.shared_examples 'boards listable model' do |list_factory|
end
describe 'scopes' do
+ let_it_be(:list1) { create(list_factory, list_type: :backlog) }
+ let_it_be(:list2) { create(list_factory, list_type: :closed) }
+ let_it_be(:list3) { create(list_factory, position: 1) }
+ let_it_be(:list4) { create(list_factory, position: 2) }
+
describe '.ordered' do
it 'returns lists ordered by type and position' do
- # rubocop:disable Rails/SaveBang
- lists = [
- create(list_factory, list_type: :backlog),
- create(list_factory, list_type: :closed),
- create(list_factory, position: 1),
- create(list_factory, position: 2)
- ]
- # rubocop:enable Rails/SaveBang
-
- expect(described_class.where(id: lists).ordered).to eq([lists[0], lists[2], lists[3], lists[1]])
+ expect(described_class.where(id: [list1, list2, list3, list4]).ordered)
+ .to eq([list1, list3, list4, list2])
+ end
+ end
+
+ describe '.without_types' do
+ it 'excludes lists of given types' do
+ lists = described_class.without_types([:label, :closed])
+
+ expect(lists).to match_array([list1])
end
end
end
diff --git a/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb b/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb
index 7603787a54e..d3f3e15d299 100644
--- a/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb
+++ b/spec/support/shared_examples/models/cluster_application_status_shared_examples.rb
@@ -138,7 +138,7 @@ RSpec.shared_examples 'cluster application status specs' do |application_name|
it 'is installed' do
subject.make_externally_installed
- expect(subject).to be_installed
+ expect(subject).to be_externally_installed
end
context 'helm record does not exist' do
@@ -170,7 +170,7 @@ RSpec.shared_examples 'cluster application status specs' do |application_name|
it 'is installed' do
subject.make_externally_installed
- expect(subject).to be_installed
+ expect(subject).to be_externally_installed
end
end
@@ -180,7 +180,7 @@ RSpec.shared_examples 'cluster application status specs' do |application_name|
it 'is installed' do
subject.make_externally_installed
- expect(subject).to be_installed
+ expect(subject).to be_externally_installed
end
it 'clears #status_reason' do
@@ -317,6 +317,7 @@ RSpec.shared_examples 'cluster application status specs' do |application_name|
:uninstall_errored | false
:uninstalled | false
:timed_out | false
+ :externally_installed | true
end
with_them do
diff --git a/spec/support/shared_examples/models/cluster_application_version_shared_examples.rb b/spec/support/shared_examples/models/cluster_application_version_shared_examples.rb
index ed2e4fee2de..3acc43eb0da 100644
--- a/spec/support/shared_examples/models/cluster_application_version_shared_examples.rb
+++ b/spec/support/shared_examples/models/cluster_application_version_shared_examples.rb
@@ -47,4 +47,15 @@ RSpec.shared_examples 'cluster application version specs' do |application_name|
end
end
end
+
+  describe '#make_externally_installed!' do
+ subject { build(application_name) }
+
+ it 'sets to a special version' do
+ subject.make_externally_installed!
+
+ expect(subject).to be_persisted
+ expect(subject.version).to eq('EXTERNALLY_INSTALLED')
+ end
+ end
end
diff --git a/spec/support/shared_examples/models/clusters/prometheus_client_shared.rb b/spec/support/shared_examples/models/clusters/prometheus_client_shared.rb
new file mode 100644
index 00000000000..8d6dcfef925
--- /dev/null
+++ b/spec/support/shared_examples/models/clusters/prometheus_client_shared.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+# Input
+# - factory: [:clusters_applications_prometheus, :clusters_integrations_prometheus]
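+# The including spec is also expected to define a base `subject` built from
+# `factory`; both are referenced in the contexts below.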
+RSpec.shared_examples '#prometheus_client shared' do
+ shared_examples 'exception caught for prometheus client' do
+ before do
+ allow(kube_client).to receive(:proxy_url).and_raise(exception)
+ end
+
+ it 'returns nil' do
+ expect(subject.prometheus_client).to be_nil
+ end
+ end
+
+ context 'cluster is nil' do
+ it 'returns nil' do
+ expect(subject.cluster).to be_nil
+ expect(subject.prometheus_client).to be_nil
+ end
+ end
+
+ context "cluster doesn't have kubeclient" do
+ let(:cluster) { create(:cluster) }
+
+ subject { create(factory, cluster: cluster) }
+
+ it 'returns nil' do
+ expect(subject.prometheus_client).to be_nil
+ end
+ end
+
+ context 'cluster has kubeclient' do
+ let(:cluster) { create(:cluster, :project, :provided_by_gcp) }
+ let(:kubernetes_url) { subject.cluster.platform_kubernetes.api_url }
+ let(:kube_client) { subject.cluster.kubeclient.core_client }
+
+ subject { create(factory, cluster: cluster) }
+
+ before do
+ subject.cluster.platform_kubernetes.namespace = 'a-namespace'
+ stub_kubeclient_discover(cluster.platform_kubernetes.api_url)
+
+ create(:cluster_kubernetes_namespace,
+ cluster: cluster,
+ cluster_project: cluster.cluster_project,
+ project: cluster.cluster_project.project)
+ end
+
+ it 'creates proxy prometheus_client' do
+ expect(subject.prometheus_client).to be_instance_of(Gitlab::PrometheusClient)
+ end
+
+ it 'merges proxy_url, options and headers from kube client with prometheus_client options' do
+ expect(Gitlab::PrometheusClient)
+ .to(receive(:new))
+ .with(a_valid_url, kube_client.rest_client.options.merge({
+ headers: kube_client.headers,
+ timeout: PrometheusAdapter::DEFAULT_PROMETHEUS_REQUEST_TIMEOUT_SEC
+ }))
+ subject.prometheus_client
+ end
+
+ context 'when cluster is not reachable' do
+ it_behaves_like 'exception caught for prometheus client' do
+ let(:exception) { Kubeclient::HttpError.new(401, 'Unauthorized', nil) }
+ end
+ end
+
+ context 'when there is a socket error while contacting cluster' do
+ it_behaves_like 'exception caught for prometheus client' do
+ let(:exception) { Errno::ECONNREFUSED }
+ end
+
+ it_behaves_like 'exception caught for prometheus client' do
+ let(:exception) { Errno::ECONNRESET }
+ end
+ end
+
+ context 'when the network is unreachable' do
+ it_behaves_like 'exception caught for prometheus client' do
+ let(:exception) { Errno::ENETUNREACH }
+ end
+ end
+ end
+end
diff --git a/spec/support/shared_examples/models/packages/debian/architecture_shared_examples.rb b/spec/support/shared_examples/models/packages/debian/architecture_shared_examples.rb
index b73ff516670..fbb94b4f5c1 100644
--- a/spec/support/shared_examples/models/packages/debian/architecture_shared_examples.rb
+++ b/spec/support/shared_examples/models/packages/debian/architecture_shared_examples.rb
@@ -34,7 +34,7 @@ RSpec.shared_examples 'Debian Distribution Architecture' do |factory, container,
subject { described_class.with_distribution(architecture.distribution) }
it 'does not return other distributions' do
- expect(subject.to_a).to eq([architecture, architecture_same_distribution])
+ expect(subject.to_a).to match_array([architecture, architecture_same_distribution])
end
end
@@ -42,7 +42,7 @@ RSpec.shared_examples 'Debian Distribution Architecture' do |factory, container,
subject { described_class.with_name(architecture.name) }
it 'does not return other distributions' do
- expect(subject.to_a).to eq([architecture, architecture_same_name])
+ expect(subject.to_a).to match_array([architecture, architecture_same_name])
end
end
end
diff --git a/spec/support/shared_examples/models/packages/debian/component_shared_examples.rb b/spec/support/shared_examples/models/packages/debian/component_shared_examples.rb
index bf6fc23116c..23e76d32fb0 100644
--- a/spec/support/shared_examples/models/packages/debian/component_shared_examples.rb
+++ b/spec/support/shared_examples/models/packages/debian/component_shared_examples.rb
@@ -36,7 +36,7 @@ RSpec.shared_examples 'Debian Distribution Component' do |factory, container, ca
subject { described_class.with_distribution(component.distribution) }
it 'does not return other distributions' do
- expect(subject.to_a).to eq([component, component_same_distribution])
+ expect(subject.to_a).to match_array([component, component_same_distribution])
end
end
@@ -44,7 +44,7 @@ RSpec.shared_examples 'Debian Distribution Component' do |factory, container, ca
subject { described_class.with_name(component.name) }
it 'does not return other distributions' do
- expect(subject.to_a).to eq([component, component_same_name])
+ expect(subject.to_a).to match_array([component, component_same_name])
end
end
end
diff --git a/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb b/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
index b4ec146df14..9eacacf725f 100644
--- a/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
+++ b/spec/support/shared_examples/models/packages/debian/distribution_shared_examples.rb
@@ -179,7 +179,7 @@ RSpec.shared_examples 'Debian Distribution' do |factory, container, can_freeze|
subject { described_class.with_codename_or_suite(distribution_with_suite.codename) }
it 'does not return other distributions' do
- expect(subject.to_a).to eq([distribution_with_suite, distribution_with_same_codename, distribution_with_codename_and_suite_flipped])
+ expect(subject.to_a).to contain_exactly(distribution_with_suite, distribution_with_same_codename, distribution_with_codename_and_suite_flipped)
end
end
@@ -187,7 +187,7 @@ RSpec.shared_examples 'Debian Distribution' do |factory, container, can_freeze|
subject { described_class.with_codename_or_suite(distribution_with_suite.suite) }
it 'does not return other distributions' do
- expect(subject.to_a).to eq([distribution_with_suite, distribution_with_same_suite, distribution_with_codename_and_suite_flipped])
+ expect(subject.to_a).to contain_exactly(distribution_with_suite, distribution_with_same_suite, distribution_with_codename_and_suite_flipped)
end
end
end
diff --git a/spec/support/shared_examples/models/wiki_shared_examples.rb b/spec/support/shared_examples/models/wiki_shared_examples.rb
index abc6e3ecce8..6b243aef3e6 100644
--- a/spec/support/shared_examples/models/wiki_shared_examples.rb
+++ b/spec/support/shared_examples/models/wiki_shared_examples.rb
@@ -354,33 +354,29 @@ RSpec.shared_examples 'wiki model' do
subject.repository.create_file(user, 'image.png', image, branch_name: subject.default_branch, message: 'add image')
end
- shared_examples 'find_file results' do
- it 'returns the latest version of the file if it exists' do
- file = subject.find_file('image.png')
+ it 'returns the latest version of the file if it exists' do
+ file = subject.find_file('image.png')
- expect(file.mime_type).to eq('image/png')
- end
+ expect(file.mime_type).to eq('image/png')
+ end
- it 'returns nil if the page does not exist' do
- expect(subject.find_file('non-existent')).to eq(nil)
- end
+ it 'returns nil if the page does not exist' do
+ expect(subject.find_file('non-existent')).to eq(nil)
+ end
- it 'returns a Gitlab::Git::WikiFile instance' do
- file = subject.find_file('image.png')
+ it 'returns a Gitlab::Git::WikiFile instance' do
+ file = subject.find_file('image.png')
- expect(file).to be_a Gitlab::Git::WikiFile
- end
+ expect(file).to be_a Gitlab::Git::WikiFile
+ end
- it 'returns the whole file' do
- file = subject.find_file('image.png')
- image.rewind
+ it 'returns the whole file' do
+ file = subject.find_file('image.png')
+ image.rewind
- expect(file.raw_data.b).to eq(image.read.b)
- end
+ expect(file.raw_data.b).to eq(image.read.b)
end
- it_behaves_like 'find_file results'
-
context 'when load_content is disabled' do
it 'includes the file data in the Gitlab::Git::WikiFile' do
file = subject.find_file('image.png', load_content: false)
@@ -388,14 +384,6 @@ RSpec.shared_examples 'wiki model' do
expect(file.raw_data).to be_empty
end
end
-
- context 'when feature flag :gitaly_find_file is disabled' do
- before do
- stub_feature_flags(gitaly_find_file: false)
- end
-
- it_behaves_like 'find_file results'
- end
end
describe '#create_page' do
@@ -481,28 +469,53 @@ RSpec.shared_examples 'wiki model' do
end
describe '#delete_page' do
- let(:page) { create(:wiki_page, wiki: wiki) }
+ shared_examples 'delete_page operations' do
+ let(:page) { create(:wiki_page, wiki: wiki) }
- it 'deletes the page' do
- subject.delete_page(page)
+ it 'deletes the page' do
+ subject.delete_page(page)
- expect(subject.list_pages.count).to eq(0)
- end
+ expect(subject.list_pages.count).to eq(0)
+ end
- it 'sets the correct commit email' do
- subject.delete_page(page)
+ it 'sets the correct commit email' do
+ subject.delete_page(page)
- expect(user.commit_email).not_to eq(user.email)
- expect(commit.author_email).to eq(user.commit_email)
- expect(commit.committer_email).to eq(user.commit_email)
+ expect(user.commit_email).not_to eq(user.email)
+ expect(commit.author_email).to eq(user.commit_email)
+ expect(commit.committer_email).to eq(user.commit_email)
+ end
+
+ it 'runs after_wiki_activity callbacks' do
+ page
+
+ expect(subject).to receive(:after_wiki_activity)
+
+ subject.delete_page(page)
+ end
end
- it 'runs after_wiki_activity callbacks' do
- page
+ it_behaves_like 'delete_page operations'
- expect(subject).to receive(:after_wiki_activity)
+ context 'when an error is raised' do
+ it 'logs the error and returns false' do
+ page = build(:wiki_page, wiki: wiki)
+ exception = Gitlab::Git::Index::IndexError.new('foo')
+
+ allow(subject.repository).to receive(:delete_file).and_raise(exception)
+
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(exception, action: :deleted, wiki_id: wiki.id)
+
+ expect(subject.delete_page(page)).to be_falsey
+ end
+ end
+
+ context 'when feature flag :gitaly_replace_wiki_delete_page is disabled' do
+ before do
+ stub_feature_flags(gitaly_replace_wiki_delete_page: false)
+ end
- subject.delete_page(page)
+ it_behaves_like 'delete_page operations'
end
end
diff --git a/spec/support/shared_examples/namespaces/recursive_traversal_examples.rb b/spec/support/shared_examples/namespaces/namespace_traversal_examples.rb
index 2c94be61bc1..36e5808fa28 100644
--- a/spec/support/shared_examples/namespaces/recursive_traversal_examples.rb
+++ b/spec/support/shared_examples/namespaces/namespace_traversal_examples.rb
@@ -1,6 +1,22 @@
# frozen_string_literal: true
-RSpec.shared_examples 'recursive namespace traversal' do
+RSpec.shared_examples 'namespace traversal' do
+ shared_examples 'recursive version' do |method|
+ let(:recursive_method) { "recursive_#{method}" }
+
+ it "is equivalent to ##{method}" do
+ groups.each do |group|
+ expect(group.public_send(method)).to match_array group.public_send(recursive_method)
+ end
+ end
+
+ it "makes a recursive query" do
+ groups.each do |group|
+ expect { group.public_send(recursive_method).load }.to make_queries_matching(/WITH RECURSIVE/)
+ end
+ end
+ end
+
describe '#self_and_hierarchy' do
let!(:group) { create(:group, path: 'git_lab') }
let!(:nested_group) { create(:group, parent: group) }
@@ -14,6 +30,12 @@ RSpec.shared_examples 'recursive namespace traversal' do
expect(nested_group.self_and_hierarchy).to contain_exactly(group, nested_group, deep_nested_group, very_deep_nested_group)
expect(very_deep_nested_group.self_and_hierarchy).to contain_exactly(group, nested_group, deep_nested_group, very_deep_nested_group)
end
+
+ describe '#recursive_self_and_hierarchy' do
+ let(:groups) { [group, nested_group, very_deep_nested_group] }
+
+ it_behaves_like 'recursive version', :self_and_hierarchy
+ end
end
describe '#ancestors' do
@@ -28,6 +50,12 @@ RSpec.shared_examples 'recursive namespace traversal' do
expect(nested_group.ancestors).to include(group)
expect(group.ancestors).to eq([])
end
+
+ describe '#recursive_ancestors' do
+ let(:groups) { [nested_group, deep_nested_group, very_deep_nested_group] }
+
+ it_behaves_like 'recursive version', :ancestors
+ end
end
describe '#self_and_ancestors' do
@@ -42,6 +70,12 @@ RSpec.shared_examples 'recursive namespace traversal' do
expect(nested_group.self_and_ancestors).to contain_exactly(group, nested_group)
expect(group.self_and_ancestors).to contain_exactly(group)
end
+
+ describe '#recursive_self_and_ancestors' do
+ let(:groups) { [nested_group, deep_nested_group, very_deep_nested_group] }
+
+ it_behaves_like 'recursive version', :self_and_ancestors
+ end
end
describe '#descendants' do
@@ -58,6 +92,12 @@ RSpec.shared_examples 'recursive namespace traversal' do
expect(nested_group.descendants.to_a).to include(deep_nested_group, very_deep_nested_group)
expect(group.descendants.to_a).to include(nested_group, deep_nested_group, very_deep_nested_group)
end
+
+ describe '#recursive_descendants' do
+ let(:groups) { [group, nested_group, deep_nested_group, very_deep_nested_group] }
+
+ it_behaves_like 'recursive version', :descendants
+ end
end
describe '#self_and_descendants' do
@@ -74,5 +114,11 @@ RSpec.shared_examples 'recursive namespace traversal' do
expect(nested_group.self_and_descendants).to contain_exactly(nested_group, deep_nested_group, very_deep_nested_group)
expect(group.self_and_descendants).to contain_exactly(group, nested_group, deep_nested_group, very_deep_nested_group)
end
+
+ describe '#recursive_self_and_descendants' do
+ let(:groups) { [group, nested_group, deep_nested_group, very_deep_nested_group] }
+
+ it_behaves_like 'recursive version', :self_and_descendants
+ end
end
end
diff --git a/spec/support/shared_examples/nav_sidebar_shared_examples.rb b/spec/support/shared_examples/nav_sidebar_shared_examples.rb
index e084a957785..3e500683712 100644
--- a/spec/support/shared_examples/nav_sidebar_shared_examples.rb
+++ b/spec/support/shared_examples/nav_sidebar_shared_examples.rb
@@ -24,3 +24,13 @@ RSpec.shared_examples 'page has active sub tab' do |title|
.to have_content(title)
end
end
+
+RSpec.shared_examples 'sidebar includes snowplow attributes' do |track_action, track_label, track_property|
+ specify do
+ allow(view).to receive(:tracking_enabled?).and_return(true)
+
+ render
+
+ expect(rendered).to have_css(".nav-sidebar[data-track-action=\"#{track_action}\"][data-track-label=\"#{track_label}\"][data-track-property=\"#{track_property}\"]")
+ end
+end
diff --git a/spec/support/shared_examples/policies/resource_access_token_shared_examples.rb b/spec/support/shared_examples/policies/resource_access_token_shared_examples.rb
index 7710e756e5b..337ad024fc0 100644
--- a/spec/support/shared_examples/policies/resource_access_token_shared_examples.rb
+++ b/spec/support/shared_examples/policies/resource_access_token_shared_examples.rb
@@ -5,16 +5,70 @@ RSpec.shared_examples 'Self-managed Core resource access tokens' do
allow(::Gitlab).to receive(:com?).and_return(false)
end
- context 'with owner' do
+ context 'with owner access' do
let(:current_user) { owner }
- it { is_expected.to be_allowed(:admin_resource_access_tokens) }
+ context 'create resource access tokens' do
+ it { is_expected.to be_allowed(:create_resource_access_tokens) }
+
+ context 'when resource access token creation is not allowed' do
+ let(:group) { create(:group) }
+ let(:project) { create(:project, group: group) }
+
+ before do
+ group.namespace_settings.update_column(:resource_access_token_creation_allowed, false)
+ end
+
+ it { is_expected.not_to be_allowed(:create_resource_access_tokens) }
+ end
+
+ context 'when parent group has project access token creation disabled' do
+ let(:parent) { create(:group) }
+ let(:group) { create(:group, parent: parent) }
+ let(:project) { create(:project, group: group) }
+
+ before do
+ parent.namespace_settings.update_column(:resource_access_token_creation_allowed, false)
+ end
+
+ it { is_expected.not_to be_allowed(:create_resource_access_tokens) }
+ end
+
+ context 'with a personal namespace project' do
+ let(:namespace) { create(:namespace) }
+ let(:project) { create(:project, namespace: namespace) }
+
+ before do
+ project.add_maintainer(current_user)
+ end
+
+ it { is_expected.to be_allowed(:create_resource_access_tokens) }
+ end
+ end
+
+ context 'read resource access tokens' do
+ it { is_expected.to be_allowed(:read_resource_access_tokens) }
+ end
+
+ context 'destroy resource access tokens' do
+ it { is_expected.to be_allowed(:destroy_resource_access_tokens) }
+ end
end
- context 'with developer' do
+ context 'with developer access' do
let(:current_user) { developer }
- it { is_expected.not_to be_allowed(:admin_resource_access_tokens) }
+ context 'create resource access tokens' do
+ it { is_expected.not_to be_allowed(:create_resource_access_tokens) }
+ end
+
+ context 'read resource access tokens' do
+ it { is_expected.not_to be_allowed(:read_resource_access_tokens) }
+ end
+
+ context 'destroy resource access tokens' do
+ it { is_expected.not_to be_allowed(:destroy_resource_access_tokens) }
+ end
end
end
@@ -24,9 +78,19 @@ RSpec.shared_examples 'GitLab.com Core resource access tokens' do
stub_ee_application_setting(should_check_namespace_plan: true)
end
- context 'with owner' do
+ context 'with owner access' do
let(:current_user) { owner }
- it { is_expected.not_to be_allowed(:admin_resource_access_tokens) }
+ context 'create resource access tokens' do
+ it { is_expected.not_to be_allowed(:create_resource_access_tokens) }
+ end
+
+ context 'read resource access tokens' do
+ it { is_expected.not_to be_allowed(:read_resource_access_tokens) }
+ end
+
+ context 'destroy resource access tokens' do
+ it { is_expected.not_to be_allowed(:destroy_resource_access_tokens) }
+ end
end
end
diff --git a/spec/support/shared_examples/querying_shared_examples.rb b/spec/support/shared_examples/querying_shared_examples.rb
new file mode 100644
index 00000000000..1f554ddb441
--- /dev/null
+++ b/spec/support/shared_examples/querying_shared_examples.rb
@@ -0,0 +1,23 @@
+# frozen_string_literal: true
+
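+# Matches a multiline SQL UPDATE statement that sets the given column
+# (UPDATE ... SET ... <column> = ... FROM ...); used by the shared examples
+# below to check whether a recorded query touched that column.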
+def update_column_regex(column)
+ /UPDATE.+SET.+#{column}[^=*]=.+FROM.*/m
+end
+
+RSpec.shared_examples 'update on column' do |column|
+ it "#{column} column updated" do
+ qr = ActiveRecord::QueryRecorder.new do
+ subject
+ end
+ expect(qr.log).to include a_string_matching update_column_regex(column)
+ end
+end
+
+RSpec.shared_examples 'no update on column' do |column|
+ it "#{column} column is not updated" do
+ qr = ActiveRecord::QueryRecorder.new do
+ subject
+ end
+ expect(qr.log).not_to include a_string_matching update_column_regex(column)
+ end
+end
diff --git a/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb
index 4fde68efd60..ca6536444fd 100644
--- a/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb
@@ -23,7 +23,7 @@ RSpec.shared_examples 'close quick action' do |issuable_type|
it "creates the #{issuable_type} and interprets close quick action accordingly" do
fill_in "#{issuable_type}_title", with: 'bug 345'
fill_in "#{issuable_type}_description", with: "bug description\n/close"
- click_button "Submit #{issuable_type}".humanize
+ click_button "Create #{issuable_type}".humanize
issuable = project.public_send(issuable_type.to_s.pluralize).first
diff --git a/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb
index 54ea876bed2..87aaac673c1 100644
--- a/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/conan_packages_shared_examples.rb
@@ -205,6 +205,14 @@ RSpec.shared_examples 'empty recipe for not found package' do
'aa/bb/%{project}/ccc' % { project: ::Packages::Conan::Metadatum.package_username_from(full_path: project.full_path) }
end
+ let(:presenter) { double('::Packages::Conan::PackagePresenter') }
+
+ before do
+ allow(::Packages::Conan::PackagePresenter).to receive(:new)
+ .with(package, user, package.project, any_args)
+ .and_return(presenter)
+ end
+
it 'returns not found' do
allow(::Packages::Conan::PackagePresenter).to receive(:new)
.with(
@@ -248,8 +256,6 @@ RSpec.shared_examples 'recipe download_urls' do
'conanmanifest.txt' => "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/export/conanmanifest.txt"
}
- allow(presenter).to receive(:recipe_urls) { expected_response }
-
subject
expect(json_response).to eq(expected_response)
@@ -268,8 +274,6 @@ RSpec.shared_examples 'package download_urls' do
'conan_package.tgz' => "#{url_prefix}/packages/conan/v1/files/#{package.conan_recipe_path}/0/package/123456789/0/conan_package.tgz"
}
- allow(presenter).to receive(:package_urls) { expected_response }
-
subject
expect(json_response).to eq(expected_response)
@@ -309,13 +313,14 @@ RSpec.shared_examples 'recipe snapshot endpoint' do
context 'with existing package' do
it 'returns a hash of files with their md5 hashes' do
+ conan_file_file = package.package_files.find_by(file_name: 'conanfile.py')
+ conan_manifest_file = package.package_files.find_by(file_name: 'conanmanifest.txt')
+
expected_response = {
- 'conanfile.py' => 'md5hash1',
- 'conanmanifest.txt' => 'md5hash2'
+ 'conanfile.py' => conan_file_file.file_md5,
+ 'conanmanifest.txt' => conan_manifest_file.file_md5
}
- allow(presenter).to receive(:recipe_snapshot) { expected_response }
-
subject
expect(json_response).to eq(expected_response)
@@ -333,13 +338,11 @@ RSpec.shared_examples 'package snapshot endpoint' do
context 'with existing package' do
it 'returns a hash of md5 values for the files' do
expected_response = {
- 'conaninfo.txt' => "md5hash1",
- 'conanmanifest.txt' => "md5hash2",
- 'conan_package.tgz' => "md5hash3"
+ 'conaninfo.txt' => "12345abcde",
+ 'conanmanifest.txt' => "12345abcde",
+ 'conan_package.tgz' => "12345abcde"
}
- allow(presenter).to receive(:package_snapshot) { expected_response }
-
subject
expect(json_response).to eq(expected_response)
diff --git a/spec/support/shared_examples/requests/api/graphql/projects/alert_management/integrations_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/projects/alert_management/integrations_shared_examples.rb
new file mode 100644
index 00000000000..c134f7d1839
--- /dev/null
+++ b/spec/support/shared_examples/requests/api/graphql/projects/alert_management/integrations_shared_examples.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
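+# The including spec is expected to define `project`, `current_user`,
+# `active_http_integration` and `inactive_http_integration`; all are
+# referenced in the queries and expectations below.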
+RSpec.shared_examples 'GraphQL query with several integrations requested' do |graphql_query_name:|
+ context 'when several HTTP integrations requested' do
+ let(:params_ai) { { id: global_id_of(active_http_integration) } }
+ let(:params_ii) { { id: global_id_of(inactive_http_integration) } }
+ let(:fields) { "nodes { id name }" }
+
+ let(:single_selection_query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ <<~QUERY
+ ai: #{query_graphql_field(graphql_query_name, params_ai, fields)}
+ QUERY
+ )
+ end
+
+ let(:multi_selection_query) do
+ graphql_query_for(
+ 'project',
+ { 'fullPath' => project.full_path },
+ <<~QUERY
+ ai: #{query_graphql_field(graphql_query_name, params_ai, fields)}
+ ii: #{query_graphql_field(graphql_query_name, params_ii, fields)}
+ QUERY
+ )
+ end
+
+ it 'returns the correct properties of the integrations', :aggregate_failures do
+ post_graphql(multi_selection_query, current_user: current_user)
+
+ expect(graphql_data.dig('project', 'ai', 'nodes')).to include(
+ 'id' => global_id_of(active_http_integration),
+ 'name' => active_http_integration.name
+ )
+
+ expect(graphql_data.dig('project', 'ii', 'nodes')).to include(
+ 'id' => global_id_of(inactive_http_integration),
+ 'name' => inactive_http_integration.name
+ )
+ end
+
+ it 'batches queries' do
+ expect { post_graphql(multi_selection_query, current_user: current_user) }
+ .to issue_same_number_of_queries_as { post_graphql(single_selection_query, current_user: current_user) }.ignoring_cached_queries
+ end
+ end
+end
diff --git a/spec/support/shared_examples/requests/api/logging_application_context_shared_examples.rb b/spec/support/shared_examples/requests/api/logging_application_context_shared_examples.rb
index 4a71b696d57..cb06c9fa596 100644
--- a/spec/support/shared_examples/requests/api/logging_application_context_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/logging_application_context_shared_examples.rb
@@ -1,21 +1,13 @@
# frozen_string_literal: true
RSpec.shared_examples 'storing arguments in the application context' do
- around do |example|
- Labkit::Context.with_context { example.run }
- end
-
it 'places the expected params in the application context' do
# Stub the clearing of the context so we can validate it later
- # The `around` block above makes sure we do clean it up later
allow(Labkit::Context).to receive(:pop)
subject
- Labkit::Context.with_context do |context|
- expect(context.to_h)
- .to include(log_hash(expected_params))
- end
+ expect(Gitlab::ApplicationContext.current).to include(log_hash(expected_params))
end
def log_hash(hash)
diff --git a/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb
index 7b7d2a33e8c..db70bc75c63 100644
--- a/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb
@@ -156,7 +156,7 @@ RSpec.shared_examples 'handling nuget metadata requests with package name and pa
include_context 'with expected presenters dependency groups'
let_it_be(:package_name) { 'Dummy.Package' }
- let_it_be(:package) { create(:nuget_package, :with_metadatum, name: 'Dummy.Package', project: project) }
+ let_it_be(:package) { create(:nuget_package, :with_metadatum, name: package_name, project: project) }
let_it_be(:tag) { create(:packages_tag, package: package, name: 'test') }
subject { get api(url) }
@@ -225,7 +225,7 @@ RSpec.shared_examples 'handling nuget search requests' do |anonymous_requests_ex
let(:take) { 26 }
let(:skip) { 0 }
let(:include_prereleases) { true }
- let(:query_parameters) { { q: search_term, take: take, skip: skip, prerelease: include_prereleases } }
+ let(:query_parameters) { { q: search_term, take: take, skip: skip, prerelease: include_prereleases }.compact }
subject { get api(url) }
diff --git a/spec/support/shared_examples/requests/api/packages_shared_examples.rb b/spec/support/shared_examples/requests/api/packages_shared_examples.rb
index 15976eed021..eb86b7c37d5 100644
--- a/spec/support/shared_examples/requests/api/packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/packages_shared_examples.rb
@@ -100,7 +100,7 @@ RSpec.shared_examples 'job token for package GET requests' do
end
end
-RSpec.shared_examples 'job token for package uploads' do
+RSpec.shared_examples 'job token for package uploads' do |authorize_endpoint: false|
context 'with job token headers' do
let(:headers) { basic_auth_header(::Gitlab::Auth::CI_JOB_USER, job.token).merge(workhorse_headers) }
@@ -111,6 +111,17 @@ RSpec.shared_examples 'job token for package uploads' do
context 'valid token' do
it_behaves_like 'returning response status', :success
+
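+        # Only non-authorize endpoints are expected to persist a package, so the creation assertion is skipped for authorize endpoints.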
+ unless authorize_endpoint
+ it 'creates a package with build info' do
+ expect { subject }.to change { Packages::Package.count }.by(1)
+
+ pkg = ::Packages::Package.order_created
+ .last
+
+ expect(pkg.build_infos).to be
+ end
+ end
end
context 'invalid token' do
diff --git a/spec/support/shared_examples/requests/api/rubygems_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/rubygems_packages_shared_examples.rb
index 15fb6611b90..abdb468353a 100644
--- a/spec/support/shared_examples/requests/api/rubygems_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/rubygems_packages_shared_examples.rb
@@ -43,6 +43,8 @@ end
RSpec.shared_examples 'process rubygems upload' do |user_type, status, add_member = true|
RSpec.shared_examples 'creates rubygems package files' do
it 'creates package files', :aggregate_failures do
+ expect(::Packages::Rubygems::ExtractionWorker).to receive(:perform_async).once
+
expect { subject }
.to change { project.packages.count }.by(1)
.and change { Packages::PackageFile.count }.by(1)
@@ -51,6 +53,17 @@ RSpec.shared_examples 'process rubygems upload' do |user_type, status, add_membe
package_file = project.packages.last.package_files.reload.last
expect(package_file.file_name).to eq('package.gem')
end
+
+ it 'returns bad request if package creation fails' do
+ file_service = double('file_service', execute: nil)
+
+ expect(::Packages::CreatePackageFileService).to receive(:new).and_return(file_service)
+ expect(::Packages::Rubygems::ExtractionWorker).not_to receive(:perform_async)
+
+ subject
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
end
context "for user type #{user_type}" do
diff --git a/spec/support/shared_examples/requests/clusters/integrations_controller_shared_examples.rb b/spec/support/shared_examples/requests/clusters/integrations_controller_shared_examples.rb
new file mode 100644
index 00000000000..490c7d12115
--- /dev/null
+++ b/spec/support/shared_examples/requests/clusters/integrations_controller_shared_examples.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
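+# The including spec is expected to define `user` and to override the `path` and `redirect_path` lets, which raise NotImplementedError by default.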
+RSpec.shared_examples '#create_or_update action' do
+ let(:params) do
+ { integration: { application_type: Clusters::Applications::Prometheus.application_name, enabled: true } }
+ end
+
+ let(:path) { raise NotImplementedError }
+ let(:redirect_path) { raise NotImplementedError }
+
+ describe 'authorization' do
+ subject do
+ post path, params: params
+ end
+
+ it_behaves_like 'a secure endpoint'
+ end
+
+ describe 'functionality' do
+ before do
+ sign_in(user)
+ end
+
+ it 'redirects on success' do
+ post path, params: params
+
+ expect(response).to have_gitlab_http_status(:redirect)
+ expect(response).to redirect_to(redirect_path)
+ expect(flash[:notice]).to be_present
+ end
+
+ it 'redirects on error' do
+ error = ServiceResponse.error(message: 'failed')
+
+ expect_next_instance_of(Clusters::Integrations::CreateService) do |service|
+ expect(service).to receive(:execute).and_return(error)
+ end
+
+ post path, params: params
+
+ expect(response).to have_gitlab_http_status(:redirect)
+ expect(response).to redirect_to(redirect_path)
+ expect(flash[:alert]).to eq(error.message)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb b/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb
new file mode 100644
index 00000000000..00146335ef7
--- /dev/null
+++ b/spec/support/shared_examples/serializers/environment_serializer_shared_examples.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+RSpec.shared_examples 'avoid N+1 on environments serialization' do
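+  # The including spec is expected to provide `project`, `user`, and a `create_environment_with_associations(project)` helper.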
+ it 'avoids N+1 database queries with grouping', :request_store do
+ create_environment_with_associations(project)
+
+ control = ActiveRecord::QueryRecorder.new { serialize(grouping: true) }
+
+ create_environment_with_associations(project)
+
+ expect { serialize(grouping: true) }.not_to exceed_query_limit(control.count)
+ end
+
+ it 'avoids N+1 database queries without grouping', :request_store do
+ create_environment_with_associations(project)
+
+ control = ActiveRecord::QueryRecorder.new { serialize(grouping: false) }
+
+ create_environment_with_associations(project)
+
+ expect { serialize(grouping: false) }.not_to exceed_query_limit(control.count)
+ end
+
+ def serialize(grouping:)
+ EnvironmentSerializer.new(current_user: user, project: project).yield_self do |serializer|
+ serializer.within_folders if grouping
+ serializer.represent(Environment.where(project: project))
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/boards/lists_destroy_service_shared_examples.rb b/spec/support/shared_examples/services/boards/lists_destroy_service_shared_examples.rb
index 6a4f284ec54..94da405e491 100644
--- a/spec/support/shared_examples/services/boards/lists_destroy_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/lists_destroy_service_shared_examples.rb
@@ -13,7 +13,7 @@ RSpec.shared_examples 'lists destroy service' do
development = create(:list, board: board, position: 0)
review = create(:list, board: board, position: 1)
staging = create(:list, board: board, position: 2)
- closed = board.closed_list
+ closed = board.lists.closed.first
described_class.new(parent, user).execute(development)
@@ -24,7 +24,7 @@ RSpec.shared_examples 'lists destroy service' do
end
it 'does not remove list from board when list type is closed' do
- list = board.closed_list
+ list = board.lists.closed.first
service = described_class.new(parent, user)
expect { service.execute(list) }.not_to change(board.lists, :count)
diff --git a/spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb b/spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb
index 41fd286682e..e1143562661 100644
--- a/spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/boards/lists_list_service_shared_examples.rb
@@ -2,14 +2,34 @@
RSpec.shared_examples 'lists list service' do
context 'when the board has a backlog list' do
- let!(:backlog_list) { create(:backlog_list, board: board) }
+ let!(:backlog_list) { create_backlog_list(board) }
it 'does not create a backlog list' do
expect { service.execute(board) }.not_to change(board.lists, :count)
end
it "returns board's lists" do
- expect(service.execute(board)).to eq [backlog_list, list, board.closed_list]
+ expect(service.execute(board)).to eq [backlog_list, list, board.lists.closed.first]
+ end
+
+ context 'when hide_backlog_list is true' do
+ before do
+ board.update_column(:hide_backlog_list, true)
+ end
+
+ it 'hides backlog list' do
+ expect(service.execute(board)).to match_array([board.lists.closed.first, list])
+ end
+ end
+
+ context 'when hide_closed_list is true' do
+ before do
+ board.update_column(:hide_closed_list, true)
+ end
+
+ it 'hides closed list' do
+ expect(service.execute(board)).to match_array([backlog_list, list])
+ end
end
end
@@ -23,25 +43,21 @@ RSpec.shared_examples 'lists list service' do
end
it "returns board's lists" do
- expect(service.execute(board)).to eq [board.backlog_list, list, board.closed_list]
+ expect(service.execute(board)).to eq [board.lists.backlog.first, list, board.lists.closed.first]
end
end
context 'when wanting a specific list' do
- let!(:list1) { create(:list, board: board) }
-
it 'returns list specified by id' do
- service = described_class.new(parent, user, list_id: list1.id)
+ service = described_class.new(parent, user, list_id: list.id)
- expect(service.execute(board, create_default_lists: false)).to eq [list1]
+ expect(service.execute(board, create_default_lists: false)).to eq [list]
end
it 'returns empty result when list is not found' do
- external_board = create(:board, resource_parent: create(:project))
- external_list = create(:list, board: external_board)
- service = described_class.new(parent, user, list_id: external_list.id)
+ service = described_class.new(parent, user, list_id: unrelated_list.id)
- expect(service.execute(board, create_default_lists: false)).to eq(List.none)
+ expect(service.execute(board, create_default_lists: false)).to be_empty
end
end
end
diff --git a/spec/support/shared_examples/services/clusters/parse_cluster_applications_artifact_shared_examples.rb b/spec/support/shared_examples/services/clusters/parse_cluster_applications_artifact_shared_examples.rb
index cbe20928f98..466300017d9 100644
--- a/spec/support/shared_examples/services/clusters/parse_cluster_applications_artifact_shared_examples.rb
+++ b/spec/support/shared_examples/services/clusters/parse_cluster_applications_artifact_shared_examples.rb
@@ -41,7 +41,7 @@ RSpec.shared_examples 'parse cluster applications artifact' do |release_name|
end.to change(application_class, :count)
expect(cluster_application).to be_persisted
- expect(cluster_application).to be_installed
+ expect(cluster_application).to be_externally_installed
end
end
@@ -53,7 +53,7 @@ RSpec.shared_examples 'parse cluster applications artifact' do |release_name|
it 'marks the application as installed' do
described_class.new(job, user).execute(artifact)
- expect(cluster_application).to be_installed
+ expect(cluster_application).to be_externally_installed
end
end
end
diff --git a/spec/support/shared_examples/services/groups_count_service_shared_examples.rb b/spec/support/shared_examples/services/groups_count_service_shared_examples.rb
new file mode 100644
index 00000000000..84937c3d4d7
--- /dev/null
+++ b/spec/support/shared_examples/services/groups_count_service_shared_examples.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+# The calling spec should use `:use_clean_rails_memory_store_caching`
+# when including this shared example. E.g.:
+#
+# describe MyCountService, :use_clean_rails_memory_store_caching do
+# it_behaves_like 'a counter caching service with threshold'
+# end
+RSpec.shared_examples 'a counter caching service with threshold' do
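+  # The described class must define CACHED_COUNT_THRESHOLD, and `subject` must respond to `cache_key`, `uncached_count`, and `count`.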
+ let(:cache_key) { subject.cache_key }
+ let(:under_threshold) { described_class::CACHED_COUNT_THRESHOLD - 1 }
+ let(:over_threshold) { described_class::CACHED_COUNT_THRESHOLD + 1 }
+
+ context 'when cache is empty' do
+ before do
+ Rails.cache.delete(cache_key)
+ end
+
+ it 'refreshes cache if value over threshold' do
+ allow(subject).to receive(:uncached_count).and_return(over_threshold)
+
+ expect(subject.count).to eq(over_threshold)
+ expect(Rails.cache.read(cache_key)).to eq(over_threshold)
+ end
+
+ it 'does not refresh cache if value under threshold' do
+ allow(subject).to receive(:uncached_count).and_return(under_threshold)
+
+ expect(subject.count).to eq(under_threshold)
+ expect(Rails.cache.read(cache_key)).to be_nil
+ end
+ end
+
+ context 'when cached count is under the threshold value' do
+ before do
+ Rails.cache.write(cache_key, under_threshold)
+ end
+
+ it 'does not refresh cache' do
+ expect(Rails.cache).not_to receive(:write)
+ expect(subject.count).to eq(under_threshold)
+ end
+ end
+
+ context 'when cached count is over the threshold value' do
+ before do
+ Rails.cache.write(cache_key, over_threshold)
+ end
+
+ it 'does not refresh cache' do
+ expect(Rails.cache).not_to receive(:write)
+ expect(subject.count).to eq(over_threshold)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/issuable/destroy_service_shared_examples.rb b/spec/support/shared_examples/services/issuable/destroy_service_shared_examples.rb
new file mode 100644
index 00000000000..ccc287c10de
--- /dev/null
+++ b/spec/support/shared_examples/services/issuable/destroy_service_shared_examples.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
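+# The including spec is expected to define `subject` (the destroy service), `issuable`, and `group` (used as the feature flag actor).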
+shared_examples_for 'service deleting todos' do
+ before do
+ stub_feature_flags(destroy_issuable_todos_async: group)
+ end
+
+ it 'destroys associated todos asynchronously' do
+ expect(TodosDestroyer::DestroyedIssuableWorker)
+ .to receive(:perform_async)
+ .with(issuable.id, issuable.class.name)
+
+ subject.execute(issuable)
+ end
+
+ context 'when destroy_issuable_todos_async feature is disabled for group' do
+ before do
+ stub_feature_flags(destroy_issuable_todos_async: false)
+ end
+
+    it 'destroys associated todos synchronously' do
+ expect_next_instance_of(TodosDestroyer::DestroyedIssuableWorker) do |worker|
+ expect(worker)
+ .to receive(:perform)
+ .with(issuable.id, issuable.class.name)
+ end
+
+ subject.execute(issuable)
+ end
+ end
+end
diff --git a/spec/support/shared_examples/services/merge_request_shared_examples.rb b/spec/support/shared_examples/services/merge_request_shared_examples.rb
index 56179b6cd00..178b6bc47e1 100644
--- a/spec/support/shared_examples/services/merge_request_shared_examples.rb
+++ b/spec/support/shared_examples/services/merge_request_shared_examples.rb
@@ -73,3 +73,93 @@ RSpec.shared_examples 'merge request reviewers cache counters invalidator' do
described_class.new(project, user, {}).execute(merge_request)
end
end
+
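+# These shared examples expect the including spec to provide `service`, `project`, `user1`, `changes`, and `push_options`.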
+RSpec.shared_examples_for 'a service that can create a merge request' do
+ subject(:last_mr) { MergeRequest.last }
+
+ it 'creates a merge request with the correct target branch' do
+ branch = push_options[:target] || project.default_branch
+
+ expect { service.execute }.to change { MergeRequest.count }.by(1)
+ expect(last_mr.target_branch).to eq(branch)
+ end
+
+ context 'when project has been forked', :sidekiq_might_not_need_inline do
+ let(:forked_project) { fork_project(project, user1, repository: true) }
+ let(:service) { described_class.new(forked_project, user1, changes, push_options) }
+
+ before do
+ allow(forked_project).to receive(:empty_repo?).and_return(false)
+ end
+
+ it 'sets the correct source and target project' do
+ service.execute
+
+ expect(last_mr.source_project).to eq(forked_project)
+ expect(last_mr.target_project).to eq(project)
+ end
+ end
+end
+
+RSpec.shared_examples_for 'a service that does not create a merge request' do
+ it do
+ expect { service.execute }.not_to change { MergeRequest.count }
+ end
+end
+
+# In the non-foss version of GitLab a merge request can have many assignees,
+# so 'count' can be something other than 0 or 1. In the foss version there
+# can be only one assignee, so 'count' can only be 0 or 1.
+RSpec.shared_examples_for 'a service that can change assignees of a merge request' do |count|
+ subject(:last_mr) { MergeRequest.last }
+
+ it 'changes assignee count' do
+ service.execute
+
+ expect(last_mr.assignees.count).to eq(count)
+ end
+end
+
+RSpec.shared_examples 'with an existing branch that has a merge request open' do |count|
+ let(:changes) { existing_branch_changes }
+  let!(:merge_request) { create(:merge_request, source_project: project, source_branch: source_branch) }
+
+ it_behaves_like 'a service that does not create a merge request'
+ it_behaves_like 'a service that can change assignees of a merge request', count
+end
+
+RSpec.shared_examples 'when coupled with the `create` push option' do |count|
+ let(:push_options) { { create: true, assign: assigned, unassign: unassigned } }
+
+ it_behaves_like 'a service that can create a merge request'
+ it_behaves_like 'a service that can change assignees of a merge request', count
+end
+
+RSpec.shared_examples 'with a new branch' do |count|
+ let(:changes) { new_branch_changes }
+
+ it_behaves_like 'a service that does not create a merge request'
+
+ it 'adds an error to the service' do
+ service.execute
+
+ expect(service.errors).to include(error_mr_required)
+ end
+
+ it_behaves_like 'when coupled with the `create` push option', count
+end
+
+RSpec.shared_examples 'with an existing branch but no open MR' do |count|
+ let(:changes) { existing_branch_changes }
+
+ it_behaves_like 'a service that does not create a merge request'
+
+ it 'adds an error to the service' do
+ service.execute
+
+ expect(service.errors).to include(error_mr_required)
+ end
+
+ it_behaves_like 'when coupled with the `create` push option', count
+end
diff --git a/spec/support/shared_examples/services/notification_service_shared_examples.rb b/spec/support/shared_examples/services/notification_service_shared_examples.rb
index 43fe6789145..cfd674e3c43 100644
--- a/spec/support/shared_examples/services/notification_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/notification_service_shared_examples.rb
@@ -45,7 +45,7 @@ RSpec.shared_examples 'group emails are disabled' do
before do
reset_delivered_emails!
- target_group.clear_memoization(:emails_disabled)
+ target_group.clear_memoization(:emails_disabled_memoized)
end
it 'sends no emails with group emails disabled' do
diff --git a/spec/support/shared_examples/services/snippets_shared_examples.rb b/spec/support/shared_examples/services/snippets_shared_examples.rb
index 10add3a7299..0c4db7ded69 100644
--- a/spec/support/shared_examples/services/snippets_shared_examples.rb
+++ b/spec/support/shared_examples/services/snippets_shared_examples.rb
@@ -1,7 +1,8 @@
# frozen_string_literal: true
RSpec.shared_examples 'checking spam' do
- let(:request) { double(:request) }
+ let(:request) { double(:request, headers: headers) }
+ let(:headers) { nil }
let(:api) { true }
let(:captcha_response) { 'abc123' }
let(:spam_log_id) { 1 }
@@ -44,6 +45,44 @@ RSpec.shared_examples 'checking spam' do
subject
end
+ context 'when CAPTCHA arguments are passed in the headers' do
+ let(:headers) do
+ {
+ 'X-GitLab-Spam-Log-Id' => spam_log_id,
+ 'X-GitLab-Captcha-Response' => captcha_response
+ }
+ end
+
+ let(:extra_opts) do
+ {
+ request: request,
+ api: api,
+ disable_spam_action_service: disable_spam_action_service
+ }
+ end
+
+ it 'executes the SpamActionService correctly' do
+ spam_params = Spam::SpamParams.new(
+ api: api,
+ captcha_response: captcha_response,
+ spam_log_id: spam_log_id
+ )
+ expect_next_instance_of(
+ Spam::SpamActionService,
+ {
+ spammable: kind_of(Snippet),
+ request: request,
+ user: an_instance_of(User),
+ action: action
+ }
+ ) do |instance|
+ expect(instance).to receive(:execute).with(spam_params: spam_params)
+ end
+
+ subject
+ end
+ end
+
context 'when spam action service is disabled' do
let(:disable_spam_action_service) { true }
diff --git a/spec/support/shared_examples/workers/in_product_marketing_email_shared_example.rb b/spec/support/shared_examples/workers/in_product_marketing_email_shared_example.rb
new file mode 100644
index 00000000000..c4391f61369
--- /dev/null
+++ b/spec/support/shared_examples/workers/in_product_marketing_email_shared_example.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
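+# The including spec must set `in_product_marketing_emails_enabled`, `experiment_active`, `is_gitlab_com`, and `executes_service` (the expected number of service calls).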
+RSpec.shared_examples 'in-product marketing email' do
+ before do
+ stub_application_setting(in_product_marketing_emails_enabled: in_product_marketing_emails_enabled)
+ stub_experiment(in_product_marketing_emails: experiment_active)
+ allow(::Gitlab).to receive(:com?).and_return(is_gitlab_com)
+ end
+
+  it 'executes the email service' do
+ expect(Namespaces::InProductMarketingEmailsService).to receive(:send_for_all_tracks_and_intervals).exactly(executes_service).times
+
+ subject.perform
+ end
+end
diff --git a/spec/support/shared_examples/workers/worker_with_data_consistency_shared_example.rb b/spec/support/shared_examples/workers/worker_with_data_consistency_shared_example.rb
new file mode 100644
index 00000000000..ec09c0380f9
--- /dev/null
+++ b/spec/support/shared_examples/workers/worker_with_data_consistency_shared_example.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
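+# Verifies that `worker_class` reports the expected data consistency and that `get_data_consistency_feature_flag_enabled?` respects the optional feature flag.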
+RSpec.shared_examples 'worker with data consistency' do |worker_class, data_consistency: :always, feature_flag: nil|
+ describe '.get_data_consistency_feature_flag_enabled?' do
+ it 'returns true' do
+ expect(worker_class.get_data_consistency_feature_flag_enabled?).to be(true)
+ end
+
+ if feature_flag
+ context "when feature flag :#{feature_flag} is disabled" do
+ before do
+ stub_feature_flags(feature_flag => false)
+ end
+
+ it 'returns false' do
+ expect(worker_class.get_data_consistency_feature_flag_enabled?).to be(false)
+ end
+ end
+ end
+ end
+
+ describe '.get_data_consistency' do
+ it 'returns correct data consistency' do
+ expect(worker_class.get_data_consistency).to eq(data_consistency)
+ end
+ end
+end
diff --git a/spec/support/sidekiq_middleware.rb b/spec/support/sidekiq_middleware.rb
index 62f81ef1669..cbd6163d46b 100644
--- a/spec/support/sidekiq_middleware.rb
+++ b/spec/support/sidekiq_middleware.rb
@@ -15,20 +15,14 @@ end
# If Sidekiq::Testing.inline! is used, SQL transactions done inside
# Sidekiq worker are included in the SQL query limit (in a real
-# deployment sidekiq worker is executed separately). To avoid
-# increasing SQL limit counter, the request is marked as whitelisted
-# during Sidekiq block
+# deployment the Sidekiq worker is executed separately). To avoid increasing
+# the SQL limit counter, query limiting is disabled during the Sidekiq block.
class DisableQueryLimit
def call(worker_instance, msg, queue)
- transaction = Gitlab::QueryLimiting::Transaction.current
-
- if !transaction.respond_to?(:whitelisted) || transaction.whitelisted
- yield
- else
- transaction.whitelisted = true
- yield
- transaction.whitelisted = false
- end
+ ::Gitlab::QueryLimiting.disable!('https://mock-issue')
+ yield
+ ensure
+ ::Gitlab::QueryLimiting.enable!
end
end
diff --git a/spec/support_specs/helpers/active_record/query_recorder_spec.rb b/spec/support_specs/helpers/active_record/query_recorder_spec.rb
index f968f511a2a..f1af9ceffb9 100644
--- a/spec/support_specs/helpers/active_record/query_recorder_spec.rb
+++ b/spec/support_specs/helpers/active_record/query_recorder_spec.rb
@@ -11,6 +11,72 @@ RSpec.describe ActiveRecord::QueryRecorder do
end
end
+ describe 'printing to the log' do
+ let(:backtrace) { %r{QueryRecorder backtrace: --> (\w+/)*\w+\.rb:\d+:in `.*'} }
+ let(:duration_line) { %r{QueryRecorder DURATION: --> \d+\.\d+} }
+
+ def expect_section(query, lines)
+ query_lines = lines.take(query.size)
+
+ # the query comes first
+ expect(query_lines).to match(query)
+
+ # followed by the duration
+ expect(lines[query.size]).to match(duration_line)
+
+ # and then one or more lines of backtrace
+ backtrace_lines = lines.drop(query.size + 1).take_while { |line| line.match(backtrace) }
+ expect(backtrace_lines).not_to be_empty
+
+ # Advance to the next section
+ lines.drop(query.size + 1 + backtrace_lines.size)
+ end
+
+ it 'prints SQL, duration and backtrace, all prefixed with QueryRecorder', :aggregate_failures do
+ io = StringIO.new
+
+ control = ActiveRecord::QueryRecorder.new(log_file: io, query_recorder_debug: true) do
+ TestQueries.count
+ TestQueries.first
+ TestQueries.where(<<~FRAGMENT).to_a # tests multi-line SQL
+ version = 'foo'
+ OR
+ version = 'bar'
+ FRAGMENT
+ end
+
+ query_a = start_with(%q[QueryRecorder SQL: --> SELECT COUNT(*) FROM "schema_migrations"])
+
+ query_b = start_with(%q[QueryRecorder SQL: --> SELECT "schema_migrations".* FROM "schema_migrations" ORDER BY "schema_migrations"."version" ASC LIMIT 1])
+
+ query_c_a = eq(%q[QueryRecorder SQL: --> SELECT "schema_migrations".* FROM "schema_migrations" WHERE (version = 'foo'])
+ query_c_b = eq(%q(QueryRecorder SQL: --> OR))
+ query_c_c = eq(%q(QueryRecorder SQL: --> version = 'bar'))
+ query_c_d = start_with("QueryRecorder SQL: --> )")
+
+ expect(control.count).to eq(3)
+
+ lines = io.string.lines.map(&:chomp)
+
+ expect(lines).to all(start_with('QueryRecorder'))
+ lines = expect_section([query_a], lines)
+ lines = expect_section([query_b], lines)
+ lines = expect_section([query_c_a, query_c_b, query_c_c, query_c_d], lines)
+
+ expect(lines).to be_empty
+ end
+ end
+
+ it 'includes duration information' do
+ control = ActiveRecord::QueryRecorder.new do
+ TestQueries.count
+ TestQueries.first
+ end
+
+ expect(control.count).to eq(2)
+ expect(control.data.values.flat_map { _1[:durations] }).to match([be > 0, be > 0])
+ end
+
describe 'detecting the right number of calls and their origin' do
it 'detects two separate queries' do
control = ActiveRecord::QueryRecorder.new query_recorder_debug: true do
@@ -23,10 +89,10 @@ RSpec.describe ActiveRecord::QueryRecorder do
.to eq(control.data.keys.size)
# Ensure exactly 2 COUNT queries were detected
expect(control.occurrences_by_line_method.last[1][:occurrences]
- .find_all {|i| i.match(/SELECT COUNT/) }.count).to eq(2)
+ .count { |str| str.start_with?('SELECT COUNT') }).to eq(2)
# Ensure exactly 1 LIMIT 1 (#first)
expect(control.occurrences_by_line_method.first[1][:occurrences]
- .find_all { |i| i.match(/ORDER BY.*#{TestQueries.table_name}.*LIMIT 1/) }.count).to eq(1)
+ .count { |str| str.match(/ORDER BY.*#{TestQueries.table_name}.*LIMIT 1/) }).to eq(1)
# Ensure 3 DB calls overall were executed
expect(control.log.size).to eq(3)
diff --git a/spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb b/spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb
index 6d8d9ba0754..67d87fe3c2f 100644
--- a/spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb
+++ b/spec/support_specs/matchers/exceed_query_limit_helpers_spec.rb
@@ -225,6 +225,16 @@ RSpec.describe ExceedQueryLimitHelpers do
expect(test_matcher.actual_count).to eq(2)
end
+ it 'can filter specific models' do
+ test_matcher = TestMatcher.new.for_model(TestQueries)
+ test_matcher.verify_count do
+ TestQueries.first
+ TestQueries.connection.execute('select 1')
+ end
+
+ expect(test_matcher.actual_count).to eq(1)
+ end
+
it 'can ignore specific queries' do
test_matcher = TestMatcher.new.ignoring(/foobar/)
test_matcher.verify_count do
diff --git a/spec/tasks/gitlab/gitaly_rake_spec.rb b/spec/tasks/gitlab/gitaly_rake_spec.rb
index efc983d526f..1b38580f484 100644
--- a/spec/tasks/gitlab/gitaly_rake_spec.rb
+++ b/spec/tasks/gitlab/gitaly_rake_spec.rb
@@ -41,6 +41,7 @@ RSpec.describe 'gitlab:gitaly namespace rake task' do
describe 'checkout or clone' do
before do
+ stub_env('CI', false)
expect(Dir).to receive(:chdir).with(clone_path)
end
@@ -86,18 +87,14 @@ RSpec.describe 'gitlab:gitaly namespace rake task' do
end
context 'when Rails.env is test' do
- let(:command) do
- %W[make
- BUNDLE_FLAGS=--no-deployment
- GEM_HOME=#{Bundler.bundle_path}]
- end
+ let(:command) { %w[make] }
before do
stub_rails_env('test')
end
- it 'calls make in the gitaly directory with --no-deployment flag for bundle' do
- expect(Gitlab::Popen).to receive(:popen).with(command, nil, { "BUNDLE_GEMFILE" => nil, "RUBYOPT" => nil }).and_return(true)
+ it 'calls make in the gitaly directory with BUNDLE_DEPLOYMENT and GEM_HOME variables' do
+ expect(Gitlab::Popen).to receive(:popen).with(command, nil, { "BUNDLE_GEMFILE" => nil, "RUBYOPT" => nil, "BUNDLE_DEPLOYMENT" => 'false', "GEM_HOME" => Bundler.bundle_path.to_s }).and_return(true)
subject
end
diff --git a/spec/tasks/gitlab/pages_rake_spec.rb b/spec/tasks/gitlab/pages_rake_spec.rb
index 08194f4d1c9..664899c361b 100644
--- a/spec/tasks/gitlab/pages_rake_spec.rb
+++ b/spec/tasks/gitlab/pages_rake_spec.rb
@@ -12,10 +12,9 @@ RSpec.describe 'gitlab:pages' do
it 'calls migration service' do
expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything,
- migration_threads: 3,
- batch_size: 10,
- ignore_invalid_entries: false) do |service|
- expect(service).to receive(:execute).and_call_original
+ ignore_invalid_entries: false,
+ mark_projects_as_not_deployed: false) do |service|
+ expect(service).to receive(:execute_with_threads).with(threads: 3, batch_size: 10).and_call_original
end
subject
@@ -25,10 +24,9 @@ RSpec.describe 'gitlab:pages' do
stub_env('PAGES_MIGRATION_THREADS', '5')
expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything,
- migration_threads: 5,
- batch_size: 10,
- ignore_invalid_entries: false) do |service|
- expect(service).to receive(:execute).and_call_original
+ ignore_invalid_entries: false,
+ mark_projects_as_not_deployed: false) do |service|
+ expect(service).to receive(:execute_with_threads).with(threads: 5, batch_size: 10).and_call_original
end
subject
@@ -38,10 +36,9 @@ RSpec.describe 'gitlab:pages' do
stub_env('PAGES_MIGRATION_BATCH_SIZE', '100')
expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything,
- migration_threads: 3,
- batch_size: 100,
- ignore_invalid_entries: false) do |service|
- expect(service).to receive(:execute).and_call_original
+ ignore_invalid_entries: false,
+ mark_projects_as_not_deployed: false) do |service|
+ expect(service).to receive(:execute_with_threads).with(threads: 3, batch_size: 100).and_call_original
end
subject
@@ -51,10 +48,21 @@ RSpec.describe 'gitlab:pages' do
stub_env('PAGES_MIGRATION_IGNORE_INVALID_ENTRIES', 'true')
expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything,
- migration_threads: 3,
- batch_size: 10,
- ignore_invalid_entries: true) do |service|
- expect(service).to receive(:execute).and_call_original
+ ignore_invalid_entries: true,
+ mark_projects_as_not_deployed: false) do |service|
+ expect(service).to receive(:execute_with_threads).with(threads: 3, batch_size: 10).and_call_original
+ end
+
+ subject
+ end
+
+ it 'uses PAGES_MIGRATION_MARK_PROJECTS_AS_NOT_DEPLOYED environment variable' do
+ stub_env('PAGES_MIGRATION_MARK_PROJECTS_AS_NOT_DEPLOYED', 'true')
+
+ expect_next_instance_of(::Pages::MigrateFromLegacyStorageService, anything,
+ ignore_invalid_entries: false,
+ mark_projects_as_not_deployed: true) do |service|
+ expect(service).to receive(:execute_with_threads).with(threads: 3, batch_size: 10).and_call_original
end
subject
@@ -78,4 +86,80 @@ RSpec.describe 'gitlab:pages' do
expect(PagesDeployment.find_by_id(migrated_deployment.id)).to be_nil
end
end
+
+ describe 'gitlab:pages:deployments:migrate_to_object_storage' do
+ subject { run_rake_task('gitlab:pages:deployments:migrate_to_object_storage') }
+
+ before do
+ stub_pages_object_storage(::Pages::DeploymentUploader, enabled: object_storage_enabled)
+ end
+
+ let!(:deployment) { create(:pages_deployment, file_store: store) }
+ let(:object_storage_enabled) { true }
+
+ context 'when local storage is used' do
+ let(:store) { ObjectStorage::Store::LOCAL }
+
+ context 'and remote storage is defined' do
+ it 'migrates file to remote storage' do
+ subject
+
+ expect(deployment.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+
+ context 'and remote storage is not defined' do
+ let(:object_storage_enabled) { false }
+
+ it 'fails to migrate to remote storage' do
+ subject
+
+ expect(deployment.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
+ end
+ end
+ end
+
+ context 'when remote storage is used' do
+ let(:store) { ObjectStorage::Store::REMOTE }
+
+      it 'keeps the file on remote storage' do
+ subject
+
+ expect(deployment.reload.file_store).to eq(ObjectStorage::Store::REMOTE)
+ end
+ end
+ end
+
+ describe 'gitlab:pages:deployments:migrate_to_local' do
+ subject { run_rake_task('gitlab:pages:deployments:migrate_to_local') }
+
+ before do
+ stub_pages_object_storage(::Pages::DeploymentUploader, enabled: object_storage_enabled)
+ end
+
+ let!(:deployment) { create(:pages_deployment, file_store: store) }
+ let(:object_storage_enabled) { true }
+
+ context 'when remote storage is used' do
+ let(:store) { ObjectStorage::Store::REMOTE }
+
+ context 'and job has remote file store defined' do
+ it 'migrates file to local storage' do
+ subject
+
+ expect(deployment.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
+ end
+ end
+ end
+
+ context 'when local storage is used' do
+ let(:store) { ObjectStorage::Store::LOCAL }
+
+      it 'keeps the file on local storage' do
+ subject
+
+ expect(deployment.reload.file_store).to eq(ObjectStorage::Store::LOCAL)
+ end
+ end
+ end
end
diff --git a/spec/tasks/gitlab/usage_data_rake_spec.rb b/spec/tasks/gitlab/usage_data_rake_spec.rb
index 0ee6fbef53f..84269568b8b 100644
--- a/spec/tasks/gitlab/usage_data_rake_spec.rb
+++ b/spec/tasks/gitlab/usage_data_rake_spec.rb
@@ -3,22 +3,12 @@
require 'rake_helper'
RSpec.describe 'gitlab:usage data rake tasks' do
+ include UsageDataHelpers
+
before do
Rake.application.rake_require 'tasks/gitlab/usage_data'
# stub prometheus external http calls https://gitlab.com/gitlab-org/gitlab/-/issues/245277
- stub_request(:get, %r{^http[s]?://::1:9090/-/ready})
- .to_return(
- status: 200,
- body: [{}].to_json,
- headers: { 'Content-Type' => 'application/json' }
- )
-
- stub_request(:get, %r{^http[s]?://::1:9090/api/v1/query\?query=.*})
- .to_return(
- status: 200,
- body: [{}].to_json,
- headers: { 'Content-Type' => 'application/json' }
- )
+ stub_prometheus_queries
end
describe 'dump_sql_in_yaml' do
diff --git a/spec/tooling/danger/changelog_spec.rb b/spec/tooling/danger/changelog_spec.rb
index b74039b3cd1..7ea2288fd45 100644
--- a/spec/tooling/danger/changelog_spec.rb
+++ b/spec/tooling/danger/changelog_spec.rb
@@ -161,23 +161,42 @@ RSpec.describe Tooling::Danger::Changelog do
describe '#modified_text' do
subject { changelog.modified_text }
- context "when title is not changed from sanitization", :aggregate_failures do
- let(:mr_title) { 'Fake Title' }
+ context 'when in CI context' do
+ shared_examples 'changelog modified text' do |key|
+ specify do
+ expect(subject).to include('CHANGELOG.md was edited')
+ expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
+ expect(subject).to include('bin/changelog --ee -m 1234 "Fake Title"')
+ end
+ end
- specify do
- expect(subject).to include('CHANGELOG.md was edited')
- expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
- expect(subject).to include('bin/changelog --ee -m 1234 "Fake Title"')
+ before do
+ allow(fake_helper).to receive(:ci?).and_return(true)
+ end
+
+ context "when title is not changed from sanitization", :aggregate_failures do
+ let(:mr_title) { 'Fake Title' }
+
+ it_behaves_like 'changelog modified text'
+ end
+
+ context "when title needs sanitization", :aggregate_failures do
+ let(:mr_title) { 'DRAFT: Fake Title' }
+
+ it_behaves_like 'changelog modified text'
end
end
- context "when title needs sanitization", :aggregate_failures do
- let(:mr_title) { 'DRAFT: Fake Title' }
+ context 'when in local context' do
+ let(:mr_title) { 'Fake Title' }
+
+ before do
+ allow(fake_helper).to receive(:ci?).and_return(false)
+ end
specify do
expect(subject).to include('CHANGELOG.md was edited')
- expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
- expect(subject).to include('bin/changelog --ee -m 1234 "Fake Title"')
+ expect(subject).not_to include('bin/changelog')
end
end
end
@@ -187,56 +206,116 @@ RSpec.describe Tooling::Danger::Changelog do
subject { changelog.required_texts }
- shared_examples 'changelog required text' do |key|
- specify do
- expect(subject).to have_key(key)
- expect(subject[key]).to include('CHANGELOG missing')
- expect(subject[key]).to include('bin/changelog -m 1234 "Fake Title"')
- expect(subject[key]).not_to include('--ee')
+ context 'when in CI context' do
+ before do
+ allow(fake_helper).to receive(:ci?).and_return(true)
end
- end
- context 'with a new migration file' do
- let(:changes) { changes_class.new([change_class.new('foo', :added, :migration)]) }
+ shared_examples 'changelog required text' do |key|
+ specify do
+ expect(subject).to have_key(key)
+ expect(subject[key]).to include('CHANGELOG missing')
+ expect(subject[key]).to include('bin/changelog -m 1234 "Fake Title"')
+ expect(subject[key]).not_to include('--ee')
+ end
+ end
- context "when title is not changed from sanitization", :aggregate_failures do
- it_behaves_like 'changelog required text', :db_changes
+ context 'with a new migration file' do
+ let(:changes) { changes_class.new([change_class.new('foo', :added, :migration)]) }
+
+ context "when title is not changed from sanitization", :aggregate_failures do
+ it_behaves_like 'changelog required text', :db_changes
+ end
+
+ context "when title needs sanitization", :aggregate_failures do
+ let(:mr_title) { 'DRAFT: Fake Title' }
+
+ it_behaves_like 'changelog required text', :db_changes
+ end
end
- context "when title needs sanitization", :aggregate_failures do
- let(:mr_title) { 'DRAFT: Fake Title' }
+ context 'with a removed feature flag file' do
+ let(:changes) { changes_class.new([change_class.new('foo', :deleted, :feature_flag)]) }
- it_behaves_like 'changelog required text', :db_changes
+ it_behaves_like 'changelog required text', :feature_flag_removed
end
end
- context 'with a removed feature flag file' do
- let(:changes) { changes_class.new([change_class.new('foo', :deleted, :feature_flag)]) }
+ context 'when in local context' do
+ before do
+ allow(fake_helper).to receive(:ci?).and_return(false)
+ end
+
+ shared_examples 'changelog required text' do |key|
+ specify do
+ expect(subject).to have_key(key)
+ expect(subject[key]).to include('CHANGELOG missing')
+ expect(subject[key]).not_to include('bin/changelog')
+ expect(subject[key]).not_to include('--ee')
+ end
+ end
+
+ context 'with a new migration file' do
+ let(:changes) { changes_class.new([change_class.new('foo', :added, :migration)]) }
+
+ context "when title is not changed from sanitization", :aggregate_failures do
+ it_behaves_like 'changelog required text', :db_changes
+ end
+
+ context "when title needs sanitization", :aggregate_failures do
+ let(:mr_title) { 'DRAFT: Fake Title' }
- it_behaves_like 'changelog required text', :feature_flag_removed
+ it_behaves_like 'changelog required text', :db_changes
+ end
+ end
+
+ context 'with a removed feature flag file' do
+ let(:changes) { changes_class.new([change_class.new('foo', :deleted, :feature_flag)]) }
+
+ it_behaves_like 'changelog required text', :feature_flag_removed
+ end
end
end
describe '#optional_text' do
subject { changelog.optional_text }
- context "when title is not changed from sanitization", :aggregate_failures do
- let(:mr_title) { 'Fake Title' }
+ context 'when in CI context' do
+ shared_examples 'changelog optional text' do |key|
+ specify do
+ expect(subject).to include('CHANGELOG missing')
+ expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
+ expect(subject).to include('bin/changelog --ee -m 1234 "Fake Title"')
+ end
+ end
- specify do
- expect(subject).to include('CHANGELOG missing')
- expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
- expect(subject).to include('bin/changelog --ee -m 1234 "Fake Title"')
+ before do
+ allow(fake_helper).to receive(:ci?).and_return(true)
+ end
+
+ context "when title is not changed from sanitization", :aggregate_failures do
+ let(:mr_title) { 'Fake Title' }
+
+ it_behaves_like 'changelog optional text'
+ end
+
+ context "when title needs sanitization", :aggregate_failures do
+ let(:mr_title) { 'DRAFT: Fake Title' }
+
+ it_behaves_like 'changelog optional text'
end
end
- context "when title needs sanitization", :aggregate_failures do
- let(:mr_title) { 'DRAFT: Fake Title' }
+ context 'when in local context' do
+ let(:mr_title) { 'Fake Title' }
+
+ before do
+ allow(fake_helper).to receive(:ci?).and_return(false)
+ end
specify do
expect(subject).to include('CHANGELOG missing')
- expect(subject).to include('bin/changelog -m 1234 "Fake Title"')
- expect(subject).to include('bin/changelog --ee -m 1234 "Fake Title"')
+ expect(subject).not_to include('bin/changelog')
end
end
end
diff --git a/spec/tooling/danger/project_helper_spec.rb b/spec/tooling/danger/project_helper_spec.rb
index a8fda901b4a..5d106f08402 100644
--- a/spec/tooling/danger/project_helper_spec.rb
+++ b/spec/tooling/danger/project_helper_spec.rb
@@ -2,7 +2,8 @@
require 'rspec-parameterized'
require 'gitlab-dangerfiles'
-require 'danger/helper'
+require 'danger'
+require 'danger/plugins/helper'
require 'gitlab/dangerfiles/spec_helper'
require_relative '../../../danger/plugins/project_helper'
@@ -43,7 +44,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do
end
where(:path, :expected_categories) do
- 'usage_data.rb' | [:database, :backend]
+ 'usage_data.rb' | [:database, :backend, :product_intelligence]
'doc/foo.md' | [:docs]
'CONTRIBUTING.md' | [:docs]
'LICENSE' | [:docs]
@@ -140,6 +141,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'ee/db/geo/post_migrate/foo' | [:database, :migration]
'app/models/project_authorization.rb' | [:database]
'app/services/users/refresh_authorized_projects_service.rb' | [:database]
+ 'app/services/authorized_project_update/find_records_due_for_refresh_service.rb' | [:database]
'lib/gitlab/background_migration.rb' | [:database]
'lib/gitlab/background_migration/foo' | [:database]
'ee/lib/gitlab/background_migration/foo' | [:database]
@@ -157,6 +159,9 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'qa/foo' | [:qa]
'ee/qa/foo' | [:qa]
+ 'workhorse/main.go' | [:workhorse]
+ 'workhorse/internal/upload/upload.go' | [:workhorse]
+
'changelogs/foo' | [:none]
'ee/changelogs/foo' | [:none]
'locale/gitlab.pot' | [:none]
@@ -168,6 +173,21 @@ RSpec.describe Tooling::Danger::ProjectHelper do
'foo/bar.js' | [:frontend]
'foo/bar.txt' | [:none]
'foo/bar.md' | [:none]
+
+ 'ee/config/metrics/counts_7d/20210216174919_g_analytics_issues_weekly.yml' | [:product_intelligence]
+ 'lib/gitlab/usage_data_counters/aggregated_metrics/common.yml' | [:product_intelligence]
+ 'lib/gitlab/usage_data_counters/hll_redis_counter.rb' | [:backend, :product_intelligence]
+ 'doc/development/usage_ping/dictionary.md' | [:docs, :product_intelligence]
+ 'lib/gitlab/tracking.rb' | [:backend, :product_intelligence]
+ 'spec/lib/gitlab/tracking_spec.rb' | [:backend, :product_intelligence]
+ 'app/helpers/tracking_helper.rb' | [:backend, :product_intelligence]
+ 'spec/helpers/tracking_helper_spec.rb' | [:backend, :product_intelligence]
+ 'lib/generators/rails/usage_metric_definition_generator.rb' | [:backend, :product_intelligence]
+ 'spec/lib/generators/usage_metric_definition_generator_spec.rb' | [:backend, :product_intelligence]
+ 'config/metrics/schema.json' | [:product_intelligence]
+ 'app/assets/javascripts/tracking.js' | [:frontend, :product_intelligence]
+ 'spec/frontend/tracking_spec.js' | [:frontend, :product_intelligence]
+ 'lib/gitlab/usage_database/foo.rb' | [:backend]
end
with_them do
@@ -178,12 +198,12 @@ RSpec.describe Tooling::Danger::ProjectHelper do
context 'having specific changes' do
where(:expected_categories, :patch, :changed_files) do
- [:database, :backend] | '+ count(User.active)' | ['usage_data.rb', 'lib/gitlab/usage_data.rb', 'ee/lib/ee/gitlab/usage_data.rb']
- [:database, :backend] | '+ estimate_batch_distinct_count(User.active)' | ['usage_data.rb']
- [:backend] | '+ alt_usage_data(User.active)' | ['usage_data.rb']
- [:backend] | '+ count(User.active)' | ['user.rb']
- [:backend] | '+ count(User.active)' | ['usage_data/topology.rb']
- [:backend] | '+ foo_count(User.active)' | ['usage_data.rb']
+ [:database, :backend, :product_intelligence] | '+ count(User.active)' | ['usage_data.rb', 'lib/gitlab/usage_data.rb', 'ee/lib/ee/gitlab/usage_data.rb']
+ [:database, :backend, :product_intelligence] | '+ estimate_batch_distinct_count(User.active)' | ['usage_data.rb']
+ [:backend, :product_intelligence] | '+ alt_usage_data(User.active)' | ['lib/gitlab/usage_data.rb']
+ [:backend, :product_intelligence] | '+ count(User.active)' | ['lib/gitlab/usage_data/topology.rb']
+ [:backend, :product_intelligence] | '+ foo_count(User.active)' | ['lib/gitlab/usage_data.rb']
+ [:backend] | '+ count(User.active)' | ['user.rb']
end
with_them do
@@ -200,7 +220,7 @@ RSpec.describe Tooling::Danger::ProjectHelper do
describe '.local_warning_message' do
it 'returns an informational message with rules that can run' do
- expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changes_size, commit_messages, database, documentation, duplicate_yarn_dependencies, eslint, karma, pajamas, pipeline, prettier, product_intelligence, utility_css')
+ expect(described_class.local_warning_message).to eq('==> Only the following Danger rules can be run locally: changelog, changes_size, commit_messages, database, datateam, documentation, duplicate_yarn_dependencies, eslint, karma, pajamas, pipeline, prettier, product_intelligence, utility_css')
end
end
diff --git a/spec/tooling/lib/tooling/kubernetes_client_spec.rb b/spec/tooling/lib/tooling/kubernetes_client_spec.rb
index 4a84ec09b5c..636727401af 100644
--- a/spec/tooling/lib/tooling/kubernetes_client_spec.rb
+++ b/spec/tooling/lib/tooling/kubernetes_client_spec.rb
@@ -123,6 +123,16 @@ RSpec.describe Tooling::KubernetesClient do
it_behaves_like 'a kubectl command to delete resources by older than given creation time'
end
+
+ context 'with no resources found' do
+ let(:resource_names) { [] }
+
+ it 'does not call #delete_by_exact_names' do
+ expect(subject).not_to receive(:delete_by_exact_names)
+
+ subject.cleanup_by_created_at(resource_type: resource_type, created_before: two_days_ago)
+ end
+ end
end
describe '#raw_resource_names' do
diff --git a/spec/tooling/merge_request_spec.rb b/spec/tooling/merge_request_spec.rb
new file mode 100644
index 00000000000..de6fd48ad9f
--- /dev/null
+++ b/spec/tooling/merge_request_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'webmock/rspec'
+
+require_relative '../../tooling/merge_request'
+require_relative '../support/helpers/next_instance_of'
+
+RSpec.describe Tooling::MergeRequest do
+ let(:project_path) { 'gitlab-org/gitlab' }
+ let(:branch_name) { 'my-branch' }
+ let(:merge_request_iid) { 123 }
+ let(:merge_requests) { [{ 'iid' => merge_request_iid }] }
+
+ describe '.for' do
+ let(:stub_api) do
+ stub_request(:get, "https://gitlab.com/api/v4/projects/gitlab-org%2Fgitlab/merge_requests")
+ .and_return(body: merge_requests)
+ end
+
+ before do
+ stub_api.with(query: { source_branch: branch_name, order_by: 'updated_at', sort: 'desc' })
+ end
+
+ it 'fetches merge request for local branch in the given GitLab project path' do
+ merge_request = described_class.for(branch: branch_name, project_path: project_path)
+
+ expect(merge_request.iid).to eq(merge_request_iid)
+ expect(stub_api).to have_been_requested.once
+ end
+ end
+end
diff --git a/spec/lib/quality/test_level_spec.rb b/spec/tooling/quality/test_level_spec.rb
index 32960cd571b..89abe337347 100644
--- a/spec/lib/quality/test_level_spec.rb
+++ b/spec/tooling/quality/test_level_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require_relative '../../../tooling/quality/test_level'
RSpec.describe Quality::TestLevel do
describe '#pattern' do
@@ -197,7 +197,7 @@ RSpec.describe Quality::TestLevel do
it 'raises an error for an unknown level' do
expect { subject.level_for('spec/unknown/foo_spec.rb') }
.to raise_error(described_class::UnknownTestLevelError,
- %r{Test level for spec/unknown/foo_spec.rb couldn't be set. Please rename the file properly or change the test level detection regexes in .+/lib/quality/test_level.rb.})
+ %r{Test level for spec/unknown/foo_spec.rb couldn't be set. Please rename the file properly or change the test level detection regexes in .+/tooling/quality/test_level.rb.})
end
end
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
index a1d8695a8c9..b454b0ad8f8 100644
--- a/spec/uploaders/object_storage_spec.rb
+++ b/spec/uploaders/object_storage_spec.rb
@@ -441,6 +441,22 @@ RSpec.describe ObjectStorage do
end
end
+ shared_examples 'extracts base filename' do
+ it "returns true for ExtractsBase" do
+ expect(subject[:FeatureFlagExtractBase]).to be true
+ end
+
+ context 'when workhorse_extract_filename_base is disabled' do
+ before do
+ stub_feature_flags(workhorse_extract_filename_base: false)
+ end
+
+ it "returns false for ExtractsBase" do
+ expect(subject[:FeatureFlagExtractBase]).to be false
+ end
+ end
+ end
+
shared_examples 'uses local storage' do
it_behaves_like 'returns the maximum size given' do
it "returns temporary path" do
@@ -502,6 +518,7 @@ RSpec.describe ObjectStorage do
end
it_behaves_like 'uses local storage'
+ it_behaves_like 'extracts base filename'
end
context 'when object storage is enabled' do
@@ -509,6 +526,8 @@ RSpec.describe ObjectStorage do
allow(Gitlab.config.uploads.object_store).to receive(:enabled) { true }
end
+ it_behaves_like 'extracts base filename'
+
context 'when direct upload is enabled' do
before do
allow(Gitlab.config.uploads.object_store).to receive(:direct_upload) { true }
diff --git a/spec/validators/json_schema_validator_spec.rb b/spec/validators/json_schema_validator_spec.rb
index 1e9420c5422..83eb0e2f3dd 100644
--- a/spec/validators/json_schema_validator_spec.rb
+++ b/spec/validators/json_schema_validator_spec.rb
@@ -29,36 +29,6 @@ RSpec.describe JsonSchemaValidator do
expect(build_report_result.errors.full_messages).to eq(["Data must be a valid json schema"])
end
end
-
- context 'when draft is > 4' do
- let(:validator) { described_class.new(attributes: [:data], filename: "build_report_result_data", draft: 6) }
-
- it 'uses JSONSchemer to perform validations' do
- expect(JSONSchemer).to receive(:schema).with(Pathname.new(Rails.root.join('app', 'validators', 'json_schemas', 'build_report_result_data.json').to_s)).and_call_original
-
- subject
- end
- end
-
- context 'when draft is <= 4' do
- let(:validator) { described_class.new(attributes: [:data], filename: "build_report_result_data", draft: 4) }
-
- it 'uses JSON::Validator to perform validations' do
- expect(JSON::Validator).to receive(:validate).with(Rails.root.join('app', 'validators', 'json_schemas', 'build_report_result_data.json').to_s, build_report_result.data)
-
- subject
- end
- end
-
- context 'when draft value is not provided' do
- let(:validator) { described_class.new(attributes: [:data], filename: "build_report_result_data") }
-
- it 'uses JSON::Validator to perform validations' do
- expect(JSON::Validator).to receive(:validate).with(Rails.root.join('app', 'validators', 'json_schemas', 'build_report_result_data.json').to_s, build_report_result.data)
-
- subject
- end
- end
end
context 'when filename is not set' do
diff --git a/spec/validators/x509_certificate_credentials_validator_spec.rb b/spec/validators/x509_certificate_credentials_validator_spec.rb
index 9baa31c7257..9076aee7681 100644
--- a/spec/validators/x509_certificate_credentials_validator_spec.rb
+++ b/spec/validators/x509_certificate_credentials_validator_spec.rb
@@ -13,7 +13,9 @@ RSpec.describe X509CertificateCredentialsValidator do
attr_accessor :certificate, :private_key, :passphrase
def initialize(certificate, private_key, passphrase = nil)
- @certificate, @private_key, @passphrase = certificate, private_key, passphrase
+ @certificate = certificate
+ @private_key = private_key
+ @passphrase = passphrase
end
end
end
diff --git a/spec/views/admin/dashboard/index.html.haml_spec.rb b/spec/views/admin/dashboard/index.html.haml_spec.rb
index 5494b908705..6c7d8d2c165 100644
--- a/spec/views/admin/dashboard/index.html.haml_spec.rb
+++ b/spec/views/admin/dashboard/index.html.haml_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe 'admin/dashboard/index.html.haml' do
it "includes revision of GitLab" do
render
- expect(rendered).to have_content "#{Gitlab::VERSION} (#{Gitlab.revision})"
+ expect(rendered).to have_content "#{Gitlab::VERSION} #{Gitlab.revision}"
end
it 'does not include license breakdown' do
diff --git a/spec/views/admin/services/index.html.haml_spec.rb b/spec/views/admin/services/index.html.haml_spec.rb
deleted file mode 100644
index e8cd2dde67e..00000000000
--- a/spec/views/admin/services/index.html.haml_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'admin/services/index.html.haml' do
- before do
- assign(:services, build_stubbed_list(:service, 1))
- assign(:existing_instance_types, [])
- end
-
- context 'user has not dismissed Service Templates deprecation message' do
- it 'shows the message' do
- allow(view).to receive(:show_service_templates_deprecated?).and_return(true)
-
- render
-
- expect(rendered).to have_content('Service Templates will soon be deprecated.')
- end
- end
-
- context 'user has dismissed Service Templates deprecation message' do
- it 'does not show the message' do
- allow(view).to receive(:show_service_templates_deprecated?).and_return(false)
-
- render
-
- expect(rendered).not_to have_content('Service Templates will soon be deprecated.')
- end
- end
-end
diff --git a/spec/views/dashboard/projects/index.html.haml_spec.rb b/spec/views/dashboard/projects/index.html.haml_spec.rb
new file mode 100644
index 00000000000..72b5901d5e5
--- /dev/null
+++ b/spec/views/dashboard/projects/index.html.haml_spec.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'dashboard/projects/index.html.haml' do
+ let_it_be(:user) { build(:user) }
+
+ before do
+ allow(view).to receive(:limited_counter_with_delimiter)
+ allow(view).to receive(:current_user).and_return(user)
+ allow(view).to receive(:project_list_cache_key)
+ allow(view).to receive(:time_ago_with_tooltip)
+ allow(view).to receive(:project_icon)
+ assign(:projects, [build(:project, name: 'awesome stuff')])
+ end
+
+ it 'shows the project the user is a member of in the list' do
+ render
+
+ expect(rendered).to have_content('awesome stuff')
+ end
+
+ it 'shows the "New project" button' do
+ render
+
+ expect(rendered).to have_link('New project')
+ end
+end
diff --git a/spec/views/groups/settings/_remove.html.haml_spec.rb b/spec/views/groups/settings/_remove.html.haml_spec.rb
new file mode 100644
index 00000000000..07fe900bc2d
--- /dev/null
+++ b/spec/views/groups/settings/_remove.html.haml_spec.rb
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'groups/settings/_remove.html.haml' do
+ describe 'render' do
+ it 'enables the Remove group button for a group' do
+ group = build(:group)
+
+ render 'groups/settings/remove', group: group
+
+ expect(rendered).to have_selector '[data-testid="remove-group-button"]'
+ expect(rendered).not_to have_selector '[data-testid="remove-group-button"].disabled'
+ expect(rendered).not_to have_selector '[data-testid="group-has-linked-subscription-alert"]'
+ end
+ end
+end
diff --git a/spec/views/layouts/_search.html.haml_spec.rb b/spec/views/layouts/_search.html.haml_spec.rb
new file mode 100644
index 00000000000..f0c7cb57b25
--- /dev/null
+++ b/spec/views/layouts/_search.html.haml_spec.rb
@@ -0,0 +1,68 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'layouts/_search' do
+ let(:group) { nil }
+ let(:project) { nil }
+ let(:scope) { 'issues' }
+ let(:search_context) do
+ instance_double(Gitlab::SearchContext,
+ project: project,
+ group: group,
+ scope: scope,
+ ref: nil,
+ snippets: [],
+ search_url: '/search',
+ project_metadata: {},
+ group_metadata: {})
+ end
+
+ before do
+ allow(view).to receive(:search_context).and_return(search_context)
+ allow(search_context).to receive(:code_search?).and_return(false)
+ allow(search_context).to receive(:for_snippets?).and_return(false)
+ end
+
+ shared_examples 'search context scope is set' do
+ context 'when on issues' do
+ it 'sets scope to issues' do
+ render
+
+ expect(rendered).to have_css("input[name='scope'][value='issues']", count: 1, visible: false)
+ end
+ end
+
+ context 'when on merge requests' do
+ let(:scope) { 'merge_requests' }
+
+ it 'sets scope to merge_requests' do
+ render
+
+ expect(rendered).to have_css("input[name='scope'][value='merge_requests']", count: 1, visible: false)
+ end
+ end
+ end
+
+ context 'when doing project level search' do
+ let(:project) { create(:project) }
+
+ before do
+ allow(search_context).to receive(:for_project?).and_return(true)
+ allow(search_context).to receive(:for_group?).and_return(false)
+ end
+
+ it_behaves_like 'search context scope is set'
+ end
+
+ context 'when doing group level search' do
+ let(:group) { create(:group) }
+
+ before do
+ allow(search_context).to receive(:for_project?).and_return(false)
+ allow(search_context).to receive(:for_group?).and_return(true)
+ end
+
+ it_behaves_like 'search context scope is set'
+ end
+end
diff --git a/spec/views/layouts/header/_new_dropdown.haml_spec.rb b/spec/views/layouts/header/_new_dropdown.haml_spec.rb
index 80342cbdb41..cec095f93ad 100644
--- a/spec/views/layouts/header/_new_dropdown.haml_spec.rb
+++ b/spec/views/layouts/header/_new_dropdown.haml_spec.rb
@@ -163,6 +163,7 @@ RSpec.describe 'layouts/header/_new_dropdown' do
end
it 'has a "New project" link' do
+ render('layouts/header/new_repo_experiment')
render
expect(rendered).to have_link('New project', href: new_project_path)
diff --git a/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
index b3c8450fb48..640f463b45d 100644
--- a/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_group.html.haml_spec.rb
@@ -10,4 +10,5 @@ RSpec.describe 'layouts/nav/sidebar/_group' do
end
it_behaves_like 'has nav sidebar'
+ it_behaves_like 'sidebar includes snowplow attributes', 'render', 'groups_side_navigation', 'groups_side_navigation'
end
diff --git a/spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb
index 0f6dcf8e57f..3d28be68b25 100644
--- a/spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_profile.html.haml_spec.rb
@@ -10,4 +10,5 @@ RSpec.describe 'layouts/nav/sidebar/_profile' do
end
it_behaves_like 'has nav sidebar'
+ it_behaves_like 'sidebar includes snowplow attributes', 'render', 'user_side_navigation', 'user_side_navigation'
end
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index 99d7dfc8acb..bc5b3b7bfc6 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -5,16 +5,128 @@ require 'spec_helper'
RSpec.describe 'layouts/nav/sidebar/_project' do
let_it_be_with_reload(:project) { create(:project, :repository) }
+ let(:user) { project.owner }
+ let(:current_ref) { 'master' }
+
before do
assign(:project, project)
assign(:repository, project.repository)
- allow(view).to receive(:current_ref).and_return('master')
+ allow(view).to receive(:current_ref).and_return(current_ref)
allow(view).to receive(:can?).and_return(true)
+ allow(view).to receive(:current_user).and_return(user)
end
it_behaves_like 'has nav sidebar'
+ describe 'Project Overview' do
+ it 'has a link to the project path' do
+ render
+
+ expect(rendered).to have_link('Project overview', href: project_path(project), class: %w(shortcuts-project rspec-project-link))
+ expect(rendered).to have_selector('[aria-label="Project overview"]')
+ end
+
+ describe 'Details' do
+ it 'has a link to the projects path' do
+ render
+
+ expect(rendered).to have_link('Details', href: project_path(project), class: 'shortcuts-project')
+ expect(rendered).to have_selector('[aria-label="Project details"]')
+ end
+ end
+
+ describe 'Activity' do
+ it 'has a link to the project activity path' do
+ render
+
+ expect(rendered).to have_link('Activity', href: activity_project_path(project), class: 'shortcuts-project-activity')
+ end
+ end
+
+ describe 'Releases' do
+ it 'has a link to the project releases path' do
+ render
+
+ expect(rendered).to have_link('Releases', href: project_releases_path(project), class: 'shortcuts-project-releases')
+ end
+ end
+ end
+
+ describe 'Learn GitLab' do
+ it 'has a link to the learn GitLab experiment' do
+ allow(view).to receive(:learn_gitlab_experiment_enabled?).and_return(true)
+
+ render
+
+ expect(rendered).to have_link('Learn GitLab', href: project_learn_gitlab_path(project))
+ end
+ end
+
+ describe 'Repository' do
+ it 'has a link to the project tree path' do
+ render
+
+ expect(rendered).to have_link('Repository', href: project_tree_path(project, current_ref), class: 'shortcuts-tree')
+ end
+
+ describe 'Files' do
+ it 'has a link to the project tree path' do
+ render
+
+ expect(rendered).to have_link('Files', href: project_tree_path(project, current_ref))
+ end
+ end
+
+ describe 'Commits' do
+ it 'has a link to the project commits path' do
+ render
+
+ expect(rendered).to have_link('Commits', href: project_commits_path(project, current_ref), id: 'js-onboarding-commits-link')
+ end
+ end
+
+ describe 'Branches' do
+ it 'has a link to the project branches path' do
+ render
+
+ expect(rendered).to have_link('Branches', href: project_branches_path(project), id: 'js-onboarding-branches-link')
+ end
+ end
+
+ describe 'Tags' do
+ it 'has a link to the project tags path' do
+ render
+
+ expect(rendered).to have_link('Tags', href: project_tags_path(project))
+ end
+ end
+
+ describe 'Contributors' do
+ it 'has a link to the project contributors path' do
+ render
+
+ expect(rendered).to have_link('Contributors', href: project_graph_path(project, current_ref))
+ end
+ end
+
+ describe 'Graph' do
+ it 'has a link to the project graph path' do
+ render
+
+ expect(rendered).to have_link('Graph', href: project_network_path(project, current_ref))
+ end
+ end
+
+ describe 'Compare' do
+ it 'has a link to the project compare path' do
+ render
+
+ expect(rendered).to have_link('Compare', href: project_compare_index_path(project, from: project.repository.root_ref, to: current_ref))
+ end
+ end
+ end
+
describe 'issue boards' do
it 'has board tab' do
render
@@ -99,19 +211,11 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
- describe 'releases entry' do
- it 'renders releases link' do
- render
-
- expect(rendered).to have_link('Releases', href: project_releases_path(project))
- end
- end
-
describe 'wiki entry tab' do
let(:can_read_wiki) { true }
before do
- allow(view).to receive(:can?).with(nil, :read_wiki, project).and_return(can_read_wiki)
+ allow(view).to receive(:can?).with(user, :read_wiki, project).and_return(can_read_wiki)
end
describe 'when wiki is enabled' do
@@ -146,7 +250,7 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
it 'shows the external wiki tab with the external wiki service link' do
render
- expect(rendered).to have_link('External Wiki', href: properties['external_wiki_url'])
+ expect(rendered).to have_link('External wiki', href: properties['external_wiki_url'])
end
end
@@ -156,7 +260,7 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
it 'does not show the external wiki tab' do
render
- expect(rendered).not_to have_link('External Wiki')
+ expect(rendered).not_to have_link('External wiki')
end
end
end
@@ -299,7 +403,7 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
let(:read_cycle_analytics) { true }
before do
- allow(view).to receive(:can?).with(nil, :read_cycle_analytics, project).and_return(read_cycle_analytics)
+ allow(view).to receive(:can?).with(user, :read_cycle_analytics, project).and_return(read_cycle_analytics)
end
describe 'when value stream analytics is enabled' do
@@ -346,4 +450,6 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
end
+
+ it_behaves_like 'sidebar includes snowplow attributes', 'render', 'projects_side_navigation', 'projects_side_navigation'
end
diff --git a/spec/views/layouts/profile.html.haml_spec.rb b/spec/views/layouts/profile.html.haml_spec.rb
index 93f8a075209..77474555771 100644
--- a/spec/views/layouts/profile.html.haml_spec.rb
+++ b/spec/views/layouts/profile.html.haml_spec.rb
@@ -19,21 +19,8 @@ RSpec.describe 'layouts/profile' do
.with({ locals: { container_class: 'gl-my-5' } })
end
- context 'when search_settings_in_page feature flag is on' do
- it 'displays the search settings entry point' do
- render
- expect(rendered).to include('js-search-settings-app')
- end
- end
-
- context 'when search_settings_in_page feature flag is off' do
- before do
- stub_feature_flags(search_settings_in_page: false)
- end
-
- it 'does not display the search settings entry point' do
- render
- expect(rendered).not_to include('js-search-settings-app')
- end
+ it 'displays the search settings entry point' do
+ render
+ expect(rendered).to include('js-search-settings-app')
end
end
diff --git a/spec/views/profiles/keys/_form.html.haml_spec.rb b/spec/views/profiles/keys/_form.html.haml_spec.rb
new file mode 100644
index 00000000000..62bb271bd9c
--- /dev/null
+++ b/spec/views/profiles/keys/_form.html.haml_spec.rb
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'profiles/keys/_form.html.haml' do
+ let_it_be(:key) { Key.new }
+
+ let(:page) { Capybara::Node::Simple.new(rendered) }
+
+ before do
+ assign(:key, key)
+ end
+
+ context 'when the form partial is used' do
+ before do
+ allow(view).to receive(:ssh_key_expires_field_description).and_return('Key can still be used after expiration.')
+
+ render
+ end
+
+ it 'renders the form with the correct action' do
+ expect(page.find('form')['action']).to eq('/-/profile/keys')
+ end
+
+ it 'has the key field', :aggregate_failures do
+ expect(rendered).to have_field('Key', type: 'textarea', placeholder: 'Typically starts with "ssh-ed25519 …" or "ssh-rsa …"')
+ expect(rendered).to have_text("Paste your public SSH key, which is usually contained in the file '~/.ssh/id_ed25519.pub' or '~/.ssh/id_rsa.pub' and begins with 'ssh-ed25519' or 'ssh-rsa'. Do not paste your private SSH key, as that can compromise your identity.")
+ end
+
+ it 'has the title field', :aggregate_failures do
+ expect(rendered).to have_field('Title', type: 'text', placeholder: 'e.g. My MacBook key')
+ expect(rendered).to have_text('Give your individual key a title.')
+ end
+
+ it 'has the expires at field', :aggregate_failures do
+ expect(rendered).to have_field('Expires at', type: 'date')
+ expect(page.find_field('Expires at')['min']).to eq(l(1.day.from_now, format: "%Y-%m-%d"))
+ expect(rendered).to have_text('Key can still be used after expiration.')
+ end
+
+ it 'has the validation warning', :aggregate_failures do
+ expect(rendered).to have_text("Oops, are you sure? Publicly visible private SSH keys can compromise your system.")
+ expect(rendered).to have_button('Yes, add it')
+ end
+
+ it 'has the submit button' do
+ expect(rendered).to have_button('Add key')
+ end
+ end
+end
diff --git a/spec/views/profiles/keys/_key.html.haml_spec.rb b/spec/views/profiles/keys/_key.html.haml_spec.rb
new file mode 100644
index 00000000000..bb101198ac3
--- /dev/null
+++ b/spec/views/profiles/keys/_key.html.haml_spec.rb
@@ -0,0 +1,123 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'profiles/keys/_key.html.haml' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ allow(view).to receive(:key).and_return(key)
+ allow(view).to receive(:is_admin).and_return(false)
+ end
+
+ context 'when the key partial is used' do
+ let_it_be(:key) do
+ create(:personal_key,
+ user: user,
+ last_used_at: 7.days.ago,
+ expires_at: 2.days.from_now)
+ end
+
+ it 'displays the correct values', :aggregate_failures do
+ render
+
+ expect(rendered).to have_text(key.title)
+ expect(rendered).to have_css('[data-testid="key-icon"]')
+ expect(rendered).to have_text(key.fingerprint)
+ expect(rendered).to have_text(l(key.last_used_at, format: "%b %d, %Y"))
+ expect(rendered).to have_text(l(key.created_at, format: "%b %d, %Y"))
+ expect(rendered).to have_text(key.expires_at.to_date)
+ expect(response).to render_template(partial: 'shared/ssh_keys/_key_delete')
+ end
+
+ context 'when the key has not been used' do
+ let_it_be(:key) do
+ create(:personal_key,
+ user: user,
+ last_used_at: nil)
+ end
+
+ it 'renders "Never" for last used' do
+ render
+
+ expect(rendered).to have_text('Last used: Never')
+ end
+ end
+
+ context 'when the key does not have an expiration date' do
+ let_it_be(:key) do
+ create(:personal_key,
+ user: user,
+ expires_at: nil)
+ end
+
+ it 'renders "Never" for expires' do
+ render
+
+ expect(rendered).to have_text('Expires: Never')
+ end
+ end
+
+ context 'when the key has expired' do
+ let_it_be(:key) do
+ create(:personal_key,
+ user: user,
+ expires_at: 2.days.ago)
+ end
+
+ it 'renders "Expired:" as the expiration date label' do
+ render
+
+ expect(rendered).to have_text('Expired:')
+ end
+ end
+
+ context 'when the key is not deletable' do
+ # Turns out key.can_delete? is only false for LDAP keys
+ # but LDAP keys don't exist outside EE
+ before do
+ allow(key).to receive(:can_delete?).and_return(false)
+ end
+
+ it 'does not render the partial' do
+ render
+
+ expect(response).not_to render_template(partial: 'shared/ssh_keys/_key_delete')
+ end
+ end
+
+ context 'icon tooltip' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:valid, :expiry, :result) do
+ false | 2.days.from_now | 'Key type is forbidden. Must be DSA, ECDSA, or ED25519'
+ false | 2.days.ago | 'Key type is forbidden. Must be DSA, ECDSA, or ED25519'
+ true | 2.days.ago | 'Key usable beyond expiration date.'
+ true | 2.days.from_now | ''
+ end
+
+ with_them do
+ let_it_be(:key) do
+ create(:personal_key, user: user)
+ end
+
+ it 'renders the correct icon', :aggregate_failures do
+ unless valid
+ stub_application_setting(rsa_key_restriction: ApplicationSetting::FORBIDDEN_KEY_VALUE)
+ end
+
+ key.expires_at = expiry
+
+ render
+
+ if result.empty?
+ expect(rendered).to have_css('[data-testid="key-icon"]')
+ else
+ expect(rendered).to have_css('[data-testid="warning-solid-icon"]')
+ expect(rendered).to have_selector("span.has-tooltip[title='#{result}']")
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/views/projects/commit/_commit_box.html.haml_spec.rb b/spec/views/projects/commit/_commit_box.html.haml_spec.rb
index 9d18519ade6..c503e085d02 100644
--- a/spec/views/projects/commit/_commit_box.html.haml_spec.rb
+++ b/spec/views/projects/commit/_commit_box.html.haml_spec.rb
@@ -34,14 +34,6 @@ RSpec.describe 'projects/commit/_commit_box.html.haml' do
expect(rendered).to have_selector('.js-commit-pipeline-mini-graph')
end
-
- it 'shows pipeline stages in haml when feature flag is disabled' do
- stub_feature_flags(ci_commit_pipeline_mini_graph_vue: false)
-
- render
-
- expect(rendered).to have_selector('.js-commit-pipeline-graph')
- end
end
context 'when there are multiple pipelines for a commit' do
@@ -74,30 +66,4 @@ RSpec.describe 'projects/commit/_commit_box.html.haml' do
end
end
end
-
- context 'viewing a commit' do
- context 'as a developer' do
- before do
- allow(view).to receive(:can_collaborate_with_project?).and_return(true)
- end
-
- it 'has a link to create a new tag' do
- render
-
- expect(rendered).to have_link('Tag')
- end
- end
-
- context 'as a non-developer' do
- before do
- project.add_guest(user)
- end
-
- it 'does not have a link to create a new tag' do
- render
-
- expect(rendered).not_to have_link('Tag')
- end
- end
- end
end
diff --git a/spec/views/projects/empty.html.haml_spec.rb b/spec/views/projects/empty.html.haml_spec.rb
index de83722160e..dd7b1bd94a9 100644
--- a/spec/views/projects/empty.html.haml_spec.rb
+++ b/spec/views/projects/empty.html.haml_spec.rb
@@ -36,6 +36,16 @@ RSpec.describe 'projects/empty' do
end
end
+ context 'project is archived' do
+ let(:project) { ProjectPresenter.new(create(:project, :empty_repo, :archived), current_user: user) }
+
+ it 'shows archived notice' do
+ render
+
+ expect(rendered).to have_content('Archived project!')
+ end
+ end
+
describe 'invite_members_empty_project_version_a experiment' do
let(:can_import_members) { true }
diff --git a/spec/views/projects/pipelines/_stage.html.haml_spec.rb b/spec/views/projects/pipelines/_stage.html.haml_spec.rb
deleted file mode 100644
index c8f6784a0f6..00000000000
--- a/spec/views/projects/pipelines/_stage.html.haml_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'projects/pipelines/_stage' do
- let(:project) { create(:project, :repository) }
- let(:pipeline) { create(:ci_pipeline, project: project) }
- let(:stage) { build(:ci_stage, pipeline: pipeline) }
-
- before do
- assign :stage, stage
- end
-
- context 'when there are only latest builds present' do
- before do
- create(:ci_build, name: 'test:build',
- stage: stage.name,
- pipeline: pipeline)
- end
-
- it 'shows the builds in the stage' do
- render
-
- expect(rendered).to have_text 'test:build'
- end
- end
-
- context 'when build belongs to different stage' do
- before do
- create(:ci_build, name: 'test:build',
- stage: 'other:stage',
- pipeline: pipeline)
- end
-
- it 'does not render build' do
- render
-
- expect(rendered).not_to have_text 'test:build'
- end
- end
-
- context 'when there are retried builds present' do
- before do
- create(:ci_build, name: 'test:build', stage: stage.name, pipeline: pipeline, retried: true)
- create(:ci_build, name: 'test:build', stage: stage.name, pipeline: pipeline)
- end
-
- it 'shows only latest builds' do
- render
-
- expect(rendered).to have_text 'test:build', count: 1
- end
- end
-
- context 'when there are multiple builds' do
- before do
- Ci::HasStatus::AVAILABLE_STATUSES.each do |status|
- create_build(status)
- end
- end
-
- it 'shows them in order' do
- render
-
- expect(rendered).to have_text(Ci::HasStatus::ORDERED_STATUSES.join(" "))
- end
-
- def create_build(status)
- create(:ci_build, name: status, status: status,
- pipeline: pipeline, stage: stage.name)
- end
- end
-end
diff --git a/spec/views/projects/settings/operations/show.html.haml_spec.rb b/spec/views/projects/settings/operations/show.html.haml_spec.rb
index b2dd3556098..e6d53c526e2 100644
--- a/spec/views/projects/settings/operations/show.html.haml_spec.rb
+++ b/spec/views/projects/settings/operations/show.html.haml_spec.rb
@@ -36,8 +36,8 @@ RSpec.describe 'projects/settings/operations/show' do
it 'renders the Operations Settings page' do
render
- expect(rendered).to have_content _('Alerts')
- expect(rendered).to have_content _('Display alerts from all your monitoring tools directly within GitLab.')
+ expect(rendered).to have_content _('Alert integrations')
+ expect(rendered).to have_content _('Display alerts from all configured monitoring tools.')
end
end
diff --git a/spec/views/projects/tags/index.html.haml_spec.rb b/spec/views/projects/tags/index.html.haml_spec.rb
index dc008875062..18b42f98e0b 100644
--- a/spec/views/projects/tags/index.html.haml_spec.rb
+++ b/spec/views/projects/tags/index.html.haml_spec.rb
@@ -21,6 +21,7 @@ RSpec.describe 'projects/tags/index.html.haml' do
end
it 'defaults sort dropdown toggle to last updated' do
+ stub_feature_flags(gldropdown_tags: false)
render
expect(rendered).to have_button('Last updated')
end
diff --git a/spec/views/registrations/welcome/show.html.haml_spec.rb b/spec/views/registrations/welcome/show.html.haml_spec.rb
index f731594e9ee..639759ae095 100644
--- a/spec/views/registrations/welcome/show.html.haml_spec.rb
+++ b/spec/views/registrations/welcome/show.html.haml_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'registrations/welcome/show' do
- using RSpec::Parameterized::TableSyntax
+ let(:is_gitlab_com) { false }
let_it_be(:user) { User.new }
@@ -11,9 +11,9 @@ RSpec.describe 'registrations/welcome/show' do
allow(view).to receive(:current_user).and_return(user)
allow(view).to receive(:in_subscription_flow?).and_return(false)
allow(view).to receive(:in_trial_flow?).and_return(false)
- allow(view).to receive(:in_invitation_flow?).and_return(false)
+ allow(view).to receive(:user_has_memberships?).and_return(false)
allow(view).to receive(:in_oauth_flow?).and_return(false)
- allow(Gitlab).to receive(:com?).and_return(false)
+ allow(Gitlab).to receive(:com?).and_return(is_gitlab_com)
render
end
@@ -22,4 +22,24 @@ RSpec.describe 'registrations/welcome/show' do
it { is_expected.not_to have_selector('label[for="user_setup_for_company"]') }
it { is_expected.to have_button('Get started!') }
+ it { is_expected.to have_selector('input[name="user[email_opted_in]"]') }
+
+ describe 'email opt-in' do
+ context 'when on gitlab.com' do
+ let(:is_gitlab_com) { true }
+
+ it 'hides the email opt-in by default' do
+ expect(subject).to have_css('.js-email-opt-in.hidden')
+ end
+ end
+
+ context 'when not on gitlab.com' do
+ let(:is_gitlab_com) { false }
+
+ it 'shows the email opt-in by default' do
+ expect(subject).not_to have_css('.js-email-opt-in.hidden')
+ expect(subject).to have_css('.js-email-opt-in')
+ end
+ end
+ end
end
diff --git a/spec/views/search/_results.html.haml_spec.rb b/spec/views/search/_results.html.haml_spec.rb
index 8960d096143..11f2a4082e7 100644
--- a/spec/views/search/_results.html.haml_spec.rb
+++ b/spec/views/search/_results.html.haml_spec.rb
@@ -28,6 +28,21 @@ RSpec.describe 'search/_results' do
expect(rendered).to have_content('Showing 1 - 2 of 3 issues for foo')
end
+ context 'when searching notes which contain quotes in markdown' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:issue) { create(:issue, project: project, title: '*') }
+ let_it_be(:note) { create(:discussion_note_on_issue, noteable: issue, project: issue.project, note: '```"helloworld"```') }
+ let(:scope) { 'notes' }
+ let(:search_objects) { Note.page(1).per(2) }
+ let(:term) { 'helloworld' }
+
+ it 'renders plain quotes' do
+ render
+
+ expect(rendered).to include('"<mark>helloworld</mark>"')
+ end
+ end
+
context 'when search results do not have a count' do
before do
@search_objects = @search_objects.without_count
diff --git a/spec/views/shared/nav/_sidebar.html.haml_spec.rb b/spec/views/shared/nav/_sidebar.html.haml_spec.rb
new file mode 100644
index 00000000000..268d2952683
--- /dev/null
+++ b/spec/views/shared/nav/_sidebar.html.haml_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'shared/nav/_sidebar.html.haml' do
+ let(:project) { build(:project, id: non_existing_record_id) }
+ let(:context) { Sidebars::Projects::Context.new(current_user: nil, container: project) }
+ let(:sidebar) { Sidebars::Projects::Panel.new(context) }
+
+ before do
+ assign(:project, project)
+ assign(:sidebar, sidebar)
+
+ allow(sidebar).to receive(:renderable_menus).and_return([])
+ end
+
+ context 'when sidebar has a scope menu' do
+ it 'renders the scope menu' do
+ render
+
+ expect(rendered).to render_template('shared/nav/_scope_menu')
+ end
+ end
+
+ context 'when sidebar does not have a scope menu' do
+ let(:scope_menu_view_path) { 'shared/nav/' }
+ let(:scope_menu_view_name) { 'scope_menu.html.haml' }
+ let(:scope_menu_view) { "#{scope_menu_view_path}#{scope_menu_view_name}" }
+ let(:scope_menu_partial) { "#{scope_menu_view_path}_#{scope_menu_view_name}" }
+ let(:content) { 'Custom test content' }
+
+ context 'when sidebar has a custom scope menu partial defined' do
+ it 'renders the custom partial' do
+ allow(sidebar).to receive(:render_raw_scope_menu_partial).and_return(scope_menu_view)
+ allow(sidebar).to receive(:scope_menu).and_return(nil)
+ stub_template(scope_menu_partial => content)
+
+ render
+
+ expect(rendered).to have_text(content)
+ end
+ end
+ end
+end
diff --git a/spec/views/shared/runners/show.html.haml_spec.rb b/spec/views/shared/runners/show.html.haml_spec.rb
index 5e2812eb48a..91a6a31daae 100644
--- a/spec/views/shared/runners/show.html.haml_spec.rb
+++ b/spec/views/shared/runners/show.html.haml_spec.rb
@@ -34,19 +34,19 @@ RSpec.describe 'shared/runners/show.html.haml' do
describe 'Runner id and type' do
context 'when runner is of type instance' do
- it { is_expected.to have_content("Runner ##{runner.id} Shared") }
+ it { is_expected.to have_content("Runner ##{runner.id} shared") }
end
context 'when runner is of type group' do
let(:runner) { create(:ci_runner, :group) }
- it { is_expected.to have_content("Runner ##{runner.id} Group") }
+ it { is_expected.to have_content("Runner ##{runner.id} group") }
end
context 'when runner is of type project' do
let(:runner) { create(:ci_runner, :project) }
- it { is_expected.to have_content("Runner ##{runner.id} Specific") }
+ it { is_expected.to have_content("Runner ##{runner.id} specific") }
end
end
diff --git a/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb b/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
index a27c431523e..0501fc3b8cf 100644
--- a/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
+++ b/spec/workers/authorized_project_update/user_refresh_over_user_range_worker_spec.rb
@@ -3,16 +3,67 @@
require 'spec_helper'
RSpec.describe AuthorizedProjectUpdate::UserRefreshOverUserRangeWorker do
- let(:start_user_id) { 42 }
- let(:end_user_id) { 4242 }
+ let(:project) { create(:project) }
+ let(:user) { project.namespace.owner }
+ let(:start_user_id) { user.id }
+ let(:end_user_id) { start_user_id }
+ let(:execute_worker) { subject.perform(start_user_id, end_user_id) }
+
+ it_behaves_like 'worker with data consistency',
+ described_class,
+ feature_flag: :periodic_project_authorization_update_via_replica,
+ data_consistency: :delayed
describe '#perform' do
- it 'calls AuthorizedProjectUpdate::RecalculateForUserRangeService' do
- expect_next_instance_of(AuthorizedProjectUpdate::RecalculateForUserRangeService) do |service|
- expect(service).to receive(:execute)
+ context 'when the feature flag `periodic_project_authorization_update_via_replica` is enabled' do
+ before do
+ stub_feature_flags(periodic_project_authorization_update_via_replica: true)
+ end
+
+ context 'checks if project authorization update is required' do
+ it 'checks if a project_authorization refresh is needed for each of the users' do
+ User.where(id: start_user_id..end_user_id).each do |user|
+ expect(AuthorizedProjectUpdate::FindRecordsDueForRefreshService).to(
+ receive(:new).with(user).and_call_original)
+ end
+
+ execute_worker
+ end
+ end
+
+ context 'when there are project authorization records due for either removal or addition for a specific user' do
+ before do
+ user.project_authorizations.delete_all
+ end
+
+ it 'enqueues a new project authorization update job for the user' do
+ expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).to receive(:perform_async).with(user.id)
+
+ execute_worker
+ end
end
- subject.perform(start_user_id, end_user_id)
+ context 'when there are no additions or removals to be made to project authorizations for a specific user' do
+ it 'does not enqueue a new project authorization update job for the user' do
+ expect(AuthorizedProjectUpdate::UserRefreshWithLowUrgencyWorker).not_to receive(:perform_async)
+
+ execute_worker
+ end
+ end
+ end
+
+ context 'when the feature flag `periodic_project_authorization_update_via_replica` is disabled' do
+ before do
+ stub_feature_flags(periodic_project_authorization_update_via_replica: false)
+ end
+
+ it 'calls AuthorizedProjectUpdate::RecalculateForUserRangeService' do
+ expect_next_instance_of(AuthorizedProjectUpdate::RecalculateForUserRangeService, start_user_id, end_user_id) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ execute_worker
+ end
end
end
end
diff --git a/spec/workers/background_migration_worker_spec.rb b/spec/workers/background_migration_worker_spec.rb
index 8094efcaf04..4575c270042 100644
--- a/spec/workers/background_migration_worker_spec.rb
+++ b/spec/workers/background_migration_worker_spec.rb
@@ -101,7 +101,7 @@ RSpec.describe BackgroundMigrationWorker, :clean_gitlab_redis_shared_state do
it 'sets the class that will be executed as the caller_id' do
expect(Gitlab::BackgroundMigration).to receive(:perform) do
- expect(Labkit::Context.current.to_h).to include('meta.caller_id' => 'Foo')
+ expect(Gitlab::ApplicationContext.current).to include('meta.caller_id' => 'Foo')
end
worker.perform('Foo', [10, 20])
diff --git a/spec/workers/build_finished_worker_spec.rb b/spec/workers/build_finished_worker_spec.rb
index 6d040f83dc7..5aca5d68677 100644
--- a/spec/workers/build_finished_worker_spec.rb
+++ b/spec/workers/build_finished_worker_spec.rb
@@ -6,10 +6,8 @@ RSpec.describe BuildFinishedWorker do
subject { described_class.new.perform(build.id) }
describe '#perform' do
- let(:build) { create(:ci_build, :success, pipeline: create(:ci_pipeline)) }
-
context 'when build exists' do
- let!(:build) { create(:ci_build) }
+ let_it_be(:build) { create(:ci_build, :success, pipeline: create(:ci_pipeline)) }
before do
expect(Ci::Build).to receive(:find_by).with(id: build.id).and_return(build)
@@ -30,6 +28,30 @@ RSpec.describe BuildFinishedWorker do
subject
end
+
+ context 'when build is failed' do
+ before do
+ build.update!(status: :failed)
+ end
+
+ it 'adds a todo' do
+ expect(::Ci::MergeRequests::AddTodoWhenBuildFailsWorker).to receive(:perform_async)
+
+ subject
+ end
+ end
+
+ context 'when build has a chat' do
+ before do
+ build.pipeline.update!(source: :chat)
+ end
+
+ it 'schedules a ChatNotification job' do
+ expect(ChatNotificationWorker).to receive(:perform_async).with(build.id)
+
+ subject
+ end
+ end
end
context 'when build does not exist' do
@@ -38,15 +60,5 @@ RSpec.describe BuildFinishedWorker do
.not_to raise_error
end
end
-
- context 'when build has a chat' do
- let(:build) { create(:ci_build, :success, pipeline: create(:ci_pipeline, source: :chat)) }
-
- it 'schedules a ChatNotification job' do
- expect(ChatNotificationWorker).to receive(:perform_async).with(build.id)
-
- subject
- end
- end
end
end
diff --git a/spec/workers/build_hooks_worker_spec.rb b/spec/workers/build_hooks_worker_spec.rb
index aefbd7e590e..7e469958a84 100644
--- a/spec/workers/build_hooks_worker_spec.rb
+++ b/spec/workers/build_hooks_worker_spec.rb
@@ -22,4 +22,9 @@ RSpec.describe BuildHooksWorker do
end
end
end
+
+ it_behaves_like 'worker with data consistency',
+ described_class,
+ feature_flag: :load_balancing_for_build_hooks_worker,
+ data_consistency: :delayed
end
diff --git a/spec/workers/bulk_import_worker_spec.rb b/spec/workers/bulk_import_worker_spec.rb
index 8cf14ed6f8b..5964ec45563 100644
--- a/spec/workers/bulk_import_worker_spec.rb
+++ b/spec/workers/bulk_import_worker_spec.rb
@@ -4,10 +4,6 @@ require 'spec_helper'
RSpec.describe BulkImportWorker do
describe '#perform' do
- before do
- stub_const("#{described_class}::DEFAULT_BATCH_SIZE", 1)
- end
-
context 'when no bulk import is found' do
it 'does nothing' do
expect(described_class).not_to receive(:perform_in)
@@ -59,10 +55,26 @@ RSpec.describe BulkImportWorker do
expect(bulk_import.reload.started?).to eq(true)
end
+ it 'creates all the required pipeline trackers' do
+ bulk_import = create(:bulk_import, :created)
+ entity_1 = create(:bulk_import_entity, :created, bulk_import: bulk_import)
+ entity_2 = create(:bulk_import_entity, :created, bulk_import: bulk_import)
+
+ expect { subject.perform(bulk_import.id) }
+ .to change(BulkImports::Tracker, :count)
+ .by(BulkImports::Stage.pipelines.size * 2)
+
+ expect(entity_1.trackers).not_to be_empty
+ expect(entity_2.trackers).not_to be_empty
+ end
+
context 'when there are created entities to process' do
it 'marks a batch of entities as started, enqueues BulkImports::EntityWorker and reenqueues' do
+ stub_const("#{described_class}::DEFAULT_BATCH_SIZE", 1)
+
bulk_import = create(:bulk_import, :created)
- (described_class::DEFAULT_BATCH_SIZE + 1).times { |_| create(:bulk_import_entity, :created, bulk_import: bulk_import) }
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
+ create(:bulk_import_entity, :created, bulk_import: bulk_import)
expect(described_class).to receive(:perform_in).with(described_class::PERFORM_DELAY, bulk_import.id)
expect(BulkImports::EntityWorker).to receive(:perform_async)
diff --git a/spec/workers/bulk_imports/entity_worker_spec.rb b/spec/workers/bulk_imports/entity_worker_spec.rb
index cd9a6f605b9..deae15a3ca2 100644
--- a/spec/workers/bulk_imports/entity_worker_spec.rb
+++ b/spec/workers/bulk_imports/entity_worker_spec.rb
@@ -3,51 +3,107 @@
require 'spec_helper'
RSpec.describe BulkImports::EntityWorker do
- describe '#execute' do
- let(:bulk_import) { create(:bulk_import) }
-
- context 'when started entity exists' do
- let(:entity) { create(:bulk_import_entity, :started, bulk_import: bulk_import) }
-
- it 'executes BulkImports::Importers::GroupImporter' do
- expect(BulkImports::Importers::GroupImporter).to receive(:new).with(entity).and_call_original
+ let_it_be(:entity) { create(:bulk_import_entity) }
+
+ let_it_be(:pipeline_tracker) do
+ create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'Stage0::Pipeline',
+ stage: 0
+ )
+ end
- subject.perform(entity.id)
- end
+ it 'enqueues work for the first stage pipelines' do
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ worker: described_class.name,
+ entity_id: entity.id,
+ current_stage: nil
+ )
+ end
- it 'sets jid' do
- jid = 'jid'
+ expect(BulkImports::PipelineWorker)
+ .to receive(:perform_async)
+ .with(
+ pipeline_tracker.id,
+ pipeline_tracker.stage,
+ entity.id
+ )
- allow(subject).to receive(:jid).and_return(jid)
+ subject.perform(entity.id)
+ end
- subject.perform(entity.id)
+ it 'does not enqueue a new pipeline job if the current stage is still running' do
+ expect(BulkImports::PipelineWorker)
+ .not_to receive(:perform_async)
- expect(entity.reload.jid).to eq(jid)
- end
+ subject.perform(entity.id, 0)
+ end
- context 'when exception occurs' do
- it 'tracks the exception & marks entity as failed' do
- allow(BulkImports::Importers::GroupImporter).to receive(:new) { raise StandardError }
+ it 'enqueues the next stage pipelines when the current stage is finished' do
+ next_stage_pipeline_tracker = create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'Stage1::Pipeline',
+ stage: 1
+ )
+
+ pipeline_tracker.fail_op!
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ worker: described_class.name,
+ entity_id: entity.id,
+ current_stage: 0
+ )
+ end
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(kind_of(StandardError), bulk_import_id: bulk_import.id, entity_id: entity.id)
+ expect(BulkImports::PipelineWorker)
+ .to receive(:perform_async)
+ .with(
+ next_stage_pipeline_tracker.id,
+ next_stage_pipeline_tracker.stage,
+ entity.id
+ )
- subject.perform(entity.id)
+ subject.perform(entity.id, 0)
+ end
- expect(entity.reload.failed?).to eq(true)
- end
- end
+ it 'logs and tracks the raised exceptions' do
+ exception = StandardError.new('Error!')
+
+ expect(BulkImports::PipelineWorker)
+ .to receive(:perform_async)
+ .and_raise(exception)
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ worker: described_class.name,
+ entity_id: entity.id,
+ current_stage: nil
+ )
+
+ expect(logger)
+ .to receive(:error)
+ .with(
+ worker: described_class.name,
+ entity_id: entity.id,
+ current_stage: nil,
+ error_message: 'Error!'
+ )
end
- context 'when started entity does not exist' do
- it 'does not execute BulkImports::Importers::GroupImporter' do
- entity = create(:bulk_import_entity, bulk_import: bulk_import)
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(exception, entity_id: entity.id)
- expect(BulkImports::Importers::GroupImporter).not_to receive(:new)
-
- subject.perform(entity.id)
- end
- end
+ subject.perform(entity.id)
end
end
diff --git a/spec/workers/bulk_imports/pipeline_worker_spec.rb b/spec/workers/bulk_imports/pipeline_worker_spec.rb
new file mode 100644
index 00000000000..27151177634
--- /dev/null
+++ b/spec/workers/bulk_imports/pipeline_worker_spec.rb
@@ -0,0 +1,125 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe BulkImports::PipelineWorker do
+ let(:pipeline_class) do
+ Class.new do
+ def initialize(_); end
+
+ def run; end
+ end
+ end
+
+ let_it_be(:entity) { create(:bulk_import_entity) }
+
+ before do
+ stub_const('FakePipeline', pipeline_class)
+ end
+
+ it 'runs the given pipeline successfully' do
+ pipeline_tracker = create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'FakePipeline'
+ )
+
+ expect(BulkImports::Stage)
+ .to receive(:pipeline_exists?)
+ .with('FakePipeline')
+ .and_return(true)
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:info)
+ .with(
+ worker: described_class.name,
+ pipeline_name: 'FakePipeline',
+ entity_id: entity.id
+ )
+ end
+
+ expect(BulkImports::EntityWorker)
+ .to receive(:perform_async)
+ .with(entity.id, pipeline_tracker.stage)
+
+ expect(subject).to receive(:jid).and_return('jid')
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:finished)
+ expect(pipeline_tracker.jid).to eq('jid')
+ end
+
+ context 'when the pipeline cannot be found' do
+ it 'logs the error' do
+ pipeline_tracker = create(
+ :bulk_import_tracker,
+ :started,
+ entity: entity,
+ pipeline_name: 'FakePipeline'
+ )
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:error)
+ .with(
+ worker: described_class.name,
+ pipeline_tracker_id: pipeline_tracker.id,
+ entity_id: entity.id,
+ message: 'Unstarted pipeline not found'
+ )
+ end
+
+ expect(BulkImports::EntityWorker)
+ .to receive(:perform_async)
+ .with(entity.id, pipeline_tracker.stage)
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+ end
+ end
+
+ context 'when the pipeline raises an exception' do
+ it 'logs the error' do
+ pipeline_tracker = create(
+ :bulk_import_tracker,
+ entity: entity,
+ pipeline_name: 'InexistentPipeline'
+ )
+
+ expect_next_instance_of(Gitlab::Import::Logger) do |logger|
+ expect(logger)
+ .to receive(:error)
+ .with(
+ worker: described_class.name,
+ pipeline_name: 'InexistentPipeline',
+ entity_id: entity.id,
+ message: "'InexistentPipeline' is not a valid BulkImport Pipeline"
+ )
+ end
+
+ expect(Gitlab::ErrorTracking)
+ .to receive(:track_exception)
+ .with(
+ instance_of(NameError),
+ entity_id: entity.id,
+ pipeline_name: pipeline_tracker.pipeline_name
+ )
+
+ expect(BulkImports::EntityWorker)
+ .to receive(:perform_async)
+ .with(entity.id, pipeline_tracker.stage)
+
+ expect(subject).to receive(:jid).and_return('jid')
+
+ subject.perform(pipeline_tracker.id, pipeline_tracker.stage, entity.id)
+
+ pipeline_tracker.reload
+
+ expect(pipeline_tracker.status_name).to eq(:failed)
+ expect(pipeline_tracker.jid).to eq('jid')
+ end
+ end
+end
diff --git a/spec/workers/ci/drop_pipeline_worker_spec.rb b/spec/workers/ci/drop_pipeline_worker_spec.rb
new file mode 100644
index 00000000000..5e626112520
--- /dev/null
+++ b/spec/workers/ci/drop_pipeline_worker_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::DropPipelineWorker do
+ include AfterNextHelpers
+
+ let(:pipeline) { create(:ci_pipeline, :running) }
+ let(:failure_reason) { :user_blocked }
+
+ describe '#perform' do
+ subject { described_class.new.perform(pipeline.id, failure_reason) }
+
+ it 'delegates to the service' do
+ expect_next(Ci::DropPipelineService).to receive(:execute).with(pipeline, failure_reason)
+
+ subject
+ end
+
+ it_behaves_like 'an idempotent worker' do
+ let!(:running_build) { create(:ci_build, :running, pipeline: pipeline) }
+ let!(:success_build) { create(:ci_build, :success, pipeline: pipeline) }
+
+ let(:job_args) { [pipeline.id, failure_reason] }
+
+ it 'executes the service', :aggregate_failures do
+ subject
+
+ expect(running_build.reload).to be_failed
+ expect(running_build.failure_reason).to eq(failure_reason.to_s)
+
+ expect(success_build.reload).to be_success
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/initial_pipeline_process_worker_spec.rb b/spec/workers/ci/initial_pipeline_process_worker_spec.rb
new file mode 100644
index 00000000000..5db9287fe96
--- /dev/null
+++ b/spec/workers/ci/initial_pipeline_process_worker_spec.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::InitialPipelineProcessWorker do
+ describe '#perform' do
+ let_it_be(:pipeline) { create(:ci_pipeline, :with_job, status: :created) }
+
+ include_examples 'an idempotent worker' do
+ let(:job_args) { pipeline.id }
+
+ it 'marks the pipeline as pending' do
+ expect(pipeline).to be_created
+
+ subject
+
+ expect(pipeline.reload).to be_pending
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb b/spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb
new file mode 100644
index 00000000000..4690c73d121
--- /dev/null
+++ b/spec/workers/ci/merge_requests/add_todo_when_build_fails_worker_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::MergeRequests::AddTodoWhenBuildFailsWorker do
+ describe '#perform' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pipeline) { create(:ci_pipeline, :detached_merge_request_pipeline) }
+ let_it_be(:job) { create(:ci_build, project: project, pipeline: pipeline, status: :failed) }
+
+ let(:job_args) { job.id }
+
+ subject(:perform_twice) { perform_multiple(job_args, exec_times: 2) }
+
+ include_examples 'an idempotent worker' do
+ it 'executes todo service' do
+ service = double
+ expect(::MergeRequests::AddTodoWhenBuildFailsService).to receive(:new).with(project, nil).and_return(service).twice
+ expect(service).to receive(:execute).with(job).twice
+
+ perform_twice
+ end
+ end
+
+ context 'when job does not exist' do
+ let(:job_args) { 0 }
+
+ it 'returns nil' do
+ expect(described_class.new.perform(job_args)).to eq(nil)
+ end
+ end
+
+ context 'when project does not exist' do
+ before do
+ job.update!(project_id: nil)
+ end
+
+ it 'returns nil' do
+ expect(described_class.new.perform(job_args)).to eq(nil)
+ end
+ end
+
+ context 'when pipeline does not exist' do
+ before do
+ job.update_attribute('pipeline_id', nil)
+ end
+
+ it 'returns nil' do
+ expect(described_class.new.perform(job_args)).to eq(nil)
+ end
+ end
+ end
+end
diff --git a/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb b/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
index 2bdd8345374..ad9c08d02cb 100644
--- a/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
+++ b/spec/workers/ci/pipeline_artifacts/expire_artifacts_worker_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Ci::PipelineArtifacts::ExpireArtifactsWorker do
end
it 'executes a service' do
- expect_next_instance_of(::Ci::PipelineArtifacts::DestroyExpiredArtifactsService) do |instance|
+ expect_next_instance_of(::Ci::PipelineArtifacts::DestroyAllExpiredService) do |instance|
expect(instance).to receive(:execute)
end
diff --git a/spec/workers/ci/test_failure_history_worker_spec.rb b/spec/workers/ci/test_failure_history_worker_spec.rb
index d2896c08209..7530077d4ad 100644
--- a/spec/workers/ci/test_failure_history_worker_spec.rb
+++ b/spec/workers/ci/test_failure_history_worker_spec.rb
@@ -40,8 +40,8 @@ RSpec.describe ::Ci::TestFailureHistoryWorker do
subject
- expect(Ci::TestCase.count).to eq(2)
- expect(Ci::TestCaseFailure.count).to eq(2)
+ expect(Ci::UnitTest.count).to eq(2)
+ expect(Ci::UnitTestFailure.count).to eq(2)
end
end
end
diff --git a/spec/workers/concerns/worker_attributes_spec.rb b/spec/workers/concerns/worker_attributes_spec.rb
new file mode 100644
index 00000000000..a654ecbd3e2
--- /dev/null
+++ b/spec/workers/concerns/worker_attributes_spec.rb
@@ -0,0 +1,74 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe WorkerAttributes do
+ let(:worker) do
+ Class.new do
+ def self.name
+ "TestWorker"
+ end
+
+ include ApplicationWorker
+ end
+ end
+
+ describe '.data_consistency' do
+ context 'with valid data_consistency' do
+ it 'returns correct data_consistency' do
+ worker.data_consistency(:sticky)
+
+ expect(worker.get_data_consistency).to eq(:sticky)
+ end
+ end
+
+ context 'when data_consistency is not provided' do
+ it 'defaults to :always' do
+ expect(worker.get_data_consistency).to eq(:always)
+ end
+ end
+
+ context 'with invalid data_consistency' do
+ it 'raises an exception' do
+ expect { worker.data_consistency(:invalid) }
+ .to raise_error('Invalid data consistency: invalid')
+ end
+ end
+
+ context 'when job is idempotent' do
+ context 'when data_consistency is not :always' do
+ it 'raises an exception' do
+ worker.idempotent!
+
+ expect { worker.data_consistency(:sticky) }
+ .to raise_error("Class can't be marked as idempotent if data_consistency is not set to :always")
+ end
+ end
+
+ context 'when feature_flag is provided' do
+ before do
+ stub_feature_flags(test_feature_flag: false)
+ skip_feature_flags_yaml_validation
+ skip_default_enabled_yaml_check
+ end
+
+ it 'returns correct feature flag value' do
+ worker.data_consistency(:sticky, feature_flag: :test_feature_flag)
+
+ expect(worker.get_data_consistency_feature_flag_enabled?).not_to be_truthy
+ end
+ end
+ end
+ end
+
+ describe '.idempotent!' do
+ context 'when data consistency is not :always' do
+ it 'raises an exception' do
+ worker.data_consistency(:sticky)
+
+ expect { worker.idempotent! }
+ .to raise_error("Class can't be marked as idempotent if data_consistency is not set to :always")
+ end
+ end
+ end
+end
diff --git a/spec/workers/concerns/worker_context_spec.rb b/spec/workers/concerns/worker_context_spec.rb
index 3de37b99aba..ebdb752d900 100644
--- a/spec/workers/concerns/worker_context_spec.rb
+++ b/spec/workers/concerns/worker_context_spec.rb
@@ -103,7 +103,7 @@ RSpec.describe WorkerContext do
describe '#with_context' do
it 'allows modifying context when the job is running' do
worker.new.with_context(user: build_stubbed(:user, username: 'jane-doe')) do
- expect(Labkit::Context.current.to_h).to include('meta.user' => 'jane-doe')
+ expect(Gitlab::ApplicationContext.current).to include('meta.user' => 'jane-doe')
end
end
diff --git a/spec/workers/container_expiration_policy_worker_spec.rb b/spec/workers/container_expiration_policy_worker_spec.rb
index d9a4f6396f8..2d5176e874d 100644
--- a/spec/workers/container_expiration_policy_worker_spec.rb
+++ b/spec/workers/container_expiration_policy_worker_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe ContainerExpirationPolicyWorker do
describe '#perform' do
subject { worker.perform }
- RSpec.shared_examples 'not executing any policy' do
+ shared_examples 'not executing any policy' do
it 'does not run any policy' do
expect(ContainerExpirationPolicyService).not_to receive(:new)
@@ -19,6 +19,21 @@ RSpec.describe ContainerExpirationPolicyWorker do
end
end
+ shared_examples 'handling a taken exclusive lease' do
+ context 'with exclusive lease taken' do
+ before do
+ stub_exclusive_lease_taken(worker.lease_key, timeout: 5.hours)
+ end
+
+ it 'does not do anything' do
+ expect(ContainerExpirationPolicies::CleanupContainerRepositoryWorker).not_to receive(:perform_with_capacity)
+ expect(worker).not_to receive(:runnable_policies)
+
+ expect { subject }.not_to change { ContainerRepository.cleanup_scheduled.count }
+ end
+ end
+ end
+
context 'With no container expiration policies' do
it 'does not execute any policies' do
expect(ContainerRepository).not_to receive(:for_project_id)
@@ -27,66 +42,86 @@ RSpec.describe ContainerExpirationPolicyWorker do
end
end
- context 'with container expiration policies' do
- let_it_be(:container_expiration_policy) { create(:container_expiration_policy, :runnable) }
- let_it_be(:container_repository) { create(:container_repository, project: container_expiration_policy.project) }
+ context 'with throttling enabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_throttling: true)
+ end
- context 'with a valid container expiration policy' do
- it 'schedules the next run' do
- expect { subject }.to change { container_expiration_policy.reload.next_run_at }
+ context 'with loopless disabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_loopless: false)
end
- it 'marks the container repository as scheduled for cleanup' do
- expect { subject }.to change { container_repository.reload.cleanup_scheduled? }.from(false).to(true)
- expect(ContainerRepository.cleanup_scheduled.count).to eq(1)
- end
+ context 'with container expiration policies' do
+ let_it_be(:container_expiration_policy) { create(:container_expiration_policy, :runnable) }
+ let_it_be(:container_repository) { create(:container_repository, project: container_expiration_policy.project) }
- it 'calls the limited capacity worker' do
- expect(ContainerExpirationPolicies::CleanupContainerRepositoryWorker).to receive(:perform_with_capacity)
+ before do
+ expect(worker).to receive(:with_runnable_policy).and_call_original
+ end
- subject
- end
- end
+ context 'with a valid container expiration policy' do
+ it 'schedules the next run' do
+ expect { subject }.to change { container_expiration_policy.reload.next_run_at }
+ end
- context 'with a disabled container expiration policy' do
- before do
- container_expiration_policy.disable!
- end
+ it 'marks the container repository as scheduled for cleanup' do
+ expect { subject }.to change { container_repository.reload.cleanup_scheduled? }.from(false).to(true)
+ expect(ContainerRepository.cleanup_scheduled.count).to eq(1)
+ end
- it 'does not run the policy' do
- expect(ContainerRepository).not_to receive(:for_project_id)
+ it 'calls the limited capacity worker' do
+ expect(ContainerExpirationPolicies::CleanupContainerRepositoryWorker).to receive(:perform_with_capacity)
- expect { subject }.not_to change { ContainerRepository.cleanup_scheduled.count }
- end
- end
+ subject
+ end
+ end
- context 'with an invalid container expiration policy' do
- let(:user) { container_expiration_policy.project.owner }
+ context 'with a disabled container expiration policy' do
+ before do
+ container_expiration_policy.disable!
+ end
- before do
- container_expiration_policy.update_column(:name_regex, '*production')
- end
+ it 'does not run the policy' do
+ expect(ContainerRepository).not_to receive(:for_project_id)
- it 'disables the policy and tracks an error' do
- expect(ContainerRepository).not_to receive(:for_project_id)
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(instance_of(described_class::InvalidPolicyError), container_expiration_policy_id: container_expiration_policy.id)
+ expect { subject }.not_to change { ContainerRepository.cleanup_scheduled.count }
+ end
+ end
- expect { subject }.to change { container_expiration_policy.reload.enabled }.from(true).to(false)
- expect(ContainerRepository.cleanup_scheduled).to be_empty
+ context 'with an invalid container expiration policy' do
+ let(:user) { container_expiration_policy.project.owner }
+
+ before do
+ container_expiration_policy.update_column(:name_regex, '*production')
+ end
+
+ it 'disables the policy and tracks an error' do
+ expect(ContainerRepository).not_to receive(:for_project_id)
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(instance_of(described_class::InvalidPolicyError), container_expiration_policy_id: container_expiration_policy.id)
+
+ expect { subject }.to change { container_expiration_policy.reload.enabled }.from(true).to(false)
+ expect(ContainerRepository.cleanup_scheduled).to be_empty
+ end
+ end
end
- end
- end
- context 'with exclusive lease taken' do
- before do
- stub_exclusive_lease_taken(worker.lease_key, timeout: 5.hours)
+ it_behaves_like 'handling a taken exclusive lease'
end
- it 'does not execute any policy' do
- expect(ContainerExpirationPolicies::CleanupContainerRepositoryWorker).not_to receive(:perform_with_capacity)
- expect(worker).not_to receive(:runnable_policies)
+ context 'with loopless enabled' do
+ before do
+ stub_feature_flags(container_registry_expiration_policies_loopless: true)
+ expect(worker).not_to receive(:with_runnable_policy)
+ end
- expect { subject }.not_to change { ContainerRepository.cleanup_scheduled.count }
+ it 'calls the limited capacity worker' do
+ expect(ContainerExpirationPolicies::CleanupContainerRepositoryWorker).to receive(:perform_with_capacity)
+
+ subject
+ end
+
+ it_behaves_like 'handling a taken exclusive lease'
end
end
diff --git a/spec/workers/database/batched_background_migration_worker_spec.rb b/spec/workers/database/batched_background_migration_worker_spec.rb
new file mode 100644
index 00000000000..b13d1f5c7aa
--- /dev/null
+++ b/spec/workers/database/batched_background_migration_worker_spec.rb
@@ -0,0 +1,121 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Database::BatchedBackgroundMigrationWorker, '#perform', :clean_gitlab_redis_shared_state do
+ include ExclusiveLeaseHelpers
+
+ let(:worker) { described_class.new }
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(execute_batched_migrations_on_schedule: false)
+ end
+
+ it 'does nothing' do
+ expect(worker).not_to receive(:active_migration)
+ expect(worker).not_to receive(:run_active_migration)
+
+ worker.perform
+ end
+ end
+
+ context 'when the feature flag is enabled' do
+ before do
+ stub_feature_flags(execute_batched_migrations_on_schedule: true)
+
+ allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration).and_return(nil)
+ end
+
+ context 'when no active migrations exist' do
+ it 'does nothing' do
+ expect(worker).not_to receive(:run_active_migration)
+
+ worker.perform
+ end
+ end
+
+ context 'when active migrations exist' do
+ let(:job_interval) { 5.minutes }
+ let(:lease_timeout) { 15.minutes }
+ let(:lease_key) { 'batched_background_migration_worker' }
+ let(:migration) { build(:batched_background_migration, :active, interval: job_interval) }
+ let(:interval_variance) { described_class::INTERVAL_VARIANCE }
+
+ before do
+ allow(Gitlab::Database::BackgroundMigration::BatchedMigration).to receive(:active_migration)
+ .and_return(migration)
+
+ allow(migration).to receive(:interval_elapsed?).with(variance: interval_variance).and_return(true)
+ allow(migration).to receive(:reload)
+ end
+
+ context 'when the reloaded migration is no longer active' do
+ it 'does not run the migration' do
+ expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
+
+ expect(migration).to receive(:reload)
+ expect(migration).to receive(:active?).and_return(false)
+
+ expect(worker).not_to receive(:run_active_migration)
+
+ worker.perform
+ end
+ end
+
+ context 'when the interval has not elapsed' do
+ it 'does not run the migration' do
+ expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
+
+ expect(migration).to receive(:interval_elapsed?).with(variance: interval_variance).and_return(false)
+
+ expect(worker).not_to receive(:run_active_migration)
+
+ worker.perform
+ end
+ end
+
+ context 'when the reloaded migration is still active and the interval has elapsed' do
+ it 'runs the migration' do
+ expect_to_obtain_exclusive_lease(lease_key, timeout: lease_timeout)
+
+ expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |instance|
+ expect(instance).to receive(:run_migration_job).with(migration)
+ end
+
+ expect(worker).to receive(:run_active_migration).and_call_original
+
+ worker.perform
+ end
+ end
+
+ context 'when the calculated timeout is less than the minimum allowed' do
+ let(:minimum_timeout) { described_class::MINIMUM_LEASE_TIMEOUT }
+ let(:job_interval) { 2.minutes }
+
+ it 'sets the lease timeout to the minimum value' do
+ expect_to_obtain_exclusive_lease(lease_key, timeout: minimum_timeout)
+
+ expect_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |instance|
+ expect(instance).to receive(:run_migration_job).with(migration)
+ end
+
+ expect(worker).to receive(:run_active_migration).and_call_original
+
+ worker.perform
+ end
+ end
+
+ it 'always cleans up the exclusive lease' do
+ lease = stub_exclusive_lease_taken(lease_key, timeout: lease_timeout)
+
+ expect(lease).to receive(:try_obtain).and_return(true)
+
+ expect(worker).to receive(:run_active_migration).and_raise(RuntimeError, 'I broke')
+ expect(lease).to receive(:cancel)
+
+ expect { worker.perform }.to raise_error(RuntimeError, 'I broke')
+ end
+ end
+ end
+end
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 3bb9db07ff3..5a22529b6d6 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe 'Every Sidekiq worker' do
let(:workers_without_defaults) do
- Gitlab::SidekiqConfig.workers - Gitlab::SidekiqConfig::DEFAULT_WORKERS
+ Gitlab::SidekiqConfig.workers - Gitlab::SidekiqConfig::DEFAULT_WORKERS.values
end
it 'does not use the default queue' do
diff --git a/spec/workers/expire_build_artifacts_worker_spec.rb b/spec/workers/expire_build_artifacts_worker_spec.rb
index 6d73d715d21..3f8da3fb71c 100644
--- a/spec/workers/expire_build_artifacts_worker_spec.rb
+++ b/spec/workers/expire_build_artifacts_worker_spec.rb
@@ -7,7 +7,7 @@ RSpec.describe ExpireBuildArtifactsWorker do
describe '#perform' do
it 'executes a service' do
- expect_next_instance_of(Ci::DestroyExpiredJobArtifactsService) do |instance|
+ expect_next_instance_of(Ci::JobArtifacts::DestroyAllExpiredService) do |instance|
expect(instance).to receive(:execute).and_call_original
end
diff --git a/spec/workers/expire_job_cache_worker_spec.rb b/spec/workers/expire_job_cache_worker_spec.rb
index 95c54a762a4..8efead31a42 100644
--- a/spec/workers/expire_job_cache_worker_spec.rb
+++ b/spec/workers/expire_job_cache_worker_spec.rb
@@ -8,7 +8,8 @@ RSpec.describe ExpireJobCacheWorker do
describe '#perform' do
context 'with a job in the pipeline' do
- let(:job) { create(:ci_build, pipeline: pipeline) }
+ let_it_be(:job) { create(:ci_build, pipeline: pipeline) }
+
let(:job_args) { job.id }
include_examples 'an idempotent worker' do
@@ -31,6 +32,24 @@ RSpec.describe ExpireJobCacheWorker do
subject
end
end
+
+ it 'does not perform extra queries', :aggregate_failures do
+ worker = described_class.new
+ recorder = ActiveRecord::QueryRecorder.new { worker.perform(job.id) }
+
+ occurrences = recorder.data.values.flat_map { |v| v[:occurrences] }
+ project_queries = occurrences.select { |s| s.include?('FROM "projects"') }
+ namespace_queries = occurrences.select { |s| s.include?('FROM "namespaces"') }
+ route_queries = occurrences.select { |s| s.include?('FROM "routes"') }
+
+ # This worker is run 1 million times an hour, so we need to issue as
+ # few queries as possible.
+ expect(recorder.count).to be <= 1
+
+ expect(project_queries.size).to eq(0)
+ expect(namespace_queries.size).to eq(0)
+ expect(route_queries.size).to eq(0)
+ end
end
context 'when there is no job in the pipeline' do
diff --git a/spec/workers/expire_pipeline_cache_worker_spec.rb b/spec/workers/expire_pipeline_cache_worker_spec.rb
index a8c21aa9f83..de42eeeab75 100644
--- a/spec/workers/expire_pipeline_cache_worker_spec.rb
+++ b/spec/workers/expire_pipeline_cache_worker_spec.rb
@@ -18,6 +18,23 @@ RSpec.describe ExpirePipelineCacheWorker do
subject.perform(pipeline.id)
end
+ it 'does not perform extra queries', :aggregate_failures do
+ recorder = ActiveRecord::QueryRecorder.new { subject.perform(pipeline.id) }
+
+ project_queries = recorder.data.values.flat_map { |v| v[:occurrences] }.select { |s| s.include?('FROM "projects"') }
+ namespace_queries = recorder.data.values.flat_map { |v| v[:occurrences] }.select { |s| s.include?('FROM "namespaces"') }
+ route_queries = recorder.data.values.flat_map { |v| v[:occurrences] }.select { |s| s.include?('FROM "routes"') }
+
+ # This worker is run 1 million times an hour, so we need to issue as
+ # few queries as possible.
+ expect(recorder.count).to be <= 6
+
+ # These arise from #update_etag_cache
+ expect(project_queries.size).to eq(1)
+ expect(namespace_queries.size).to eq(1)
+ expect(route_queries.size).to eq(1)
+ end
+
it "doesn't do anything if the pipeline not exist" do
expect_any_instance_of(Ci::ExpirePipelineCacheService).not_to receive(:execute)
expect_any_instance_of(Gitlab::EtagCaching::Store).not_to receive(:touch)
diff --git a/spec/workers/merge_requests/assignees_change_worker_spec.rb b/spec/workers/merge_requests/assignees_change_worker_spec.rb
new file mode 100644
index 00000000000..33478daf8d3
--- /dev/null
+++ b/spec/workers/merge_requests/assignees_change_worker_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::AssigneesChangeWorker do
+ include AfterNextHelpers
+
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:old_assignees) { create_list(:user, 3) }
+
+ let(:user_ids) { old_assignees.map(&:id).to_a }
+ let(:worker) { described_class.new }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [merge_request.id, user.id, user_ids] }
+ end
+
+ describe '#perform' do
+ context 'with a non-existing merge request' do
+ it 'does nothing' do
+ expect(::MergeRequests::HandleAssigneesChangeService).not_to receive(:new)
+
+ worker.perform(non_existing_record_id, user.id, user_ids)
+ end
+ end
+
+ context 'with a non-existing user' do
+ it 'does nothing' do
+ expect(::MergeRequests::HandleAssigneesChangeService).not_to receive(:new)
+
+ worker.perform(merge_request.id, non_existing_record_id, user_ids)
+ end
+ end
+
+ context 'when there are no changes' do
+ it 'does nothing' do
+ expect(::MergeRequests::HandleAssigneesChangeService).not_to receive(:new)
+
+ worker.perform(merge_request.id, user.id, merge_request.assignee_ids)
+ end
+ end
+
+ context 'when the old users cannot be found' do
+ it 'does nothing' do
+ expect(::MergeRequests::HandleAssigneesChangeService).not_to receive(:new)
+
+ worker.perform(merge_request.id, user.id, [non_existing_record_id])
+ end
+ end
+
+ it 'uses MergeRequests::HandleAssigneesChangeService to handle the changes' do
+ expect_next(::MergeRequests::HandleAssigneesChangeService)
+ .to receive(:execute).with(merge_request, match_array(old_assignees), execute_hooks: true)
+
+ worker.perform(merge_request.id, user.id, user_ids)
+ end
+ end
+end
diff --git a/spec/workers/merge_requests/create_pipeline_worker_spec.rb b/spec/workers/merge_requests/create_pipeline_worker_spec.rb
new file mode 100644
index 00000000000..8efce5220be
--- /dev/null
+++ b/spec/workers/merge_requests/create_pipeline_worker_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::CreatePipelineWorker do
+ subject(:worker) { described_class.new }
+
+ describe '#perform' do
+ let(:user) { create(:user) }
+ let(:project) { create(:project) }
+ let(:merge_request) { create(:merge_request) }
+
+ context 'when the objects exist' do
+ it 'calls the merge request create pipeline service and calls update head pipeline' do
+ aggregate_failures do
+ expect_next_instance_of(MergeRequests::CreatePipelineService, project, user) do |service|
+ expect(service).to receive(:execute).with(merge_request)
+ end
+
+ expect(MergeRequest).to receive(:find_by_id).with(merge_request.id).and_return(merge_request)
+ expect(merge_request).to receive(:update_head_pipeline)
+
+ subject.perform(project.id, user.id, merge_request.id)
+ end
+ end
+ end
+
+ shared_examples 'when object does not exist' do
+ it 'does not call the create pipeline service' do
+ expect(MergeRequests::CreatePipelineService).not_to receive(:new)
+
+ expect { subject.perform(project.id, user.id, merge_request.id) }
+ .not_to raise_exception
+ end
+ end
+
+ context 'when the project does not exist' do
+ before do
+ project.destroy!
+ end
+
+ it_behaves_like 'when object does not exist'
+ end
+
+ context 'when the user does not exist' do
+ before do
+ user.destroy!
+ end
+
+ it_behaves_like 'when object does not exist'
+ end
+
+ context 'when the merge request does not exist' do
+ before do
+ merge_request.destroy!
+ end
+
+ it_behaves_like 'when object does not exist'
+ end
+ end
+end
diff --git a/spec/workers/merge_requests/handle_assignees_change_worker_spec.rb b/spec/workers/merge_requests/handle_assignees_change_worker_spec.rb
new file mode 100644
index 00000000000..4b45f3562d6
--- /dev/null
+++ b/spec/workers/merge_requests/handle_assignees_change_worker_spec.rb
@@ -0,0 +1,62 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::HandleAssigneesChangeWorker do
+ include AfterNextHelpers
+
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:old_assignees) { create_list(:user, 3) }
+
+ let(:user_ids) { old_assignees.map(&:id).to_a }
+ let(:options) { {} }
+ let(:worker) { described_class.new }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [merge_request.id, user.id, user_ids, options] }
+ end
+
+ describe '#perform' do
+ it 'calls MergeRequests::HandleAssigneesChangeService#execute to handle the changes' do
+ expect_next(::MergeRequests::HandleAssigneesChangeService)
+ .to receive(:execute).with(merge_request, match_array(old_assignees), options)
+
+ worker.perform(merge_request.id, user.id, user_ids, options)
+ end
+
+ context 'when there are no changes' do
+ it 'still calls MergeRequests::HandleAssigneesChangeService#execute' do
+ expect_next(::MergeRequests::HandleAssigneesChangeService)
+ .to receive(:execute).with(merge_request, [], options)
+
+ worker.perform(merge_request.id, user.id, merge_request.assignee_ids, options)
+ end
+ end
+
+ context 'when the old assignees cannot be found' do
+ it 'still calls MergeRequests::HandleAssigneesChangeService#execute' do
+ expect_next(::MergeRequests::HandleAssigneesChangeService)
+ .to receive(:execute).with(merge_request, [], options)
+
+ worker.perform(merge_request.id, user.id, [non_existing_record_id], options)
+ end
+ end
+
+ context 'with a non-existing merge request' do
+ it 'does nothing' do
+ expect(::MergeRequests::HandleAssigneesChangeService).not_to receive(:new)
+
+ worker.perform(non_existing_record_id, user.id, user_ids, options)
+ end
+ end
+
+ context 'with a non-existing user' do
+ it 'does nothing' do
+ expect(::MergeRequests::HandleAssigneesChangeService).not_to receive(:new)
+
+ worker.perform(merge_request.id, non_existing_record_id, user_ids, options)
+ end
+ end
+ end
+end
diff --git a/spec/workers/merge_requests/resolve_todos_worker_spec.rb b/spec/workers/merge_requests/resolve_todos_worker_spec.rb
new file mode 100644
index 00000000000..223b8b6803c
--- /dev/null
+++ b/spec/workers/merge_requests/resolve_todos_worker_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe MergeRequests::ResolveTodosWorker do
+ include AfterNextHelpers
+
+ let_it_be(:merge_request) { create(:merge_request) }
+ let_it_be(:user) { create(:user) }
+
+ let(:worker) { described_class.new }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [merge_request.id, user.id] }
+ end
+
+ describe '#perform' do
+ it 'calls MergeRequests::ResolveTodosService#execute' do
+ expect_next(::MergeRequests::ResolveTodosService, merge_request, user)
+ .to receive(:execute)
+
+ worker.perform(merge_request.id, user.id)
+ end
+
+ context 'with a non-existing merge request' do
+ it 'does nothing' do
+ expect(::MergeRequests::ResolveTodosService).not_to receive(:new)
+
+ worker.perform(non_existing_record_id, user.id)
+ end
+ end
+
+ context 'with a non-existing user' do
+ it 'does nothing' do
+ expect(::MergeRequests::ResolveTodosService).not_to receive(:new)
+
+ worker.perform(merge_request.id, non_existing_record_id)
+ end
+ end
+ end
+end
diff --git a/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb b/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb
index 24143e8cf8a..3b94eb0d1be 100644
--- a/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb
+++ b/spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb
@@ -3,45 +3,37 @@
require 'spec_helper'
RSpec.describe Namespaces::InProductMarketingEmailsWorker, '#perform' do
- context 'when the application setting is enabled' do
- before do
- stub_application_setting(in_product_marketing_emails_enabled: true)
+ using RSpec::Parameterized::TableSyntax
+
+ # Running this in EE would call the overridden method, which can't be tested in CE.
+ # The EE code is covered in a separate EE spec.
+ context 'not on gitlab.com', unless: Gitlab.ee? do
+ let(:is_gitlab_com) { false }
+
+ where(:in_product_marketing_emails_enabled, :experiment_active, :executes_service) do
+ true | true | 1
+ true | false | 1
+ false | false | 0
+ false | true | 0
end
- context 'when the experiment is inactive' do
- before do
- stub_experiment(in_product_marketing_emails: false)
- end
-
- it 'does not execute the in product marketing emails service' do
- expect(Namespaces::InProductMarketingEmailsService).not_to receive(:send_for_all_tracks_and_intervals)
-
- subject.perform
- end
- end
-
- context 'when the experiment is active' do
- before do
- stub_experiment(in_product_marketing_emails: true)
- end
-
- it 'calls the send_for_all_tracks_and_intervals method on the in product marketing emails service' do
- expect(Namespaces::InProductMarketingEmailsService).to receive(:send_for_all_tracks_and_intervals)
-
- subject.perform
- end
+ with_them do
+ it_behaves_like 'in-product marketing email'
end
end
- context 'when the application setting is disabled' do
- before do
- stub_application_setting(in_product_marketing_emails_enabled: false)
- end
+ context 'on gitlab.com' do
+ let(:is_gitlab_com) { true }
- it 'does not execute the in product marketing emails service' do
- expect(Namespaces::InProductMarketingEmailsService).not_to receive(:send_for_all_tracks_and_intervals)
+ where(:in_product_marketing_emails_enabled, :experiment_active, :executes_service) do
+ true | true | 1
+ true | false | 0
+ false | false | 0
+ false | true | 0
+ end
- subject.perform
+ with_them do
+ it_behaves_like 'in-product marketing email'
end
end
end
diff --git a/spec/workers/new_issue_worker_spec.rb b/spec/workers/new_issue_worker_spec.rb
index ec129ad3380..35b83c3bee8 100644
--- a/spec/workers/new_issue_worker_spec.rb
+++ b/spec/workers/new_issue_worker_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe NewIssueWorker do
+ include AfterNextHelpers
+
describe '#perform' do
let(:worker) { described_class.new }
@@ -49,7 +51,7 @@ RSpec.describe NewIssueWorker do
expect(Notify).not_to receive(:new_issue_email)
.with(mentioned.id, issue.id, NotificationReason::MENTIONED)
- expect(Gitlab::AppLogger).to receive(:warn).with(message: 'Skipping sending notifications', user: user.id, klass: issue.class, object_id: issue.id)
+ expect(Gitlab::AppLogger).to receive(:warn).with(message: 'Skipping sending notifications', user: user.id, klass: issue.class.to_s, object_id: issue.id)
worker.perform(issue.id, user.id)
end
@@ -80,6 +82,13 @@ RSpec.describe NewIssueWorker do
worker.perform(issue.id, user.id)
end
+
+ it 'calls Issues::AfterCreateService' do
+ expect_next(::Issues::AfterCreateService)
+ .to receive(:execute)
+
+ worker.perform(issue.id, user.id)
+ end
end
end
end
diff --git a/spec/workers/new_merge_request_worker_spec.rb b/spec/workers/new_merge_request_worker_spec.rb
index 0d64973b0fa..358939a963a 100644
--- a/spec/workers/new_merge_request_worker_spec.rb
+++ b/spec/workers/new_merge_request_worker_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe NewMergeRequestWorker do
expect(Notify).not_to receive(:new_merge_request_email)
.with(mentioned.id, merge_request.id, NotificationReason::MENTIONED)
- expect(Gitlab::AppLogger).to receive(:warn).with(message: 'Skipping sending notifications', user: user.id, klass: merge_request.class, object_id: merge_request.id)
+ expect(Gitlab::AppLogger).to receive(:warn).with(message: 'Skipping sending notifications', user: user.id, klass: merge_request.class.to_s, object_id: merge_request.id)
worker.perform(merge_request.id, user.id)
end
diff --git a/spec/workers/packages/go/sync_packages_worker_spec.rb b/spec/workers/packages/go/sync_packages_worker_spec.rb
new file mode 100644
index 00000000000..ad1a85b26e4
--- /dev/null
+++ b/spec/workers/packages/go/sync_packages_worker_spec.rb
@@ -0,0 +1,101 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Go::SyncPackagesWorker, type: :worker do
+ include_context 'basic Go module'
+
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PUBLIC)
+ end
+
+ def perform(ref_name, path)
+ described_class.new.perform(project.id, ref_name, path)
+ end
+
+ def validate_package(package, mod, ver)
+ expect(package).not_to be_nil
+ expect(package.name).to eq(mod.name)
+ expect(package.version).to eq(ver.name)
+ expect(package.package_type).to eq('golang')
+ expect(package.created_at).to eq(ver.commit.committed_date)
+ expect(package.package_files.count).to eq(2)
+ end
+
+ shared_examples 'it creates a package' do |path, version, exists: false|
+ subject { perform(version, path) }
+
+ it "returns a package for example.com/project#{path.empty? ? '' : '/' + path}@#{version}" do
+ expect { subject }
+ .to change { project.packages.count }.by(exists ? 0 : 1)
+ .and change { Packages::PackageFile.count }.by(exists ? 0 : 2)
+
+ mod = create :go_module, project: project, path: path
+ ver = create :go_module_version, :tagged, mod: mod, name: version
+ validate_package(subject, mod, ver)
+ end
+ end
+
+ describe '#perform' do
+ context 'with no existing packages' do
+ it_behaves_like 'it creates a package', '', 'v1.0.1'
+ it_behaves_like 'it creates a package', '', 'v1.0.2'
+ it_behaves_like 'it creates a package', '', 'v1.0.3'
+ it_behaves_like 'it creates a package', 'mod', 'v1.0.3'
+ it_behaves_like 'it creates a package', 'v2', 'v2.0.0'
+ end
+
+ context 'with existing packages' do
+ before do
+ mod = create :go_module, project: project
+ ver = create :go_module_version, :tagged, mod: mod, name: 'v1.0.1'
+ Packages::Go::CreatePackageService.new(project, nil, version: ver).execute
+ end
+
+ it_behaves_like 'it creates a package', '', 'v1.0.1', exists: true
+ it_behaves_like 'it creates a package', '', 'v1.0.2'
+ it_behaves_like 'it creates a package', '', 'v1.0.3'
+ it_behaves_like 'it creates a package', 'mod', 'v1.0.3'
+ it_behaves_like 'it creates a package', 'v2', 'v2.0.0'
+ end
+
+ context 'with a package that exceeds project limits' do
+ before do
+ Plan.default.actual_limits.update!(golang_max_file_size: 1)
+ end
+
+ it 'logs an exception' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(instance_of(::Packages::Go::CreatePackageService::GoZipSizeError))
+
+ perform('v2.0.0', 'v2')
+ end
+ end
+
+ where(:path, :version) do
+ [
+ ['', 'v1.0.1'],
+ ['', 'v1.0.2'],
+ ['', 'v1.0.3'],
+ ['mod', 'v1.0.3'],
+ ['v2', 'v2.0.0']
+ ]
+ end
+
+ with_them do
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [project.id, version, path] }
+
+ it 'creates a package' do
+ expect { subject }
+ .to change { project.packages.count }.by(1)
+ .and change { Packages::PackageFile.count }.by(2)
+
+ mod = create :go_module, project: project, path: path
+ ver = create :go_module_version, :tagged, mod: mod, name: version
+ package = ::Packages::Go::PackageFinder.new(project, mod.name, ver.name).execute
+ validate_package(package, mod, ver)
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/packages/maven/metadata/sync_worker_spec.rb b/spec/workers/packages/maven/metadata/sync_worker_spec.rb
index 7e0f3616491..10482b3e327 100644
--- a/spec/workers/packages/maven/metadata/sync_worker_spec.rb
+++ b/spec/workers/packages/maven/metadata/sync_worker_spec.rb
@@ -61,9 +61,10 @@ RSpec.describe Packages::Maven::Metadata::SyncWorker, type: :worker do
let(:project) { create(:project) }
it 'does not create the updated metadata files' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:message, 'Non existing versionless package(s). Nothing to do.')
+
expect { subject }
.to change { ::Packages::PackageFile.count }.by(0)
- .and raise_error(described_class::SyncError, 'Non existing versionless package')
end
end
@@ -146,9 +147,10 @@ RSpec.describe Packages::Maven::Metadata::SyncWorker, type: :worker do
let(:project) { create(:project) }
it 'does not create the updated metadata files' do
+ expect(worker).to receive(:log_extra_metadata_on_done).with(:message, 'Non existing versionless package(s). Nothing to do.')
+
expect { subject }
.to change { ::Packages::PackageFile.count }.by(0)
- .and raise_error(described_class::SyncError, 'Non existing versionless package')
end
end
diff --git a/spec/workers/packages/rubygems/extraction_worker_spec.rb b/spec/workers/packages/rubygems/extraction_worker_spec.rb
new file mode 100644
index 00000000000..15c0a3be90c
--- /dev/null
+++ b/spec/workers/packages/rubygems/extraction_worker_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Packages::Rubygems::ExtractionWorker, type: :worker do
+ describe '#perform' do
+ let_it_be(:package) { create(:rubygems_package) }
+
+ let(:package_file) { package.package_files.first }
+ let(:package_file_id) { package_file.id }
+ let(:package_name) { 'TempProject.TempPackage' }
+ let(:package_version) { '1.0.0' }
+ let(:job_args) { package_file_id }
+
+ subject { described_class.new.perform(*job_args) }
+
+ include_examples 'an idempotent worker' do
+ it 'processes the gem', :aggregate_failures do
+ expect { subject }
+ .to change { Packages::Package.count }.by(0)
+ .and change { Packages::PackageFile.count }.by(2)
+
+ expect(Packages::Package.last.id).to eq(package.id)
+ expect(package.name).not_to eq(package_name)
+ end
+ end
+
+ it 'handles a processing failure', :aggregate_failures do
+ expect(::Packages::Rubygems::ProcessGemService).to receive(:new)
+ .and_raise(::Packages::Rubygems::ProcessGemService::ExtractionError)
+
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ instance_of(::Packages::Rubygems::ProcessGemService::ExtractionError),
+ project_id: package.project_id
+ )
+
+ expect { subject }
+ .to change { Packages::Package.count }.by(-1)
+ .and change { Packages::PackageFile.count }.by(-2)
+ end
+
+ context 'when there is no package file' do
+ let(:package_file_id) { 999999 }
+
+ it 'returns without action' do
+ expect(::Packages::Rubygems::ProcessGemService).not_to receive(:new)
+
+ expect { subject }
+ .to change { Packages::Package.count }.by(0)
+ .and change { Packages::PackageFile.count }.by(0)
+ end
+ end
+ end
+end
diff --git a/spec/workers/pages_update_configuration_worker_spec.rb b/spec/workers/pages_update_configuration_worker_spec.rb
index ff3727646c7..7cceeaa52d6 100644
--- a/spec/workers/pages_update_configuration_worker_spec.rb
+++ b/spec/workers/pages_update_configuration_worker_spec.rb
@@ -53,7 +53,7 @@ RSpec.describe PagesUpdateConfigurationWorker do
end
it "doesn't schedule a worker if updates on legacy storage are disabled", :sidekiq_inline do
- stub_feature_flags(pages_update_legacy_storage: false)
+ allow(Settings.pages.local_store).to receive(:enabled).and_return(false)
expect(Projects::UpdatePagesConfigurationService).not_to receive(:new)
diff --git a/spec/workers/post_receive_spec.rb b/spec/workers/post_receive_spec.rb
index be501318920..f7fd1b1a0a7 100644
--- a/spec/workers/post_receive_spec.rb
+++ b/spec/workers/post_receive_spec.rb
@@ -102,7 +102,10 @@ RSpec.describe PostReceive do
perform
- expect_snowplow_event(category: 'empty_repo_upload', action: 'initial_write', context: [{ schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/0-3-0', data: anything }])
+ expect_snowplow_event(category: 'empty_repo_upload', action: 'initial_write', context: [{
+ schema: 'iglu:com.gitlab/gitlab_experiment/jsonschema/1-0-0',
+ data: anything
+ }])
end
it 'does not track an event for the empty_repo_upload experiment when project is not empty', :snowplow do
diff --git a/spec/workers/projects/post_creation_worker_spec.rb b/spec/workers/projects/post_creation_worker_spec.rb
new file mode 100644
index 00000000000..b15b7b76b56
--- /dev/null
+++ b/spec/workers/projects/post_creation_worker_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::PostCreationWorker do
+ let_it_be(:user) { create :user }
+
+ let(:worker) { described_class.new }
+ let(:project) { create(:project) }
+
+ subject { described_class.new.perform(project.id) }
+
+ it_behaves_like 'an idempotent worker' do
+ let(:job_args) { [project.id] }
+
+ describe 'Prometheus service' do
+ context 'project is nil' do
+ let(:job_args) { [nil] }
+
+ it 'does not create prometheus service' do
+ expect { subject }.not_to change { Service.count }
+ end
+ end
+
+ context 'when project has access to shared service' do
+ context 'Prometheus application is shared via group cluster' do
+ let(:project) { create(:project, group: group) }
+ let(:cluster) { create(:cluster, :group, groups: [group]) }
+ let(:group) do
+ create(:group).tap do |group|
+ group.add_owner(user)
+ end
+ end
+
+ before do
+ create(:clusters_applications_prometheus, :installed, cluster: cluster)
+ end
+
+ it 'creates PrometheusService record', :aggregate_failures do
+ subject
+
+ service = project.prometheus_service
+ expect(service.active).to be true
+ expect(service.manual_configuration?).to be false
+ expect(service.persisted?).to be true
+ end
+ end
+
+ context 'Prometheus application is shared via instance cluster' do
+ let(:cluster) { create(:cluster, :instance) }
+
+ before do
+ create(:clusters_applications_prometheus, :installed, cluster: cluster)
+ end
+
+ it 'creates PrometheusService record', :aggregate_failures do
+ subject
+
+ service = project.prometheus_service
+ expect(service.active).to be true
+ expect(service.manual_configuration?).to be false
+ expect(service.persisted?).to be true
+ end
+
+ it 'cleans up the invalid record and tracks the exception', :aggregate_failures do
+ invalid_service_record = build(:prometheus_service, properties: { api_url: nil, manual_configuration: true }.to_json)
+ allow(PrometheusService).to receive(:new).and_return(invalid_service_record)
+
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(an_instance_of(ActiveRecord::RecordInvalid), include(extra: { project_id: a_kind_of(Integer) })).twice
+ subject
+
+ expect(project.prometheus_service).to be_nil
+ end
+ end
+
+ context 'shared Prometheus application is not available' do
+ it 'does not persist PrometheusService record', :aggregate_failures do
+ subject
+
+ expect(project.prometheus_service).to be_nil
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/workers/remove_expired_members_worker_spec.rb b/spec/workers/remove_expired_members_worker_spec.rb
index 5642de05731..6d0d4aeef89 100644
--- a/spec/workers/remove_expired_members_worker_spec.rb
+++ b/spec/workers/remove_expired_members_worker_spec.rb
@@ -29,6 +29,15 @@ RSpec.describe RemoveExpiredMembersWorker do
worker.perform
expect(non_expiring_project_member.reload).to be_present
end
+
+ it 'adds context to resulting jobs' do
+ worker.perform
+
+ new_job = Sidekiq::Worker.jobs.last
+
+ expect(new_job).to include('meta.project' => expired_project_member.project.full_path,
+ 'meta.user' => expired_project_member.user.username)
+ end
end
context 'project bots' do
@@ -98,6 +107,15 @@ RSpec.describe RemoveExpiredMembersWorker do
worker.perform
expect(non_expiring_group_member.reload).to be_present
end
+
+ it 'adds context to resulting jobs' do
+ worker.perform
+
+ new_job = Sidekiq::Worker.jobs.last
+
+ expect(new_job).to include('meta.root_namespace' => expired_group_member.group.full_path,
+ 'meta.user' => expired_group_member.user.username)
+ end
end
context 'when the last group owner expires' do
diff --git a/spec/workers/repository_check/dispatch_worker_spec.rb b/spec/workers/repository_check/dispatch_worker_spec.rb
index 5e1bc76ec8e..829abc7d895 100644
--- a/spec/workers/repository_check/dispatch_worker_spec.rb
+++ b/spec/workers/repository_check/dispatch_worker_spec.rb
@@ -42,5 +42,12 @@ RSpec.describe RepositoryCheck::DispatchWorker do
subject.perform
end
+
+ it 'logs unhealthy shards' do
+ log_data = { message: "Excluding unhealthy shards", failed_checks: [{ labels: { shard: unhealthy_shard_name }, message: '14:Connect Failed', status: 'failed' }], class: described_class.name }
+ expect(Gitlab::AppLogger).to receive(:error).with(a_hash_including(log_data))
+
+ subject.perform
+ end
end
end
diff --git a/spec/workers/ssh_keys/expired_notification_worker_spec.rb b/spec/workers/ssh_keys/expired_notification_worker_spec.rb
new file mode 100644
index 00000000000..249ee404870
--- /dev/null
+++ b/spec/workers/ssh_keys/expired_notification_worker_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SshKeys::ExpiredNotificationWorker, type: :worker do
+ subject(:worker) { described_class.new }
+
+ it 'uses a cronjob queue' do
+ expect(worker.sidekiq_options_hash).to include(
+ 'queue' => 'cronjob:ssh_keys_expired_notification',
+ 'queue_namespace' => :cronjob
+ )
+ end
+
+ describe '#perform' do
+ let_it_be(:user) { create(:user) }
+
+ context 'with a key expiring today' do
+ let_it_be_with_reload(:expired_today) { create(:key, expires_at: Time.current, user: user) }
+
+ it 'invokes the notification service' do
+ expect_next_instance_of(Keys::ExpiryNotificationService) do |expiry_service|
+ expect(expiry_service).to receive(:execute)
+ end
+
+ worker.perform
+ end
+
+ it 'updates notified column' do
+ expect { worker.perform }.to change { expired_today.reload.expiry_notification_delivered_at }
+ end
+
+ include_examples 'an idempotent worker' do
+ subject do
+ perform_multiple(worker: worker)
+ end
+ end
+
+ context 'when feature is not enabled' do
+ before do
+ stub_feature_flags(ssh_key_expiration_email_notification: false)
+ end
+
+ it 'does not update notified column' do
+ expect { worker.perform }.not_to change { expired_today.reload.expiry_notification_delivered_at }
+ end
+ end
+ end
+
+ context 'when key has expired in the past' do
+ let_it_be(:expired_past) { create(:key, expires_at: 1.day.ago, user: user) }
+
+ it 'does not update notified column' do
+ expect { worker.perform }.not_to change { expired_past.reload.expiry_notification_delivered_at }
+ end
+ end
+ end
+end
diff --git a/spec/workers/ssh_keys/expiring_soon_notification_worker_spec.rb b/spec/workers/ssh_keys/expiring_soon_notification_worker_spec.rb
new file mode 100644
index 00000000000..f9276c86cdf
--- /dev/null
+++ b/spec/workers/ssh_keys/expiring_soon_notification_worker_spec.rb
@@ -0,0 +1,66 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SshKeys::ExpiringSoonNotificationWorker, type: :worker do
+ subject(:worker) { described_class.new }
+
+ it 'uses a cronjob queue' do
+ expect(worker.sidekiq_options_hash).to include(
+ 'queue' => 'cronjob:ssh_keys_expiring_soon_notification',
+ 'queue_namespace' => :cronjob
+ )
+ end
+
+ describe '#perform' do
+ let_it_be(:user) { create(:user) }
+
+ context 'with a key expiring soon' do
+ let_it_be_with_reload(:expiring_soon) { create(:key, expires_at: 6.days.from_now, user: user) }
+
+ it 'invokes the notification service' do
+ expect_next_instance_of(Keys::ExpiryNotificationService) do |expiry_service|
+ expect(expiry_service).to receive(:execute)
+ end
+
+ worker.perform
+ end
+
+ it 'updates notified column' do
+ expect { worker.perform }.to change { expiring_soon.reload.before_expiry_notification_delivered_at }
+ end
+
+ include_examples 'an idempotent worker' do
+ subject do
+ perform_multiple(worker: worker)
+ end
+ end
+
+ context 'when feature is not enabled' do
+ before do
+ stub_feature_flags(ssh_key_expiration_email_notification: false)
+ end
+
+ it 'does not update notified column' do
+ expect { worker.perform }.not_to change { expiring_soon.reload.before_expiry_notification_delivered_at }
+ end
+ end
+ end
+
+ context 'when key has expired in the past' do
+ let_it_be(:expired_past) { create(:key, expires_at: 1.day.ago, user: user) }
+
+ it 'does not update notified column' do
+ expect { worker.perform }.not_to change { expired_past.reload.before_expiry_notification_delivered_at }
+ end
+ end
+
+ context 'when key is not expiring soon' do
+ let_it_be(:expires_future) { create(:key, expires_at: 8.days.from_now, user: user) }
+
+ it 'does not update notified column' do
+ expect { worker.perform }.not_to change { expires_future.reload.before_expiry_notification_delivered_at }
+ end
+ end
+ end
+end
diff --git a/spec/workers/todos_destroyer/destroyed_issuable_worker_spec.rb b/spec/workers/todos_destroyer/destroyed_issuable_worker_spec.rb
new file mode 100644
index 00000000000..6ccad25ad76
--- /dev/null
+++ b/spec/workers/todos_destroyer/destroyed_issuable_worker_spec.rb
@@ -0,0 +1,15 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe TodosDestroyer::DestroyedIssuableWorker do
+ let(:job_args) { [1, 'MergeRequest'] }
+
+ it 'calls the Todos::Destroy::DestroyedIssuableService' do
+ expect_next_instance_of(::Todos::Destroy::DestroyedIssuableService, *job_args) do |service|
+ expect(service).to receive(:execute)
+ end
+
+ described_class.new.perform(*job_args)
+ end
+end