Diffstat (limited to 'spec')
-rw-r--r--spec/commands/metrics_server/metrics_server_spec.rb3
-rw-r--r--spec/commands/sidekiq_cluster/cli_spec.rb12
-rw-r--r--spec/components/previews/pajamas/button_component_preview.rb6
-rw-r--r--spec/config/inject_enterprise_edition_module_spec.rb2
-rw-r--r--spec/config/mail_room_spec.rb8
-rw-r--r--spec/contracts/provider/helpers/contract_source_helper.rb15
-rw-r--r--spec/contracts/provider/pact_helpers/project/merge_requests/show/get_diffs_batch_helper.rb2
-rw-r--r--spec/contracts/provider/pact_helpers/project/merge_requests/show/get_diffs_metadata_helper.rb2
-rw-r--r--spec/contracts/provider/pact_helpers/project/merge_requests/show/get_discussions_helper.rb2
-rw-r--r--spec/contracts/provider/pact_helpers/project/pipeline_schedules/edit/put_edit_a_pipeline_schedule_helper.rb2
-rw-r--r--spec/contracts/provider/pact_helpers/project/pipelines/index/get_list_project_pipelines_helper.rb2
-rw-r--r--spec/contracts/provider/pact_helpers/project/pipelines/new/post_create_a_new_pipeline_helper.rb2
-rw-r--r--spec/contracts/provider/pact_helpers/project/pipelines/show/delete_pipeline_helper.rb2
-rw-r--r--spec/contracts/provider/pact_helpers/project/pipelines/show/get_pipeline_header_data_helper.rb2
-rw-r--r--spec/contracts/provider_specs/helpers/provider/contract_source_helper_spec.rb23
-rw-r--r--spec/contracts/publish-contracts.sh50
-rw-r--r--spec/controllers/admin/application_settings/appearances_controller_spec.rb2
-rw-r--r--spec/controllers/admin/application_settings_controller_spec.rb46
-rw-r--r--spec/controllers/admin/clusters_controller_spec.rb4
-rw-r--r--spec/controllers/concerns/check_rate_limit_spec.rb4
-rw-r--r--spec/controllers/concerns/content_security_policy_patch_spec.rb116
-rw-r--r--spec/controllers/groups/clusters_controller_spec.rb4
-rw-r--r--spec/controllers/groups/imports_controller_spec.rb2
-rw-r--r--spec/controllers/import/available_namespaces_controller_spec.rb109
-rw-r--r--spec/controllers/import/bulk_imports_controller_spec.rb85
-rw-r--r--spec/controllers/import/github_controller_spec.rb41
-rw-r--r--spec/controllers/import/phabricator_controller_spec.rb13
-rw-r--r--spec/controllers/projects/artifacts_controller_spec.rb32
-rw-r--r--spec/controllers/projects/clusters_controller_spec.rb4
-rw-r--r--spec/controllers/projects/deploy_keys_controller_spec.rb5
-rw-r--r--spec/controllers/projects/design_management/designs/resized_image_controller_spec.rb28
-rw-r--r--spec/controllers/projects/environments_controller_spec.rb88
-rw-r--r--spec/controllers/projects/group_links_controller_spec.rb77
-rw-r--r--spec/controllers/projects/merge_requests/creations_controller_spec.rb2
-rw-r--r--spec/controllers/projects/merge_requests/diffs_controller_spec.rb55
-rw-r--r--spec/controllers/projects/merge_requests_controller_spec.rb32
-rw-r--r--spec/controllers/projects/pages_domains_controller_spec.rb6
-rw-r--r--spec/controllers/projects/pipelines_controller_spec.rb7
-rw-r--r--spec/controllers/projects/protected_branches_controller_spec.rb29
-rw-r--r--spec/controllers/projects/releases/evidences_controller_spec.rb2
-rw-r--r--spec/controllers/registrations/welcome_controller_spec.rb6
-rw-r--r--spec/controllers/registrations_controller_spec.rb15
-rw-r--r--spec/controllers/uploads_controller_spec.rb74
-rw-r--r--spec/db/docs_spec.rb53
-rw-r--r--spec/db/migration_spec.rb2
-rw-r--r--spec/db/schema_spec.rb10
-rw-r--r--spec/factories/abuse_reports.rb1
-rw-r--r--spec/factories/analytics/cycle_analytics/aggregations.rb2
-rw-r--r--spec/factories/appearances.rb4
-rw-r--r--spec/factories/bulk_import/entities.rb1
-rw-r--r--spec/factories/ci/builds.rb14
-rw-r--r--spec/factories/ci/job_artifacts.rb8
-rw-r--r--spec/factories/ci/runner_machines.rb8
-rw-r--r--spec/factories/design_management/designs.rb5
-rw-r--r--spec/factories/groups.rb2
-rw-r--r--spec/factories/integrations.rb10
-rw-r--r--spec/factories/ml/candidates.rb9
-rw-r--r--spec/factories/personal_access_tokens.rb6
-rw-r--r--spec/factories/projects/build_artifacts_size_refreshes.rb4
-rw-r--r--spec/factories/wiki_pages.rb2
-rw-r--r--spec/features/abuse_report_spec.rb152
-rw-r--r--spec/features/admin/admin_broadcast_messages_spec.rb98
-rw-r--r--spec/features/admin/admin_groups_spec.rb18
-rw-r--r--spec/features/admin/admin_projects_spec.rb7
-rw-r--r--spec/features/admin/admin_sees_background_migrations_spec.rb31
-rw-r--r--spec/features/admin/admin_users_spec.rb6
-rw-r--r--spec/features/admin/dashboard_spec.rb3
-rw-r--r--spec/features/admin/users/users_spec.rb6
-rw-r--r--spec/features/callouts/registration_enabled_spec.rb2
-rw-r--r--spec/features/commit_spec.rb4
-rw-r--r--spec/features/dashboard/activity_spec.rb2
-rw-r--r--spec/features/dashboard/groups_list_spec.rb2
-rw-r--r--spec/features/dashboard/issuables_counter_spec.rb27
-rw-r--r--spec/features/dashboard/issues_spec.rb2
-rw-r--r--spec/features/dashboard/merge_requests_spec.rb4
-rw-r--r--spec/features/dashboard/milestones_spec.rb2
-rw-r--r--spec/features/dashboard/navbar_spec.rb16
-rw-r--r--spec/features/dashboard/projects_spec.rb2
-rw-r--r--spec/features/dashboard/snippets_spec.rb2
-rw-r--r--spec/features/dashboard/todos/todos_spec.rb55
-rw-r--r--spec/features/dashboard/user_filters_projects_spec.rb2
-rw-r--r--spec/features/error_tracking/user_sees_error_index_spec.rb2
-rw-r--r--spec/features/global_search_spec.rb4
-rw-r--r--spec/features/groups/import_export/connect_instance_spec.rb6
-rw-r--r--spec/features/groups/import_export/migration_history_spec.rb4
-rw-r--r--spec/features/groups/labels/sort_labels_spec.rb20
-rw-r--r--spec/features/groups/members/manage_members_spec.rb5
-rw-r--r--spec/features/groups/members/sort_members_spec.rb4
-rw-r--r--spec/features/groups/merge_requests_spec.rb2
-rw-r--r--spec/features/groups/milestones_sorting_spec.rb14
-rw-r--r--spec/features/groups/navbar_spec.rb2
-rw-r--r--spec/features/groups/new_group_page_spec.rb2
-rw-r--r--spec/features/incidents/user_views_incident_spec.rb2
-rw-r--r--spec/features/issues/group_label_sidebar_spec.rb2
-rw-r--r--spec/features/issues/issue_header_spec.rb12
-rw-r--r--spec/features/issues/user_creates_issue_spec.rb5
-rw-r--r--spec/features/issues/user_edits_issue_spec.rb7
-rw-r--r--spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb2
-rw-r--r--spec/features/jira_connect/branches_spec.rb12
-rw-r--r--spec/features/markdown/observability_spec.rb74
-rw-r--r--spec/features/markdown/sandboxed_mermaid_spec.rb2
-rw-r--r--spec/features/merge_request/admin_views_hidden_merge_request_spec.rb27
-rw-r--r--spec/features/merge_request/batch_comments_spec.rb2
-rw-r--r--spec/features/merge_request/close_reopen_report_toggle_spec.rb22
-rw-r--r--spec/features/merge_request/maintainer_edits_fork_spec.rb2
-rw-r--r--spec/features/merge_request/merge_request_discussion_lock_spec.rb2
-rw-r--r--spec/features/merge_request/user_accepts_merge_request_spec.rb2
-rw-r--r--spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb2
-rw-r--r--spec/features/merge_request/user_approves_spec.rb2
-rw-r--r--spec/features/merge_request/user_assigns_themselves_reviewer_spec.rb2
-rw-r--r--spec/features/merge_request/user_assigns_themselves_spec.rb2
-rw-r--r--spec/features/merge_request/user_awards_emoji_spec.rb2
-rw-r--r--spec/features/merge_request/user_clicks_merge_request_tabs_spec.rb2
-rw-r--r--spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb2
-rw-r--r--spec/features/merge_request/user_comments_on_commit_spec.rb2
-rw-r--r--spec/features/merge_request/user_comments_on_diff_spec.rb2
-rw-r--r--spec/features/merge_request/user_comments_on_merge_request_spec.rb2
-rw-r--r--spec/features/merge_request/user_creates_image_diff_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_creates_merge_request_spec.rb2
-rw-r--r--spec/features/merge_request/user_creates_mr_spec.rb2
-rw-r--r--spec/features/merge_request/user_customizes_merge_commit_message_spec.rb2
-rw-r--r--spec/features/merge_request/user_edits_assignees_sidebar_spec.rb2
-rw-r--r--spec/features/merge_request/user_edits_merge_request_spec.rb2
-rw-r--r--spec/features/merge_request/user_edits_mr_spec.rb2
-rw-r--r--spec/features/merge_request/user_edits_reviewers_sidebar_spec.rb2
-rw-r--r--spec/features/merge_request/user_expands_diff_spec.rb2
-rw-r--r--spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb2
-rw-r--r--spec/features/merge_request/user_locks_discussion_spec.rb2
-rw-r--r--spec/features/merge_request/user_manages_subscription_spec.rb2
-rw-r--r--spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb2
-rw-r--r--spec/features/merge_request/user_merges_immediately_spec.rb2
-rw-r--r--spec/features/merge_request/user_merges_merge_request_spec.rb2
-rw-r--r--spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb2
-rw-r--r--spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb2
-rw-r--r--spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb2
-rw-r--r--spec/features/merge_request/user_opens_context_commits_modal_spec.rb2
-rw-r--r--spec/features/merge_request/user_posts_diff_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_posts_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_rebases_merge_request_spec.rb2
-rw-r--r--spec/features/merge_request/user_resolves_conflicts_spec.rb2
-rw-r--r--spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb2
-rw-r--r--spec/features/merge_request/user_resolves_outdated_diff_discussions_spec.rb3
-rw-r--r--spec/features/merge_request/user_resolves_wip_mr_spec.rb2
-rw-r--r--spec/features/merge_request/user_reverts_merge_request_spec.rb2
-rw-r--r--spec/features/merge_request/user_reviews_image_spec.rb2
-rw-r--r--spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_breadcrumb_links_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb7
-rw-r--r--spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_closing_issues_message_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_deleted_target_branch_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_diff_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_discussions_navigation_spec.rb27
-rw-r--r--spec/features/merge_request/user_sees_discussions_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_merge_widget_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_mr_from_deleted_forked_project_spec.rb3
-rw-r--r--spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb3
-rw-r--r--spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_page_metadata_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_pipelines_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_system_notes_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_versions_spec.rb2
-rw-r--r--spec/features/merge_request/user_sees_wip_help_message_spec.rb2
-rw-r--r--spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb2
-rw-r--r--spec/features/merge_request/user_squashes_merge_request_spec.rb2
-rw-r--r--spec/features/merge_request/user_suggests_changes_on_diff_spec.rb2
-rw-r--r--spec/features/merge_request/user_toggles_whitespace_changes_spec.rb2
-rw-r--r--spec/features/merge_request/user_tries_to_access_private_project_info_through_new_mr_spec.rb2
-rw-r--r--spec/features/merge_request/user_uses_quick_actions_spec.rb2
-rw-r--r--spec/features/merge_request/user_views_auto_expanding_diff_spec.rb2
-rw-r--r--spec/features/merge_request/user_views_diffs_commit_spec.rb2
-rw-r--r--spec/features/merge_request/user_views_diffs_file_by_file_spec.rb2
-rw-r--r--spec/features/merge_request/user_views_diffs_spec.rb2
-rw-r--r--spec/features/merge_request/user_views_merge_request_from_deleted_fork_spec.rb2
-rw-r--r--spec/features/merge_request/user_views_open_merge_request_spec.rb2
-rw-r--r--spec/features/merge_requests/admin_views_hidden_merge_requests_spec.rb27
-rw-r--r--spec/features/merge_requests/filters_generic_behavior_spec.rb2
-rw-r--r--spec/features/merge_requests/rss_spec.rb2
-rw-r--r--spec/features/merge_requests/user_exports_as_csv_spec.rb2
-rw-r--r--spec/features/merge_requests/user_filters_by_approvals_spec.rb2
-rw-r--r--spec/features/merge_requests/user_filters_by_assignees_spec.rb2
-rw-r--r--spec/features/merge_requests/user_filters_by_deployments_spec.rb2
-rw-r--r--spec/features/merge_requests/user_filters_by_draft_spec.rb2
-rw-r--r--spec/features/merge_requests/user_filters_by_labels_spec.rb2
-rw-r--r--spec/features/merge_requests/user_filters_by_milestones_spec.rb2
-rw-r--r--spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb2
-rw-r--r--spec/features/merge_requests/user_filters_by_target_branch_spec.rb2
-rw-r--r--spec/features/merge_requests/user_lists_merge_requests_spec.rb2
-rw-r--r--spec/features/merge_requests/user_mass_updates_spec.rb2
-rw-r--r--spec/features/merge_requests/user_sees_empty_state_spec.rb2
-rw-r--r--spec/features/merge_requests/user_sorts_merge_requests_spec.rb2
-rw-r--r--spec/features/merge_requests/user_views_all_merge_requests_spec.rb2
-rw-r--r--spec/features/merge_requests/user_views_closed_merge_requests_spec.rb2
-rw-r--r--spec/features/merge_requests/user_views_merged_merge_requests_spec.rb2
-rw-r--r--spec/features/merge_requests/user_views_open_merge_requests_spec.rb2
-rw-r--r--spec/features/milestone_spec.rb8
-rw-r--r--spec/features/nav/new_nav_toggle_spec.rb14
-rw-r--r--spec/features/oauth_registration_spec.rb17
-rw-r--r--spec/features/profiles/chat_names_spec.rb2
-rw-r--r--spec/features/profiles/user_visits_profile_preferences_page_spec.rb10
-rw-r--r--spec/features/project_variables_spec.rb5
-rw-r--r--spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb12
-rw-r--r--spec/features/projects/commit/cherry_pick_spec.rb4
-rw-r--r--spec/features/projects/diffs/diff_show_spec.rb2
-rw-r--r--spec/features/projects/environments/environment_spec.rb10
-rw-r--r--spec/features/projects/files/template_type_dropdown_spec.rb143
-rw-r--r--spec/features/projects/files/undo_template_spec.rb19
-rw-r--r--spec/features/projects/fork_spec.rb15
-rw-r--r--spec/features/projects/import_export/export_file_spec.rb4
-rw-r--r--spec/features/projects/issues/design_management/user_uploads_designs_spec.rb2
-rw-r--r--spec/features/projects/jobs_spec.rb14
-rw-r--r--spec/features/projects/labels/sort_labels_spec.rb20
-rw-r--r--spec/features/projects/members/manage_members_spec.rb22
-rw-r--r--spec/features/projects/members/sorting_spec.rb4
-rw-r--r--spec/features/projects/milestones/milestones_sorting_spec.rb15
-rw-r--r--spec/features/projects/navbar_spec.rb4
-rw-r--r--spec/features/projects/new_project_spec.rb4
-rw-r--r--spec/features/projects/pages/user_edits_settings_spec.rb2
-rw-r--r--spec/features/projects/pipelines/legacy_pipelines_spec.rb 0
-rw-r--r--spec/features/projects/pipelines/pipeline_spec.rb44
-rw-r--r--spec/features/projects/pipelines/pipelines_spec.rb21
-rw-r--r--spec/features/projects/settings/monitor_settings_spec.rb2
-rw-r--r--spec/features/projects/settings/pipelines_settings_spec.rb24
-rw-r--r--spec/features/projects/settings/user_manages_project_members_spec.rb7
-rw-r--r--spec/features/projects/settings/user_searches_in_settings_spec.rb1
-rw-r--r--spec/features/projects/terraform_spec.rb11
-rw-r--r--spec/features/projects/tree/create_directory_spec.rb8
-rw-r--r--spec/features/projects/tree/create_file_spec.rb7
-rw-r--r--spec/features/projects/tree/tree_show_spec.rb6
-rw-r--r--spec/features/projects/tree/upload_file_spec.rb7
-rw-r--r--spec/features/projects_spec.rb8
-rw-r--r--spec/features/protected_branches_spec.rb2
-rw-r--r--spec/features/protected_tags_spec.rb2
-rw-r--r--spec/features/runners_spec.rb476
-rw-r--r--spec/features/search/user_searches_for_code_spec.rb59
-rw-r--r--spec/features/signed_commits_spec.rb12
-rw-r--r--spec/features/snippets/show_spec.rb21
-rw-r--r--spec/features/snippets/user_creates_snippet_spec.rb2
-rw-r--r--spec/features/triggers_spec.rb14
-rw-r--r--spec/features/user_sees_revert_modal_spec.rb2
-rw-r--r--spec/features/user_sorts_things_spec.rb2
-rw-r--r--spec/features/users/login_spec.rb3
-rw-r--r--spec/finders/access_requests_finder_spec.rb16
-rw-r--r--spec/finders/branches_finder_spec.rb43
-rw-r--r--spec/finders/ci/pipelines_finder_spec.rb10
-rw-r--r--spec/finders/ci/runners_finder_spec.rb12
-rw-r--r--spec/finders/members_finder_spec.rb306
-rw-r--r--spec/finders/merge_requests_finder_spec.rb27
-rw-r--r--spec/fixtures/api/schemas/public_api/v4/integration.json3
-rw-r--r--spec/fixtures/api/schemas/remote_mirror.json61
-rw-r--r--spec/fixtures/config/mail_room_enabled_ms_graph.yml4
-rw-r--r--spec/fixtures/lib/gitlab/import_export/complex/project.json76
-rw-r--r--spec/fixtures/markdown/markdown_golden_master_examples.yml909
-rw-r--r--spec/fixtures/tasks/gitlab/security/expected_banned_keys.yml12
-rw-r--r--spec/fixtures/tasks/gitlab/security/ssh-badkeys/LICENSE22
-rw-r--r--spec/fixtures/tasks/gitlab/security/ssh-badkeys/README.md12
-rw-r--r--spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/array-networks-vapv-vxag.pub1
-rw-r--r--spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/barracuda_load_balancer_vm.pub1
-rw-r--r--spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/ceragon-fibeair-cve-2015-0936.pub1
-rw-r--r--spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/exagrid-cve-2016-1561.pub1
-rw-r--r--spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/f5-bigip-cve-2012-1493.pub1
-rw-r--r--spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/loadbalancer.org-enterprise-va.pub1
-rw-r--r--spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/monroe-dasdec-cve-2013-0137.pub1
-rw-r--r--spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/quantum-dxi-v1000.pub1
-rw-r--r--spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/vagrant-default.pub1
-rw-r--r--spec/fixtures/valid.po6
-rw-r--r--spec/frontend/__mocks__/@cubejs-client/core.js26
-rw-r--r--spec/frontend/abuse_reports/components/abuse_category_selector_spec.js126
-rw-r--r--spec/frontend/admin/broadcast_messages/components/message_form_spec.js6
-rw-r--r--spec/frontend/admin/users/components/user_date_spec.js2
-rw-r--r--spec/frontend/admin/users/mock_data.js8
-rw-r--r--spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js4
-rw-r--r--spec/frontend/analytics/cycle_analytics/store/actions_spec.js38
-rw-r--r--spec/frontend/api/groups_api_spec.js4
-rw-r--r--spec/frontend/api/harbor_registry_spec.js8
-rw-r--r--spec/frontend/api/packages_api_spec.js4
-rw-r--r--spec/frontend/api/tags_api_spec.js4
-rw-r--r--spec/frontend/api/user_api_spec.js35
-rw-r--r--spec/frontend/api_spec.js157
-rw-r--r--spec/frontend/artifacts/components/artifact_row_spec.js21
-rw-r--r--spec/frontend/artifacts/components/artifacts_table_row_details_spec.js1
-rw-r--r--spec/frontend/artifacts/components/feedback_banner_spec.js63
-rw-r--r--spec/frontend/artifacts/components/job_artifacts_table_spec.js24
-rw-r--r--spec/frontend/autosave_spec.js128
-rw-r--r--spec/frontend/batch_comments/components/submit_dropdown_spec.js2
-rw-r--r--spec/frontend/behaviors/markdown/render_gfm_spec.js9
-rw-r--r--spec/frontend/boards/board_card_inner_spec.js20
-rw-r--r--spec/frontend/boards/board_list_helper.js7
-rw-r--r--spec/frontend/boards/board_list_spec.js2
-rw-r--r--spec/frontend/boards/components/board_app_spec.js3
-rw-r--r--spec/frontend/boards/components/board_card_spec.js8
-rw-r--r--spec/frontend/boards/components/board_column_spec.js1
-rw-r--r--spec/frontend/boards/components/board_content_sidebar_spec.js6
-rw-r--r--spec/frontend/boards/components/board_content_spec.js3
-rw-r--r--spec/frontend/boards/components/board_filtered_search_spec.js3
-rw-r--r--spec/frontend/boards/components/board_form_spec.js6
-rw-r--r--spec/frontend/boards/components/board_list_header_spec.js2
-rw-r--r--spec/frontend/boards/components/board_new_issue_spec.js11
-rw-r--r--spec/frontend/boards/components/board_top_bar_spec.js1
-rw-r--r--spec/frontend/boards/components/boards_selector_spec.js25
-rw-r--r--spec/frontend/boards/components/issue_board_filtered_search_spec.js2
-rw-r--r--spec/frontend/boards/stores/getters_spec.js36
-rw-r--r--spec/frontend/captcha/captcha_modal_axios_interceptor_spec.js10
-rw-r--r--spec/frontend/ci/ci_variable_list/ci_variable_list/ci_variable_list_spec.js (renamed from spec/frontend/ci_variable_list/ci_variable_list/ci_variable_list_spec.js)2
-rw-r--r--spec/frontend/ci/ci_variable_list/ci_variable_list/native_form_variable_list_spec.js (renamed from spec/frontend/ci_variable_list/ci_variable_list/native_form_variable_list_spec.js)2
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_admin_variables_spec.js (renamed from spec/frontend/ci_variable_list/components/ci_admin_variables_spec.js)4
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js118
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js (renamed from spec/frontend/ci_variable_list/components/ci_group_variables_spec.js)6
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_project_variables_spec.js (renamed from spec/frontend/ci_variable_list/components/ci_project_variables_spec.js)6
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js (renamed from spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js)6
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js (renamed from spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js)10
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js (renamed from spec/frontend/ci_variable_list/components/ci_variable_shared_spec.js)18
-rw-r--r--spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js (renamed from spec/frontend/ci_variable_list/components/ci_variable_table_spec.js)4
-rw-r--r--spec/frontend/ci/ci_variable_list/mocks.js (renamed from spec/frontend/ci_variable_list/mocks.js)32
-rw-r--r--spec/frontend/ci/ci_variable_list/services/mock_data.js (renamed from spec/frontend/ci_variable_list/services/mock_data.js)0
-rw-r--r--spec/frontend/ci/ci_variable_list/stubs.js (renamed from spec/frontend/ci_variable_list/stubs.js)0
-rw-r--r--spec/frontend/ci/ci_variable_list/utils_spec.js (renamed from spec/frontend/ci_variable_list/utils_spec.js)4
-rw-r--r--spec/frontend/ci/pipeline_editor/components/editor/text_editor_spec.js29
-rw-r--r--spec/frontend/ci/pipeline_editor/graphql/resolvers_spec.js4
-rw-r--r--spec/frontend/ci/pipeline_new/components/pipeline_new_form_spec.js (renamed from spec/frontend/pipeline_new/components/pipeline_new_form_spec.js)28
-rw-r--r--spec/frontend/ci/pipeline_new/components/refs_dropdown_spec.js (renamed from spec/frontend/pipeline_new/components/refs_dropdown_spec.js)91
-rw-r--r--spec/frontend/ci/pipeline_new/mock_data.js (renamed from spec/frontend/pipeline_new/mock_data.js)10
-rw-r--r--spec/frontend/ci/pipeline_new/utils/filter_variables_spec.js (renamed from spec/frontend/pipeline_new/utils/filter_variables_spec.js)2
-rw-r--r--spec/frontend/ci/pipeline_new/utils/format_refs_spec.js82
-rw-r--r--spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_spec.js73
-rw-r--r--spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_actions_spec.js11
-rw-r--r--spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline_spec.js12
-rw-r--r--spec/frontend/ci/pipeline_schedules/mock_data.js18
-rw-r--r--spec/frontend/ci/runner/components/registration/registration_dropdown_spec.js12
-rw-r--r--spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js139
-rw-r--r--spec/frontend/commit/pipelines/pipelines_table_spec.js14
-rw-r--r--spec/frontend/constants_spec.js30
-rw-r--r--spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js31
-rw-r--r--spec/frontend/content_editor/extensions/attachment_spec.js10
-rw-r--r--spec/frontend/content_editor/extensions/link_spec.js2
-rw-r--r--spec/frontend/content_editor/markdown_processing_spec.js16
-rw-r--r--spec/frontend/content_editor/markdown_processing_spec_helper.js92
-rw-r--r--spec/frontend/content_editor/markdown_snapshot_spec.js95
-rw-r--r--spec/frontend/content_editor/markdown_snapshot_spec_helper.js96
-rw-r--r--spec/frontend/content_editor/services/upload_helpers_spec.js4
-rw-r--r--spec/frontend/deploy_freeze/store/mutations_spec.js4
-rw-r--r--spec/frontend/design_management/components/design_notes/design_reply_form_spec.js24
-rw-r--r--spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap14
-rw-r--r--spec/frontend/diff_spec.js72
-rw-r--r--spec/frontend/diffs/components/app_spec.js1
-rw-r--r--spec/frontend/diffs/components/diff_file_spec.js6
-rw-r--r--spec/frontend/diffs/components/diff_line_note_form_spec.js3
-rw-r--r--spec/frontend/dropzone_input_spec.js6
-rw-r--r--spec/frontend/editor/schema/ci/json_tests/positive_tests/gitlab-ci.json2
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/artifacts.yml40
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/negative_tests/rules.yml5
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/artifacts.yml20
-rw-r--r--spec/frontend/editor/schema/ci/yaml_tests/positive_tests/rules.yml4
-rw-r--r--spec/frontend/environments/environment_details/deployment_job_spec.js49
-rw-r--r--spec/frontend/environments/environment_details/deployment_status_link_spec.js57
-rw-r--r--spec/frontend/environments/environment_details/deployment_triggerer_spec.js51
-rw-r--r--spec/frontend/environments/environment_details/empty_state_spec.js39
-rw-r--r--spec/frontend/environments/environment_details/page_spec.js69
-rw-r--r--spec/frontend/environments/environment_details/pagination_spec.js157
-rw-r--r--spec/frontend/environments/environment_details_page_spec.js50
-rw-r--r--spec/frontend/error_tracking/components/error_tracking_list_spec.js18
-rw-r--r--spec/frontend/error_tracking/store/list/actions_spec.js6
-rw-r--r--spec/frontend/error_tracking_settings/components/project_dropdown_spec.js16
-rw-r--r--spec/frontend/error_tracking_settings/mock.js7
-rw-r--r--spec/frontend/feature_flags/components/environments_dropdown_spec.js10
-rw-r--r--spec/frontend/feature_flags/components/new_environments_dropdown_spec.js4
-rw-r--r--spec/frontend/feature_highlight/feature_highlight_helper_spec.js7
-rw-r--r--spec/frontend/fixtures/environments.rb69
-rw-r--r--spec/frontend/fixtures/issues.rb2
-rw-r--r--spec/frontend/fixtures/projects.rb32
-rw-r--r--spec/frontend/fixtures/runner_instructions.rb2
-rw-r--r--spec/frontend/flash_spec.js204
-rw-r--r--spec/frontend/frequent_items/components/app_spec.js62
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_list_item_spec.js49
-rw-r--r--spec/frontend/frequent_items/components/frequent_items_list_spec.js35
-rw-r--r--spec/frontend/frequent_items/store/actions_spec.js87
-rw-r--r--spec/frontend/frequent_items/store/mutations_spec.js35
-rw-r--r--spec/frontend/gfm_auto_complete/mock_data.js24
-rw-r--r--spec/frontend/gfm_auto_complete_spec.js27
-rw-r--r--spec/frontend/group_settings/components/shared_runners_form_spec.js2
-rw-r--r--spec/frontend/ide/components/repo_editor_spec.js27
-rw-r--r--spec/frontend/ide/lib/gitlab_web_ide/get_base_config_spec.js18
-rw-r--r--spec/frontend/ide/stores/modules/commit/actions_spec.js39
-rw-r--r--spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js8
-rw-r--r--spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js4
-rw-r--r--spec/frontend/ide/stores/modules/terminal/messages_spec.js10
-rw-r--r--spec/frontend/import_entities/components/import_status_spec.js19
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_actions_cell_spec.js38
-rw-r--r--spec/frontend/import_entities/import_groups/components/import_table_spec.js103
-rw-r--r--spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js12
-rw-r--r--spec/frontend/incidents_settings/components/incidents_settings_service_spec.js8
-rw-r--r--spec/frontend/integrations/edit/components/integration_form_spec.js81
-rw-r--r--spec/frontend/integrations/edit/components/integration_forms/section_spec.js109
-rw-r--r--spec/frontend/integrations/edit/components/trigger_field_spec.js31
-rw-r--r--spec/frontend/integrations/overrides/components/integration_overrides_spec.js10
-rw-r--r--spec/frontend/invite_members/components/invite_members_modal_spec.js28
-rw-r--r--spec/frontend/invite_members/components/user_limit_notification_spec.js20
-rw-r--r--spec/frontend/issuable/components/issuable_by_email_spec.js6
-rw-r--r--spec/frontend/issuable/components/issuable_header_warnings_spec.js5
-rw-r--r--spec/frontend/issuable/issuable_form_spec.js10
-rw-r--r--spec/frontend/issues/dashboard/components/issues_dashboard_app_spec.js173
-rw-r--r--spec/frontend/issues/dashboard/utils_spec.js88
-rw-r--r--spec/frontend/issues/list/mock_data.js19
-rw-r--r--spec/frontend/issues/related_merge_requests/components/related_merge_requests_spec.js6
-rw-r--r--spec/frontend/issues/show/components/header_actions_spec.js33
-rw-r--r--spec/frontend/issues/show/components/incidents/create_timeline_events_form_spec.js9
-rw-r--r--spec/frontend/issues/show/components/incidents/mock_data.js1
-rw-r--r--spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js142
-rw-r--r--spec/frontend/jira_connect/branches/components/project_dropdown_spec.js53
-rw-r--r--spec/frontend/jira_connect/subscriptions/api_spec.js14
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/compatibility_alert_spec.js56
-rw-r--r--spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js21
-rw-r--r--spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap20
-rw-r--r--spec/frontend/jobs/components/job/manual_variables_form_spec.js21
-rw-r--r--spec/frontend/jobs/components/job/sidebar_spec.js4
-rw-r--r--spec/frontend/jobs/components/table/jobs_table_spec.js6
-rw-r--r--spec/frontend/language_switcher/components/app_spec.js10
-rw-r--r--spec/frontend/lib/utils/datetime/date_calculation_utility_spec.js18
-rw-r--r--spec/frontend/lib/utils/datetime/date_format_utility_spec.js22
-rw-r--r--spec/frontend/lib/utils/poll_until_complete_spec.js14
-rw-r--r--spec/frontend/locale/ensure_single_line_spec.js2
-rw-r--r--spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js1
-rw-r--r--spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js2
-rw-r--r--spec/frontend/members/components/action_buttons/leave_button_spec.js59
-rw-r--r--spec/frontend/members/components/action_buttons/remove_member_button_spec.js18
-rw-r--r--spec/frontend/members/components/action_buttons/user_action_buttons_spec.js161
-rw-r--r--spec/frontend/members/components/action_dropdowns/leave_group_dropdown_item_spec.js54
-rw-r--r--spec/frontend/members/components/action_dropdowns/remove_member_dropdown_item_spec.js77
-rw-r--r--spec/frontend/members/components/action_dropdowns/user_action_dropdown_spec.js220
-rw-r--r--spec/frontend/members/components/modals/leave_modal_spec.js122
-rw-r--r--spec/frontend/members/components/modals/remove_member_modal_spec.js44
-rw-r--r--spec/frontend/members/components/table/__snapshots__/member_activity_spec.js.snap61
-rw-r--r--spec/frontend/members/components/table/created_at_spec.js19
-rw-r--r--spec/frontend/members/components/table/member_action_buttons_spec.js4
-rw-r--r--spec/frontend/members/components/table/member_activity_spec.js40
-rw-r--r--spec/frontend/members/components/table/member_source_spec.js94
-rw-r--r--spec/frontend/members/components/table/members_table_cell_spec.js16
-rw-r--r--spec/frontend/members/components/table/members_table_spec.js45
-rw-r--r--spec/frontend/members/components/table/role_dropdown_spec.js66
-rw-r--r--spec/frontend/members/guest_overage_confirm_action_spec.js7
-rw-r--r--spec/frontend/members/mock_data.js18
-rw-r--r--spec/frontend/members/store/actions_spec.js8
-rw-r--r--spec/frontend/members/utils_spec.js26
-rw-r--r--spec/frontend/merge_request_tabs_spec.js70
-rw-r--r--spec/frontend/ml/experiment_tracking/components/__snapshots__/ml_candidate_spec.js.snap45
-rw-r--r--spec/frontend/ml/experiment_tracking/components/__snapshots__/ml_experiment_spec.js.snap511
-rw-r--r--spec/frontend/ml/experiment_tracking/components/ml_candidate_spec.js4
-rw-r--r--spec/frontend/ml/experiment_tracking/components/ml_experiment_spec.js111
-rw-r--r--spec/frontend/monitoring/requests/index_spec.js20
-rw-r--r--spec/frontend/monitoring/store/actions_spec.js10
-rw-r--r--spec/frontend/monitoring/store/mutations_spec.js7
-rw-r--r--spec/frontend/nav/components/new_nav_toggle_spec.js23
-rw-r--r--spec/frontend/notes/components/comment_form_spec.js9
-rw-r--r--spec/frontend/notes/components/note_body_spec.js12
-rw-r--r--spec/frontend/notes/stores/actions_spec.js6
-rw-r--r--spec/frontend/notifications/components/custom_notifications_modal_spec.js14
-rw-r--r--spec/frontend/notifications/components/notification_email_listbox_input_spec.js81
-rw-r--r--spec/frontend/notifications/components/notifications_dropdown_spec.js10
-rw-r--r--spec/frontend/observability/observability_app_spec.js29
-rw-r--r--spec/frontend/observability/skeleton_spec.js145
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js8
-rw-r--r--spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js45
-rw-r--r--spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_history_spec.js16
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/delete_modal_spec.js8
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap2
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js16
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/package_versions_list_spec.js57
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/details/pypi_installation_spec.js2
-rw-r--r--spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js156
-rw-r--r--spec/frontend/packages_and_registries/package_registry/pages/__snapshots__/list_spec.js.snap125
-rw-r--r--spec/frontend/packages_and_registries/package_registry/pages/list_spec.js103
-rw-r--r--spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js42
-rw-r--r--spec/frontend/pages/projects/forks/new/components/fork_form_spec.js19
-rw-r--r--spec/frontend/pages/projects/forks/new/components/project_namespace_spec.js42
-rw-r--r--spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap14
-rw-r--r--spec/frontend/pages/projects/graphs/code_coverage_spec.js12
-rw-r--r--spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js12
-rw-r--r--spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js2
-rw-r--r--spec/frontend/pages/shared/wikis/components/wiki_content_spec.js6
-rw-r--r--spec/frontend/pipeline_new/utils/format_refs_spec.js21
-rw-r--r--spec/frontend/pipeline_wizard/components/wrapper_spec.js7
-rw-r--r--spec/frontend/pipelines/pipelines_spec.js8
-rw-r--r--spec/frontend/pipelines/pipelines_table_spec.js8
-rw-r--r--spec/frontend/projects/commit/components/branches_dropdown_spec.js115
-rw-r--r--spec/frontend/projects/commit/components/projects_dropdown_spec.js64
-rw-r--r--spec/frontend/projects/merge_requests/components/report_abuse_dropdown_item_spec.js73
-rw-r--r--spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_charts_spec.js51
-rw-r--r--spec/frontend/projects/settings/components/default_branch_selector_spec.js1
-rw-r--r--spec/frontend/projects/settings/repository/branch_rules/components/branch_rule_spec.js11
-rw-r--r--spec/frontend/projects/settings/repository/branch_rules/mock_data.js5
-rw-r--r--spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js8
-rw-r--r--spec/frontend/read_more_spec.js33
-rw-r--r--spec/frontend/ref/components/ref_selector_spec.js11
-rw-r--r--spec/frontend/repository/commits_service_spec.js15
-rw-r--r--spec/frontend/repository/components/blob_content_viewer_spec.js10
-rw-r--r--spec/frontend/repository/components/blob_viewers/notebook_viewer_spec.js40
-rw-r--r--spec/frontend/repository/components/blob_viewers/openapi_viewer_spec.js30
-rw-r--r--spec/frontend/repository/components/fork_info_spec.js122
-rw-r--r--spec/frontend/repository/components/new_directory_modal_spec.js8
-rw-r--r--spec/frontend/repository/components/tree_content_spec.js23
-rw-r--r--spec/frontend/repository/components/upload_blob_modal_spec.js4
-rw-r--r--spec/frontend/repository/mock_data.js18
-rw-r--r--spec/frontend/repository/utils/ref_switcher_utils_spec.js7
-rw-r--r--spec/frontend/search/store/utils_spec.js22
-rw-r--r--spec/frontend/self_monitor/store/actions_spec.js6
-rw-r--r--spec/frontend/set_status_modal/set_status_form_spec.js93
-rw-r--r--spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js49
-rw-r--r--spec/frontend/set_status_modal/user_profile_set_status_wrapper_spec.js81
-rw-r--r--spec/frontend/set_status_modal/utils_spec.js18
-rw-r--r--spec/frontend/sidebar/components/assignees/assignees_spec.js12
-rw-r--r--spec/frontend/sidebar/components/copy/sidebar_reference_widget_spec.js2
-rw-r--r--spec/frontend/super_sidebar/components/counter_spec.js56
-rw-r--r--spec/frontend/super_sidebar/components/super_sidebar_spec.js33
-rw-r--r--spec/frontend/super_sidebar/components/user_bar_spec.js46
-rw-r--r--spec/frontend/super_sidebar/mock_data.js9
-rw-r--r--spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js150
-rw-r--r--spec/frontend/usage_quotas/storage/components/project_storage_detail_spec.js129
-rw-r--r--spec/frontend/usage_quotas/storage/components/storage_type_icon_spec.js41
-rw-r--r--spec/frontend/usage_quotas/storage/components/usage_graph_spec.js144
-rw-r--r--spec/frontend/usage_quotas/storage/mock_data.js101
-rw-r--r--spec/frontend/usage_quotas/storage/utils_spec.js88
-rw-r--r--spec/frontend/users/profile/components/report_abuse_button_spec.js79
-rw-r--r--spec/frontend/vue_merge_request_widget/components/mr_collapsible_extension_spec.js8
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap163
-rw-r--r--spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled_spec.js8
-rw-r--r--spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js20
-rw-r--r--spec/frontend/vue_merge_request_widget/extensions/security_reports/mock_data.js141
-rw-r--r--spec/frontend/vue_merge_request_widget/extensions/security_reports/mr_widget_security_reports_spec.js93
-rw-r--r--spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js28
-rw-r--r--spec/frontend/vue_merge_request_widget/extentions/accessibility/index_spec.js12
-rw-r--r--spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js75
-rw-r--r--spec/frontend/vue_merge_request_widget/extentions/code_quality/mock_data.js37
-rw-r--r--spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js16
-rw-r--r--spec/frontend/vue_shared/components/ci_badge_link_spec.js4
-rw-r--r--spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/actions_spec.js36
-rw-r--r--spec/frontend/vue_shared/components/group_select/group_select_spec.js150
-rw-r--r--spec/frontend/vue_shared/components/header_ci_component_spec.js3
-rw-r--r--spec/frontend/vue_shared/components/listbox_input/listbox_input_spec.js67
-rw-r--r--spec/frontend/vue_shared/components/markdown/editor_mode_dropdown_spec.js58
-rw-r--r--spec/frontend/vue_shared/components/markdown/field_spec.js18
-rw-r--r--spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js77
-rw-r--r--spec/frontend/vue_shared/components/markdown/toolbar_spec.js15
-rw-r--r--spec/frontend/vue_shared/components/runner_aws_deployments/__snapshots__/runner_aws_deployments_modal_spec.js.snap177
-rw-r--r--spec/frontend/vue_shared/components/runner_aws_deployments/runner_aws_deployments_modal_spec.js70
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/instructions/__snapshots__/runner_docker_instructions_spec.js.snap3
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/instructions/__snapshots__/runner_kubernetes_instructions_spec.js.snap3
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/instructions/runner_aws_instructions_spec.js117
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/instructions/runner_cli_instructions_spec.js169
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/instructions/runner_docker_instructions_spec.js28
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/instructions/runner_kubernetes_instructions_spec.js28
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/mock_data.js8
-rw-r--r--spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js201
-rw-r--r--spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js17
-rw-r--r--spec/frontend/vue_shared/components/timezone_dropdown/timezone_dropdown_spec.js10
-rw-r--r--spec/frontend/vue_shared/components/web_ide_link_spec.js15
-rw-r--r--spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js24
-rw-r--r--spec/frontend/work_items/components/notes/__snapshots__/work_item_note_body_spec.js.snap9
-rw-r--r--spec/frontend/work_items/components/notes/activity_filter_spec.js74
-rw-r--r--spec/frontend/work_items/components/notes/work_item_note_body_spec.js32
-rw-r--r--spec/frontend/work_items/components/notes/work_item_note_spec.js53
-rw-r--r--spec/frontend/work_items/components/work_item_comment_form_spec.js205
-rw-r--r--spec/frontend/work_items/components/work_item_comment_locked_spec.js41
-rw-r--r--spec/frontend/work_items/components/work_item_detail_modal_spec.js18
-rw-r--r--spec/frontend/work_items/components/work_item_detail_spec.js92
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_link_child_metadata_spec.js27
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js21
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js93
-rw-r--r--spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js29
-rw-r--r--spec/frontend/work_items/components/work_item_notes_spec.js95
-rw-r--r--spec/frontend/work_items/mock_data.js362
-rw-r--r--spec/frontend/work_items/router_spec.js1
-rw-r--r--spec/graphql/mutations/achievements/create_spec.rb54
-rw-r--r--spec/graphql/resolvers/ci/jobs_resolver_spec.rb13
-rw-r--r--spec/graphql/resolvers/timelog_resolver_spec.rb29
-rw-r--r--spec/graphql/types/access_level_enum_spec.rb2
-rw-r--r--spec/graphql/types/achievements/achievement_type_spec.rb39
-rw-r--r--spec/graphql/types/alert_management/alert_type_spec.rb1
-rw-r--r--spec/graphql/types/ci/runner_countable_connection_type_spec.rb11
-rw-r--r--spec/graphql/types/description_version_type_spec.rb10
-rw-r--r--spec/graphql/types/design_management/design_type_spec.rb2
-rw-r--r--spec/graphql/types/issue_type_spec.rb51
-rw-r--r--spec/graphql/types/member_access_level_enum_spec.rb11
-rw-r--r--spec/graphql/types/namespace_type_spec.rb2
-rw-r--r--spec/graphql/types/notes/note_type_spec.rb5
-rw-r--r--spec/graphql/types/notes/noteable_interface_spec.rb1
-rw-r--r--spec/graphql/types/notes/system_note_metadata_type_spec.rb11
-rw-r--r--spec/graphql/types/query_type_spec.rb59
-rw-r--r--spec/graphql/types/repository/blob_type_spec.rb13
-rw-r--r--spec/graphql/types/snippet_type_spec.rb2
-rw-r--r--spec/graphql/types/time_tracking/timelog_connection_type_spec.rb44
-rw-r--r--spec/graphql/types/time_tracking/timelog_sort_enum_spec.rb20
-rw-r--r--spec/graphql/types/timelog_type_spec.rb2
-rw-r--r--spec/graphql/types/user_type_spec.rb21
-rw-r--r--spec/graphql/types/users/email_type_spec.rb18
-rw-r--r--spec/graphql/types/users/namespace_commit_email_type_spec.rb18
-rw-r--r--spec/helpers/admin/components_helper_spec.rb30
-rw-r--r--spec/helpers/appearances_helper_spec.rb14
-rw-r--r--spec/helpers/application_helper_spec.rb68
-rw-r--r--spec/helpers/button_helper_spec.rb1
-rw-r--r--spec/helpers/ci/runners_helper_spec.rb4
-rw-r--r--spec/helpers/emails_helper_spec.rb16
-rw-r--r--spec/helpers/feature_flags_helper_spec.rb2
-rw-r--r--spec/helpers/form_helper_spec.rb18
-rw-r--r--spec/helpers/groups/group_members_helper_spec.rb4
-rw-r--r--spec/helpers/groups/observability_helper_spec.rb19
-rw-r--r--spec/helpers/import_helper_spec.rb16
-rw-r--r--spec/helpers/issuables_helper_spec.rb52
-rw-r--r--spec/helpers/issues_helper_spec.rb47
-rw-r--r--spec/helpers/markup_helper_spec.rb18
-rw-r--r--spec/helpers/nav_helper_spec.rb58
-rw-r--r--spec/helpers/preferences_helper_spec.rb24
-rw-r--r--spec/helpers/projects/ml/experiments_helper_spec.rb43
-rw-r--r--spec/helpers/projects/project_members_helper_spec.rb10
-rw-r--r--spec/helpers/projects_helper_spec.rb27
-rw-r--r--spec/helpers/search_helper_spec.rb20
-rw-r--r--spec/helpers/sidebars_helper_spec.rb24
-rw-r--r--spec/helpers/timeboxes_helper_spec.rb63
-rw-r--r--spec/helpers/todos_helper_spec.rb26
-rw-r--r--spec/helpers/url_helper_spec.rb25
-rw-r--r--spec/helpers/users/callouts_helper_spec.rb83
-rw-r--r--spec/helpers/version_check_helper_spec.rb21
-rw-r--r--spec/lib/api/entities/basic_project_details_spec.rb47
-rw-r--r--spec/lib/api/entities/bulk_imports/entity_spec.rb3
-rw-r--r--spec/lib/api/entities/ml/mlflow/run_info_spec.rb14
-rw-r--r--spec/lib/api/helpers/members_helpers_spec.rb53
-rw-r--r--spec/lib/api/helpers/packages_helpers_spec.rb29
-rw-r--r--spec/lib/api/helpers/pagination_strategies_spec.rb8
-rw-r--r--spec/lib/api/helpers/rate_limiter_spec.rb4
-rw-r--r--spec/lib/api/helpers_spec.rb19
-rw-r--r--spec/lib/atlassian/jira_connect/jwt/asymmetric_spec.rb15
-rw-r--r--spec/lib/banzai/filter/inline_observability_filter_spec.rb48
-rw-r--r--spec/lib/banzai/filter/math_filter_spec.rb171
-rw-r--r--spec/lib/banzai/filter/references/reference_filter_spec.rb6
-rw-r--r--spec/lib/banzai/filter/repository_link_filter_spec.rb8
-rw-r--r--spec/lib/banzai/filter/service_desk_upload_link_filter_spec.rb86
-rw-r--r--spec/lib/banzai/pipeline/full_pipeline_spec.rb4
-rw-r--r--spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb26
-rw-r--r--spec/lib/banzai/pipeline/service_desk_email_pipeline_spec.rb16
-rw-r--r--spec/lib/bulk_imports/clients/http_spec.rb170
-rw-r--r--spec/lib/bulk_imports/groups/stage_spec.rb24
-rw-r--r--spec/lib/bulk_imports/groups/transformers/subgroup_to_entity_transformer_spec.rb5
-rw-r--r--spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb2
-rw-r--r--spec/lib/event_filter_spec.rb72
-rw-r--r--spec/lib/gitlab/application_rate_limiter_spec.rb46
-rw-r--r--spec/lib/gitlab/auth/atlassian/identity_linker_spec.rb2
-rw-r--r--spec/lib/gitlab/auth/o_auth/user_spec.rb8
-rw-r--r--spec/lib/gitlab/background_migration/add_primary_email_to_emails_if_user_confirmed_spec.rb49
-rw-r--r--spec/lib/gitlab/background_migration/backfill_admin_mode_scope_for_personal_access_tokens_spec.rb53
-rw-r--r--spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/backfill_releases_author_id_spec.rb61
-rw-r--r--spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/batched_migration_job_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/sanitize_confidential_todos_spec.rb12
-rw-r--r--spec/lib/gitlab/background_migration/truncate_overlong_vulnerability_html_titles_spec.rb78
-rw-r--r--spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb2
-rw-r--r--spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb2
-rw-r--r--spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/config/entry/cache_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/config/entry/job_spec.rb186
-rw-r--r--spec/lib/gitlab/ci/config/entry/processable_spec.rb143
-rw-r--r--spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb6
-rw-r--r--spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb12
-rw-r--r--spec/lib/gitlab/ci/config/entry/reports_spec.rb14
-rw-r--r--spec/lib/gitlab/ci/config/entry/root_spec.rb15
-rw-r--r--spec/lib/gitlab/ci/config/entry/variable_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/config/external/file/local_spec.rb31
-rw-r--r--spec/lib/gitlab/ci/config/external/mapper_spec.rb11
-rw-r--r--spec/lib/gitlab/ci/config_spec.rb95
-rw-r--r--spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb41
-rw-r--r--spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb36
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb3
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/create_spec.rb4
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb110
-rw-r--r--spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb1
-rw-r--r--spec/lib/gitlab/ci/status/bridge/factory_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/status/build/manual_spec.rb41
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb11
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/sast_iac_gitlab_ci_yaml_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/sast_iac_latest_gitlab_ci_yaml_spec.rb8
-rw-r--r--spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb11
-rw-r--r--spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb2
-rw-r--r--spec/lib/gitlab/ci/templates/npm_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb9
-rw-r--r--spec/lib/gitlab/ci/templates/themekit_gitlab_ci_yaml_spec.rb5
-rw-r--r--spec/lib/gitlab/ci/variables/collection_spec.rb150
-rw-r--r--spec/lib/gitlab/ci/yaml_processor_spec.rb66
-rw-r--r--spec/lib/gitlab/config/entry/validators_spec.rb64
-rw-r--r--spec/lib/gitlab/counters/buffered_counter_spec.rb572
-rw-r--r--spec/lib/gitlab/counters/legacy_counter_spec.rb37
-rw-r--r--spec/lib/gitlab/data_builder/build_spec.rb12
-rw-r--r--spec/lib/gitlab/database/async_indexes/index_creator_spec.rb4
-rw-r--r--spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb2
-rw-r--r--spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb1
-rw-r--r--spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb5
-rw-r--r--spec/lib/gitlab/database/consistency_checker_spec.rb2
-rw-r--r--spec/lib/gitlab/database/gitlab_schema_spec.rb133
-rw-r--r--spec/lib/gitlab/database/indexing_exclusive_lease_guard_spec.rb40
-rw-r--r--spec/lib/gitlab/database/load_balancing/resolver_spec.rb14
-rw-r--r--spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb52
-rw-r--r--spec/lib/gitlab/database/lock_writes_manager_spec.rb31
-rw-r--r--spec/lib/gitlab/database/loose_foreign_keys_spec.rb27
-rw-r--r--spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb104
-rw-r--r--spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb8
-rw-r--r--spec/lib/gitlab/database/migration_helpers_spec.rb176
-rw-r--r--spec/lib/gitlab/database/migrations/instrumentation_spec.rb29
-rw-r--r--spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb22
-rw-r--r--spec/lib/gitlab/database/partitioning_spec.rb8
-rw-r--r--spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb4
-rw-r--r--spec/lib/gitlab/database/postgres_foreign_key_spec.rb191
-rw-r--r--spec/lib/gitlab/database/query_analyzer_spec.rb8
-rw-r--r--spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb8
-rw-r--r--spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb9
-rw-r--r--spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb3
-rw-r--r--spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb49
-rw-r--r--spec/lib/gitlab/database/reindexing/coordinator_spec.rb142
-rw-r--r--spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb14
-rw-r--r--spec/lib/gitlab/database/reindexing/index_selection_spec.rb36
-rw-r--r--spec/lib/gitlab/database/reindexing/reindex_action_spec.rb4
-rw-r--r--spec/lib/gitlab/database/reindexing_spec.rb6
-rw-r--r--spec/lib/gitlab/database/tables_truncate_spec.rb40
-rw-r--r--spec/lib/gitlab/database_spec.rb20
-rw-r--r--spec/lib/gitlab/diff/file_collection/merge_request_diff_base_spec.rb11
-rw-r--r--spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb2
-rw-r--r--spec/lib/gitlab/diff/file_collection/paginated_merge_request_diff_spec.rb2
-rw-r--r--spec/lib/gitlab/error_tracking_spec.rb26
-rw-r--r--spec/lib/gitlab/gitaly_client/ref_service_spec.rb11
-rw-r--r--spec/lib/gitlab/gitaly_client/repository_service_spec.rb33
-rw-r--r--spec/lib/gitlab/gitaly_client_spec.rb120
-rw-r--r--spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb4
-rw-r--r--spec/lib/gitlab/github_gists_import/importer/gists_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_gists_import/representation/gist_spec.rb2
-rw-r--r--spec/lib/gitlab/github_gists_import/status_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/bulk_importing_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/client_spec.rb42
-rw-r--r--spec/lib/gitlab/github_import/importer/labels_importer_spec.rb3
-rw-r--r--spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb103
-rw-r--r--spec/lib/gitlab/github_import/importer/releases_importer_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/page_counter_spec.rb2
-rw-r--r--spec/lib/gitlab/github_import/representation/protected_branch_spec.rb25
-rw-r--r--spec/lib/gitlab/http_spec.rb6
-rw-r--r--spec/lib/gitlab/import_export/all_models.yml11
-rw-r--r--spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb4
-rw-r--r--spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb153
-rw-r--r--spec/lib/gitlab/import_export/group/legacy_tree_saver_spec.rb159
-rw-r--r--spec/lib/gitlab/import_export/project/tree_saver_spec.rb2
-rw-r--r--spec/lib/gitlab/import_export/safe_model_attributes.yml3
-rw-r--r--spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb11
-rw-r--r--spec/lib/gitlab/import_export/version_checker_spec.rb2
-rw-r--r--spec/lib/gitlab/instrumentation_helper_spec.rb46
-rw-r--r--spec/lib/gitlab/memory/reporter_spec.rb85
-rw-r--r--spec/lib/gitlab/memory/watchdog_spec.rb3
-rw-r--r--spec/lib/gitlab/merge_requests/message_generator_spec.rb2
-rw-r--r--spec/lib/gitlab/observability_spec.rb37
-rw-r--r--spec/lib/gitlab/pages/cache_control_spec.rb55
-rw-r--r--spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb24
-rw-r--r--spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb28
-rw-r--r--spec/lib/gitlab/rack_attack_spec.rb2
-rw-r--r--spec/lib/gitlab/redis/duplicate_jobs_spec.rb10
-rw-r--r--spec/lib/gitlab/redis/multi_store_spec.rb667
-rw-r--r--spec/lib/gitlab/redis/repository_cache_spec.rb49
-rw-r--r--spec/lib/gitlab/redis/sidekiq_status_spec.rb8
-rw-r--r--spec/lib/gitlab/regex_spec.rb55
-rw-r--r--spec/lib/gitlab/relative_positioning/mover_spec.rb2
-rw-r--r--spec/lib/gitlab/repository_cache/preloader_spec.rb91
-rw-r--r--spec/lib/gitlab/repository_hash_cache_spec.rb31
-rw-r--r--spec/lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder_spec.rb58
-rw-r--r--spec/lib/gitlab/seeders/ci/runner/runner_fleet_seeder_spec.rb71
-rw-r--r--spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb42
-rw-r--r--spec/lib/gitlab/ssh/commit_spec.rb15
-rw-r--r--spec/lib/gitlab/ssh/signature_spec.rb8
-rw-r--r--spec/lib/gitlab/submodule_links_spec.rb2
-rw-r--r--spec/lib/gitlab/tracking_spec.rb4
-rw-r--r--spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb44
-rw-r--r--spec/lib/gitlab/usage/service_ping/legacy_metric_metadata_decorator_spec.rb (renamed from spec/lib/gitlab/usage/service_ping/legacy_metric_timing_decorator_spec.rb)21
-rw-r--r--spec/lib/gitlab/usage_data_metrics_spec.rb2
-rw-r--r--spec/lib/gitlab/usage_data_queries_spec.rb4
-rw-r--r--spec/lib/gitlab/usage_data_spec.rb18
-rw-r--r--spec/lib/gitlab/utils/lazy_attributes_spec.rb6
-rw-r--r--spec/lib/gitlab/utils/strong_memoize_spec.rb115
-rw-r--r--spec/lib/gitlab/utils/usage_data_spec.rb24
-rw-r--r--spec/lib/gitlab/version_info_spec.rb8
-rw-r--r--spec/lib/google_api/cloud_platform/client_spec.rb145
-rw-r--r--spec/lib/object_storage/direct_upload_spec.rb1
-rw-r--r--spec/lib/sidebars/groups/menus/observability_menu_spec.rb10
-rw-r--r--spec/lib/sidebars/groups/menus/settings_menu_spec.rb2
-rw-r--r--spec/lib/sidebars/projects/menus/settings_menu_spec.rb4
-rw-r--r--spec/lib/sidebars/your_work/menus/issues_menu_spec.rb12
-rw-r--r--spec/lib/sidebars/your_work/menus/merge_requests_menu_spec.rb12
-rw-r--r--spec/lib/sidebars/your_work/menus/todos_menu_spec.rb12
-rw-r--r--spec/lib/unnested_in_filters/rewriter_spec.rb26
-rw-r--r--spec/mailers/devise_mailer_spec.rb4
-rw-r--r--spec/mailers/emails/imports_spec.rb29
-rw-r--r--spec/mailers/emails/profile_spec.rb10
-rw-r--r--spec/mailers/emails/service_desk_spec.rb106
-rw-r--r--spec/metrics_server/metrics_server_spec.rb70
-rw-r--r--spec/migrations/20210406144743_backfill_total_tuple_count_for_batched_migrations_spec.rb44
-rw-r--r--spec/migrations/20210423160427_schedule_drop_invalid_vulnerabilities_spec.rb114
-rw-r--r--spec/migrations/20210430134202_copy_adoption_snapshot_namespace_spec.rb44
-rw-r--r--spec/migrations/20210430135954_copy_adoption_segments_namespace_spec.rb25
-rw-r--r--spec/migrations/20210503105845_add_project_value_stream_id_to_project_stages_spec.rb42
-rw-r--r--spec/migrations/20210511142748_schedule_drop_invalid_vulnerabilities2_spec.rb120
-rw-r--r--spec/migrations/20210514063252_schedule_cleanup_orphaned_lfs_objects_projects_spec.rb37
-rw-r--r--spec/migrations/20210601073400_fix_total_stage_in_vsa_spec.rb30
-rw-r--r--spec/migrations/20210601080039_group_protected_environments_add_index_and_constraint_spec.rb29
-rw-r--r--spec/migrations/20210708130419_reschedule_merge_request_diff_users_background_migration_spec.rb3
-rw-r--r--spec/migrations/20210804150320_create_base_work_item_types_spec.rb3
-rw-r--r--spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb2
-rw-r--r--spec/migrations/20211012134316_clean_up_migrate_merge_request_diff_commit_users_spec.rb2
-rw-r--r--spec/migrations/20211028155449_schedule_fix_merge_request_diff_commit_users_migration_spec.rb2
-rw-r--r--spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb2
-rw-r--r--spec/migrations/20220315171129_cleanup_draft_data_from_faulty_regex_spec.rb2
-rw-r--r--spec/migrations/20220502015011_clean_up_fix_merge_request_diff_commit_users_spec.rb2
-rw-r--r--spec/migrations/20221018050323_add_objective_and_keyresult_to_work_item_types_spec.rb2
-rw-r--r--spec/migrations/20221209235940_cleanup_o_auth_access_tokens_with_null_expires_in_spec.rb24
-rw-r--r--spec/migrations/20221215151822_schedule_backfill_releases_author_id_spec.rb53
-rw-r--r--spec/migrations/20221220131020_bump_default_partition_id_value_for_ci_tables_spec.rb78
-rw-r--r--spec/migrations/20221221110733_remove_temp_index_for_project_statistics_upload_size_migration_spec.rb22
-rw-r--r--spec/migrations/20221222092958_sync_new_amount_used_with_amount_used_spec.rb54
-rw-r--r--spec/migrations/20221223123019_delete_queued_jobs_for_vulnerabilities_feedback_migration_spec.rb33
-rw-r--r--spec/migrations/20230105172120_sync_new_amount_used_with_amount_used_on_ci_namespace_monthly_usages_table_spec.rb55
-rw-r--r--spec/migrations/20230116111252_finalize_todo_sanitization_spec.rb57
-rw-r--r--spec/migrations/add_new_trail_plans_spec.rb95
-rw-r--r--spec/migrations/backfill_clusters_integration_prometheus_enabled_spec.rb80
-rw-r--r--spec/migrations/backfill_escalation_policies_for_oncall_schedules_spec.rb103
-rw-r--r--spec/migrations/backfill_nuget_temporary_packages_to_processing_status_spec.rb34
-rw-r--r--spec/migrations/change_web_hook_events_default_spec.rb36
-rw-r--r--spec/migrations/clean_up_pending_builds_table_spec.rb48
-rw-r--r--spec/migrations/cleanup_after_add_primary_email_to_emails_if_user_confirmed_spec.rb48
-rw-r--r--spec/migrations/cleanup_move_container_registry_enabled_to_project_feature_spec.rb45
-rw-r--r--spec/migrations/cleanup_mr_attention_request_todos_spec.rb2
-rw-r--r--spec/migrations/confirm_support_bot_user_spec.rb86
-rw-r--r--spec/migrations/delete_security_findings_without_uuid_spec.rb36
-rw-r--r--spec/migrations/insert_ci_daily_pipeline_schedule_triggers_plan_limits_spec.rb73
-rw-r--r--spec/migrations/migrate_elastic_index_settings_spec.rb44
-rw-r--r--spec/migrations/move_container_registry_enabled_to_project_features3_spec.rb59
-rw-r--r--spec/migrations/populate_dismissal_information_for_vulnerabilities_spec.rb61
-rw-r--r--spec/migrations/queue_backfill_admin_mode_scope_for_personal_access_tokens_spec.rb18
-rw-r--r--spec/migrations/remove_hipchat_service_records_spec.rb23
-rw-r--r--spec/migrations/remove_records_without_group_from_webhooks_table_spec.rb27
-rw-r--r--spec/migrations/schedule_add_primary_email_to_emails_if_user_confirmed_spec.rb31
-rw-r--r--spec/migrations/schedule_backfill_draft_status_on_merge_requests_corrected_regex_spec.rb3
-rw-r--r--spec/migrations/schedule_disable_expiration_policies_linked_to_no_container_images_spec.rb47
-rw-r--r--spec/migrations/schedule_update_timelogs_project_id_spec.rb33
-rw-r--r--spec/migrations/schedule_update_users_where_two_factor_auth_required_from_group_spec.rb29
-rw-r--r--spec/migrations/second_recount_epic_cache_counts_spec.rb32
-rw-r--r--spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb2
-rw-r--r--spec/migrations/update_invalid_web_hooks_spec.rb30
-rw-r--r--spec/models/abuse_report_spec.rb40
-rw-r--r--spec/models/achievements/achievement_spec.rb3
-rw-r--r--spec/models/achievements/user_achievement_spec.rb13
-rw-r--r--spec/models/analytics/cycle_analytics/aggregation_spec.rb22
-rw-r--r--spec/models/analytics/cycle_analytics/project_stage_spec.rb2
-rw-r--r--spec/models/appearance_spec.rb4
-rw-r--r--spec/models/application_setting_spec.rb40
-rw-r--r--spec/models/bulk_imports/entity_spec.rb20
-rw-r--r--spec/models/chat_name_spec.rb12
-rw-r--r--spec/models/ci/bridge_spec.rb41
-rw-r--r--spec/models/ci/build_spec.rb80
-rw-r--r--spec/models/ci/job_artifact_spec.rb36
-rw-r--r--spec/models/ci/namespace_mirror_spec.rb2
-rw-r--r--spec/models/ci/pipeline_spec.rb17
-rw-r--r--spec/models/ci/runner_machine_spec.rb51
-rw-r--r--spec/models/ci/runner_spec.rb26
-rw-r--r--spec/models/ci/runner_version_spec.rb8
-rw-r--r--spec/models/clusters/providers/aws_spec.rb33
-rw-r--r--spec/models/clusters/providers/gcp_spec.rb25
-rw-r--r--spec/models/commit_collection_spec.rb16
-rw-r--r--spec/models/commit_signatures/ssh_signature_spec.rb36
-rw-r--r--spec/models/commit_spec.rb14
-rw-r--r--spec/models/concerns/counter_attribute_spec.rb44
-rw-r--r--spec/models/concerns/has_user_type_spec.rb3
-rw-r--r--spec/models/concerns/noteable_spec.rb76
-rw-r--r--spec/models/concerns/safely_change_column_default_spec.rb75
-rw-r--r--spec/models/concerns/sensitive_serializable_hash_spec.rb2
-rw-r--r--spec/models/deployment_spec.rb23
-rw-r--r--spec/models/environment_spec.rb38
-rw-r--r--spec/models/event_collection_spec.rb19
-rw-r--r--spec/models/event_spec.rb15
-rw-r--r--spec/models/factories_spec.rb10
-rw-r--r--spec/models/group_spec.rb196
-rw-r--r--spec/models/integration_spec.rb2
-rw-r--r--spec/models/integrations/apple_app_store_spec.rb105
-rw-r--r--spec/models/integrations/base_chat_notification_spec.rb8
-rw-r--r--spec/models/integrations/chat_message/issue_message_spec.rb1
-rw-r--r--spec/models/integrations/chat_message/pipeline_message_spec.rb12
-rw-r--r--spec/models/integrations/every_integration_spec.rb4
-rw-r--r--spec/models/integrations/field_spec.rb2
-rw-r--r--spec/models/issue_spec.rb2
-rw-r--r--spec/models/member_spec.rb41
-rw-r--r--spec/models/members/member_role_spec.rb23
-rw-r--r--spec/models/merge_request/approval_removal_settings_spec.rb2
-rw-r--r--spec/models/merge_request_diff_spec.rb23
-rw-r--r--spec/models/merge_request_spec.rb72
-rw-r--r--spec/models/ml/candidate_spec.rb53
-rw-r--r--spec/models/namespace_setting_spec.rb76
-rw-r--r--spec/models/namespace_spec.rb20
-rw-r--r--spec/models/note_spec.rb44
-rw-r--r--spec/models/oauth_access_token_spec.rb18
-rw-r--r--spec/models/packages/package_file_spec.rb7
-rw-r--r--spec/models/packages/package_spec.rb5
-rw-r--r--spec/models/pages/lookup_path_spec.rb10
-rw-r--r--spec/models/pages_domain_spec.rb2
-rw-r--r--spec/models/personal_access_token_spec.rb31
-rw-r--r--spec/models/plan_limits_spec.rb7
-rw-r--r--spec/models/project_import_state_spec.rb20
-rw-r--r--spec/models/project_spec.rb269
-rw-r--r--spec/models/project_statistics_spec.rb168
-rw-r--r--spec/models/projects/branch_rule_spec.rb32
-rw-r--r--spec/models/projects/build_artifacts_size_refresh_spec.rb64
-rw-r--r--spec/models/release_spec.rb4
-rw-r--r--spec/models/repository_spec.rb65
-rw-r--r--spec/models/resource_event_spec.rb19
-rw-r--r--spec/models/resource_label_event_spec.rb18
-rw-r--r--spec/models/resource_milestone_event_spec.rb3
-rw-r--r--spec/models/resource_state_event_spec.rb3
-rw-r--r--spec/models/timelog_spec.rb28
-rw-r--r--spec/models/todo_spec.rb62
-rw-r--r--spec/models/user_detail_spec.rb85
-rw-r--r--spec/models/user_highest_role_spec.rb2
-rw-r--r--spec/models/user_spec.rb269
-rw-r--r--spec/models/users/namespace_commit_email_spec.rb34
-rw-r--r--spec/models/work_item_spec.rb7
-rw-r--r--spec/models/work_items/parent_link_spec.rb12
-rw-r--r--spec/models/work_items/widgets/hierarchy_spec.rb20
-rw-r--r--spec/policies/concerns/archived_abilities_spec.rb2
-rw-r--r--spec/policies/global_policy_spec.rb106
-rw-r--r--spec/policies/group_policy_spec.rb21
-rw-r--r--spec/policies/issue_policy_spec.rb124
-rw-r--r--spec/policies/merge_request_policy_spec.rb30
-rw-r--r--spec/policies/note_policy_spec.rb36
-rw-r--r--spec/policies/project_group_link_policy_spec.rb56
-rw-r--r--spec/policies/project_policy_spec.rb2
-rw-r--r--spec/policies/resource_label_event_policy_spec.rb2
-rw-r--r--spec/policies/resource_milestone_event_policy_spec.rb73
-rw-r--r--spec/policies/resource_state_event_policy_spec.rb39
-rw-r--r--spec/policies/todo_policy_spec.rb2
-rw-r--r--spec/policies/user_policy_spec.rb26
-rw-r--r--spec/policies/work_item_policy_spec.rb2
-rw-r--r--spec/presenters/ci/build_runner_presenter_spec.rb27
-rw-r--r--spec/presenters/ci/stage_presenter_spec.rb2
-rw-r--r--spec/presenters/packages/nuget/packages_metadata_presenter_spec.rb13
-rw-r--r--spec/presenters/project_presenter_spec.rb40
-rw-r--r--spec/requests/abuse_reports_controller_spec.rb90
-rw-r--r--spec/requests/api/appearance_spec.rb10
-rw-r--r--spec/requests/api/boards_spec.rb2
-rw-r--r--spec/requests/api/bulk_imports_spec.rb200
-rw-r--r--spec/requests/api/ci/jobs_spec.rb70
-rw-r--r--spec/requests/api/ci/runner/jobs_artifacts_spec.rb39
-rw-r--r--spec/requests/api/commits_spec.rb12
-rw-r--r--spec/requests/api/debian_project_packages_spec.rb29
-rw-r--r--spec/requests/api/environments_spec.rb74
-rw-r--r--spec/requests/api/files_spec.rb120
-rw-r--r--spec/requests/api/graphql/ci/config_spec.rb62
-rw-r--r--spec/requests/api/graphql/ci/jobs_spec.rb8
-rw-r--r--spec/requests/api/graphql/group/merge_requests_spec.rb2
-rw-r--r--spec/requests/api/graphql/group_query_spec.rb2
-rw-r--r--spec/requests/api/graphql/issues_spec.rb65
-rw-r--r--spec/requests/api/graphql/merge_request/merge_request_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/achievements/create_spec.rb78
-rw-r--r--spec/requests/api/graphql/mutations/ci/job_play_spec.rb39
-rw-r--r--spec/requests/api/graphql/mutations/groups/update_spec.rb19
-rw-r--r--spec/requests/api/graphql/mutations/members/groups/bulk_update_spec.rb130
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/create_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/reviewer_rereview_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_draft_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_locked_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_milestone_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_reviewers_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb2
-rw-r--r--spec/requests/api/graphql/mutations/work_items/update_spec.rb4
-rw-r--r--spec/requests/api/graphql/project/branch_rules_spec.rb45
-rw-r--r--spec/requests/api/graphql/project/issues_spec.rb4
-rw-r--r--spec/requests/api/graphql/project/jobs_spec.rb12
-rw-r--r--spec/requests/api/graphql/project/merge_request/diff_notes_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/merge_request_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/merge_requests_spec.rb2
-rw-r--r--spec/requests/api/graphql/project/pipeline_spec.rb17
-rw-r--r--spec/requests/api/graphql/project/runners_spec.rb12
-rw-r--r--spec/requests/api/graphql/project/work_items_spec.rb2
-rw-r--r--spec/requests/api/graphql/user_spec.rb41
-rw-r--r--spec/requests/api/graphql/work_item_spec.rb18
-rw-r--r--spec/requests/api/group_boards_spec.rb2
-rw-r--r--spec/requests/api/group_export_spec.rb11
-rw-r--r--spec/requests/api/import_github_spec.rb139
-rw-r--r--spec/requests/api/internal/base_spec.rb24
-rw-r--r--spec/requests/api/issues/get_project_issues_spec.rb61
-rw-r--r--spec/requests/api/issues/issues_spec.rb5
-rw-r--r--spec/requests/api/markdown_golden_master_spec.rb9
-rw-r--r--spec/requests/api/merge_requests_spec.rb10
-rw-r--r--spec/requests/api/ml/mlflow_spec.rb10
-rw-r--r--spec/requests/api/nuget_group_packages_spec.rb40
-rw-r--r--spec/requests/api/pages_domains_spec.rb3
-rw-r--r--spec/requests/api/project_debian_distributions_spec.rb25
-rw-r--r--spec/requests/api/project_export_spec.rb14
-rw-r--r--spec/requests/api/projects_spec.rb14
-rw-r--r--spec/requests/api/release/links_spec.rb27
-rw-r--r--spec/requests/api/releases_spec.rb28
-rw-r--r--spec/requests/api/repositories_spec.rb12
-rw-r--r--spec/requests/api/rubygem_packages_spec.rb26
-rw-r--r--spec/requests/api/search_spec.rb20
-rw-r--r--spec/requests/api/settings_spec.rb64
-rw-r--r--spec/requests/api/snippet_repository_storage_moves_spec.rb2
-rw-r--r--spec/requests/api/suggestions_spec.rb2
-rw-r--r--spec/requests/api/todos_spec.rb32
-rw-r--r--spec/requests/api/users_spec.rb277
-rw-r--r--spec/requests/dashboard_controller_spec.rb28
-rw-r--r--spec/requests/groups/observability_controller_spec.rb19
-rw-r--r--spec/requests/groups/usage_quotas_controller_spec.rb2
-rw-r--r--spec/requests/openid_connect_spec.rb1
-rw-r--r--spec/requests/projects/issues_controller_spec.rb47
-rw-r--r--spec/requests/projects/merge_requests/content_spec.rb2
-rw-r--r--spec/requests/projects/merge_requests/context_commit_diffs_spec.rb3
-rw-r--r--spec/requests/projects/merge_requests/creations_spec.rb17
-rw-r--r--spec/requests/projects/merge_requests/diffs_spec.rb14
-rw-r--r--spec/requests/projects/merge_requests_controller_spec.rb76
-rw-r--r--spec/requests/projects/ml/candidates_controller_spec.rb8
-rw-r--r--spec/requests/projects/ml/experiments_controller_spec.rb51
-rw-r--r--spec/requests/projects_controller_spec.rb2
-rw-r--r--spec/requests/pwa_controller_spec.rb20
-rw-r--r--spec/requests/rack_attack_global_spec.rb10
-rw-r--r--spec/requests/users_controller_spec.rb12
-rw-r--r--spec/routing/group_routing_spec.rb8
-rw-r--r--spec/rubocop/check_graceful_task_spec.rb2
-rw-r--r--spec/rubocop/cop/background_migration/feature_category_spec.rb71
-rw-r--r--spec/rubocop/cop/gitlab/strong_memoize_attr_spec.rb36
-rw-r--r--spec/rubocop/cop/lint/last_keyword_argument_spec.rb10
-rw-r--r--spec/scripts/trigger-build_spec.rb24
-rw-r--r--spec/serializers/ci/downloadable_artifact_entity_spec.rb5
-rw-r--r--spec/serializers/ci/pipeline_entity_spec.rb10
-rw-r--r--spec/serializers/diffs_entity_spec.rb7
-rw-r--r--spec/serializers/diffs_metadata_entity_spec.rb7
-rw-r--r--spec/serializers/group_link/project_group_link_entity_spec.rb27
-rw-r--r--spec/serializers/merge_requests/pipeline_entity_spec.rb10
-rw-r--r--spec/serializers/paginated_diff_entity_spec.rb7
-rw-r--r--spec/serializers/project_mirror_entity_spec.rb4
-rw-r--r--spec/serializers/stage_entity_spec.rb2
-rw-r--r--spec/services/achievements/create_service_spec.rb46
-rw-r--r--spec/services/audit_event_service_spec.rb2
-rw-r--r--spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb2
-rw-r--r--spec/services/bulk_imports/create_service_spec.rb295
-rw-r--r--spec/services/bulk_imports/get_importable_data_service_spec.rb14
-rw-r--r--spec/services/chat_names/authorize_user_service_spec.rb5
-rw-r--r--spec/services/ci/create_downstream_pipeline_service_spec.rb77
-rw-r--r--spec/services/ci/create_pipeline_service/cache_spec.rb13
-rw-r--r--spec/services/ci/create_pipeline_service/include_spec.rb22
-rw-r--r--spec/services/ci/create_pipeline_service/logger_spec.rb69
-rw-r--r--spec/services/ci/create_pipeline_service/rules_spec.rb5
-rw-r--r--spec/services/ci/create_pipeline_service/variables_spec.rb37
-rw-r--r--spec/services/ci/create_pipeline_service_spec.rb33
-rw-r--r--spec/services/ci/job_artifacts/create_service_spec.rb76
-rw-r--r--spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb9
-rw-r--r--spec/services/ci/job_artifacts/destroy_associations_service_spec.rb35
-rw-r--r--spec/services/ci/job_artifacts/destroy_batch_service_spec.rb64
-rw-r--r--spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb23
-rw-r--r--spec/services/clusters/aws/authorize_role_service_spec.rb102
-rw-r--r--spec/services/clusters/aws/fetch_credentials_service_spec.rb139
-rw-r--r--spec/services/clusters/aws/finalize_creation_service_spec.rb124
-rw-r--r--spec/services/clusters/aws/provision_service_spec.rb130
-rw-r--r--spec/services/clusters/aws/verify_provision_status_service_spec.rb76
-rw-r--r--spec/services/clusters/create_service_spec.rb1
-rw-r--r--spec/services/clusters/gcp/fetch_operation_service_spec.rb45
-rw-r--r--spec/services/clusters/gcp/finalize_creation_service_spec.rb161
-rw-r--r--spec/services/clusters/gcp/provision_service_spec.rb71
-rw-r--r--spec/services/clusters/gcp/verify_provision_status_service_spec.rb111
-rw-r--r--spec/services/database/consistency_check_service_spec.rb2
-rw-r--r--spec/services/design_management/save_designs_service_spec.rb23
-rw-r--r--spec/services/discussions/resolve_service_spec.rb14
-rw-r--r--spec/services/discussions/unresolve_service_spec.rb29
-rw-r--r--spec/services/draft_notes/publish_service_spec.rb8
-rw-r--r--spec/services/environments/stop_stale_service_spec.rb49
-rw-r--r--spec/services/feature_flags/create_service_spec.rb2
-rw-r--r--spec/services/feature_flags/destroy_service_spec.rb2
-rw-r--r--spec/services/feature_flags/update_service_spec.rb2
-rw-r--r--spec/services/files/base_service_spec.rb59
-rw-r--r--spec/services/groups/import_export/export_service_spec.rb10
-rw-r--r--spec/services/groups/import_export/import_service_spec.rb388
-rw-r--r--spec/services/groups/transfer_service_spec.rb37
-rw-r--r--spec/services/groups/update_shared_runners_service_spec.rb28
-rw-r--r--spec/services/ide/schemas_config_service_spec.rb34
-rw-r--r--spec/services/import/github/gists_import_service_spec.rb26
-rw-r--r--spec/services/import/github_service_spec.rb56
-rw-r--r--spec/services/issue_links/create_service_spec.rb4
-rw-r--r--spec/services/issues/close_service_spec.rb2
-rw-r--r--spec/services/issues/export_csv_service_spec.rb2
-rw-r--r--spec/services/issues/update_service_spec.rb2
-rw-r--r--spec/services/lfs/file_transformer_spec.rb38
-rw-r--r--spec/services/members/destroy_service_spec.rb102
-rw-r--r--spec/services/members/update_service_spec.rb44
-rw-r--r--spec/services/merge_requests/base_service_spec.rb68
-rw-r--r--spec/services/merge_requests/rebase_service_spec.rb39
-rw-r--r--spec/services/merge_requests/refresh_service_spec.rb6
-rw-r--r--spec/services/merge_requests/update_service_spec.rb26
-rw-r--r--spec/services/ml/experiment_tracking/candidate_repository_spec.rb22
-rw-r--r--spec/services/notes/create_service_spec.rb18
-rw-r--r--spec/services/notification_service_spec.rb9
-rw-r--r--spec/services/packages/conan/search_service_spec.rb2
-rw-r--r--spec/services/pages_domains/create_service_spec.rb3
-rw-r--r--spec/services/pages_domains/delete_service_spec.rb3
-rw-r--r--spec/services/pages_domains/retry_acme_order_service_spec.rb2
-rw-r--r--spec/services/pages_domains/update_service_spec.rb3
-rw-r--r--spec/services/personal_access_tokens/revoke_service_spec.rb14
-rw-r--r--spec/services/projects/create_service_spec.rb5
-rw-r--r--spec/services/projects/import_service_spec.rb22
-rw-r--r--spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb20
-rw-r--r--spec/services/projects/transfer_service_spec.rb4
-rw-r--r--spec/services/repositories/changelog_service_spec.rb2
-rw-r--r--spec/services/search_service_spec.rb28
-rw-r--r--spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb20
-rw-r--r--spec/services/security/ci_configuration/sast_create_service_spec.rb2
-rw-r--r--spec/services/service_ping/submit_service_ping_service_spec.rb13
-rw-r--r--spec/services/service_response_spec.rb36
-rw-r--r--spec/services/test_hooks/project_service_spec.rb18
-rw-r--r--spec/services/test_hooks/system_service_spec.rb4
-rw-r--r--spec/services/todo_service_spec.rb129
-rw-r--r--spec/services/users/block_service_spec.rb11
-rw-r--r--spec/services/users/signup_service_spec.rb20
-rw-r--r--spec/services/users/unblock_service_spec.rb45
-rw-r--r--spec/services/work_items/create_service_spec.rb2
-rw-r--r--spec/services/work_items/parent_links/create_service_spec.rb2
-rw-r--r--spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb4
-rw-r--r--spec/simplecov_env.rb1
-rw-r--r--spec/spec_helper.rb4
-rw-r--r--spec/support/caching.rb6
-rw-r--r--spec/support/capybara.rb13
-rw-r--r--spec/support/helpers/api_helpers.rb9
-rw-r--r--spec/support/helpers/cycle_analytics_helpers.rb2
-rw-r--r--spec/support/helpers/database/database_helpers.rb30
-rw-r--r--spec/support/helpers/database/table_schema_helpers.rb4
-rw-r--r--spec/support/helpers/features/members_helpers.rb16
-rw-r--r--spec/support/helpers/features/web_ide_spec_helpers.rb10
-rw-r--r--spec/support/helpers/listbox_helpers.rb24
-rw-r--r--spec/support/helpers/listbox_input_helper.rb18
-rw-r--r--spec/support/helpers/login_helpers.rb2
-rw-r--r--spec/support/helpers/migrations_helpers.rb6
-rw-r--r--spec/support/helpers/navbar_structure_helper.rb5
-rw-r--r--spec/support/helpers/query_recorder.rb4
-rw-r--r--spec/support/helpers/usage_data_helpers.rb5
-rw-r--r--spec/support/matchers/be_boolean.rb10
-rw-r--r--spec/support/matchers/exceed_query_limit.rb29
-rw-r--r--spec/support/redis/redis_helpers.rb7
-rw-r--r--spec/support/redis/redis_new_instance_shared_examples.rb60
-rw-r--r--spec/support/redis/redis_shared_examples.rb76
-rw-r--r--spec/support/rspec_order_todo.yml252
-rw-r--r--spec/support/services/clusters/create_service_shared.rb5
-rw-r--r--spec/support/shared_contexts/bulk_imports_requests_shared_context.rb2
-rw-r--r--spec/support/shared_contexts/features/integrations/integrations_shared_context.rb8
-rw-r--r--spec/support/shared_contexts/markdown_golden_master_shared_examples.rb132
-rw-r--r--spec/support/shared_contexts/navbar_structure_context.rb43
-rw-r--r--spec/support/shared_contexts/policies/group_policy_shared_context.rb3
-rw-r--r--spec/support/shared_examples/analytics/cycle_analytics/parentable_examples.rb28
-rw-r--r--spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb121
-rw-r--r--spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/controllers/rate_limited_endpoint_shared_examples.rb26
-rw-r--r--spec/support/shared_examples/features/code_highlight_shared_examples.rb13
-rw-r--r--spec/support/shared_examples/features/content_editor_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/features/dashboard/sidebar_shared_examples.rb21
-rw-r--r--spec/support/shared_examples/features/reportable_note_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/finders/issues_finder_shared_examples.rb18
-rw-r--r--spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/lib/sidebars/your_work/menus/menu_item_examples.rb38
-rw-r--r--spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb75
-rw-r--r--spec/support/shared_examples/models/concerns/integrations/reset_secret_fields_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/models/member_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/models/members_notifications_shared_example.rb2
-rw-r--r--spec/support/shared_examples/models/relative_positioning_shared_examples.rb3
-rw-r--r--spec/support/shared_examples/models/resource_event_shared_examples.rb12
-rw-r--r--spec/support/shared_examples/models/update_project_statistics_shared_examples.rb10
-rw-r--r--spec/support/shared_examples/namespaces/members.rb13
-rw-r--r--spec/support/shared_examples/observability/csp_shared_examples.rb47
-rw-r--r--spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb4
-rw-r--r--spec/support/shared_examples/quick_actions/issue/promote_to_incident_quick_action_shared_examples.rb28
-rw-r--r--spec/support/shared_examples/requests/api/graphql/issue_list_shared_examples.rb33
-rw-r--r--spec/support/shared_examples/requests/api/issuable_search_shared_examples.rb32
-rw-r--r--spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb38
-rw-r--r--spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb20
-rw-r--r--spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb2
-rw-r--r--spec/support/shared_examples/services/issuable_links/destroyable_issuable_links_shared_examples.rb7
-rw-r--r--spec/support/shared_examples/services/repositories/housekeeping_shared_examples.rb50
-rw-r--r--spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb62
-rw-r--r--spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb24
-rw-r--r--spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb108
-rw-r--r--spec/support/shared_examples/workers/update_repository_move_shared_examples.rb4
-rw-r--r--spec/support/tmpdir.rb18
-rw-r--r--spec/support_specs/license_metadata_tags_spec.rb31
-rw-r--r--spec/tasks/cache/clear/redis_spec.rb41
-rw-r--r--spec/tasks/gitlab/backup_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/db/lock_writes_rake_spec.rb39
-rw-r--r--spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb8
-rw-r--r--spec/tasks/gitlab/db_rake_spec.rb2
-rw-r--r--spec/tasks/gitlab/security/update_banned_ssh_keys_rake_spec.rb56
-rw-r--r--spec/tasks/gitlab/seed/runner_fleet_rake_spec.rb43
-rw-r--r--spec/tooling/danger/specs_spec.rb33
-rw-r--r--spec/tooling/danger/user_types_spec.rb56
-rw-r--r--spec/tooling/lib/tooling/view_to_js_mappings_spec.rb356
-rw-r--r--spec/tooling/quality/test_level_spec.rb36
-rw-r--r--spec/uploaders/object_storage_spec.rb12
-rw-r--r--spec/views/admin/application_settings/_repository_check.html.haml_spec.rb21
-rw-r--r--spec/views/admin/application_settings/general.html.haml_spec.rb6
-rw-r--r--spec/views/admin/broadcast_messages/index.html.haml_spec.rb52
-rw-r--r--spec/views/admin/dashboard/index.html.haml_spec.rb10
-rw-r--r--spec/views/errors/omniauth_error.html.haml_spec.rb6
-rw-r--r--spec/views/groups/edit.html.haml_spec.rb8
-rw-r--r--spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb18
-rw-r--r--spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb2
-rw-r--r--spec/views/layouts/snippets.html.haml_spec.rb48
-rw-r--r--spec/views/profiles/notifications/show.html.haml_spec.rb25
-rw-r--r--spec/views/profiles/preferences/show.html.haml_spec.rb5
-rw-r--r--spec/views/projects/_files.html.haml_spec.rb73
-rw-r--r--spec/views/projects/commit/show.html.haml_spec.rb7
-rw-r--r--spec/views/projects/edit.html.haml_spec.rb6
-rw-r--r--spec/views/registrations/welcome/show.html.haml_spec.rb4
-rw-r--r--spec/views/search/_results.html.haml_spec.rb9
-rw-r--r--spec/views/search/show.html.haml_spec.rb2
-rw-r--r--spec/views/shared/projects/_list.html.haml_spec.rb2
-rw-r--r--spec/workers/ci/build_finished_worker_spec.rb6
-rw-r--r--spec/workers/ci/initial_pipeline_process_worker_spec.rb58
-rw-r--r--spec/workers/cluster_provision_worker_spec.rb47
-rw-r--r--spec/workers/counters/cleanup_refresh_worker_spec.rb42
-rw-r--r--spec/workers/every_sidekiq_worker_spec.rb1
-rw-r--r--spec/workers/gitlab/github_gists_import/finish_import_worker_spec.rb35
-rw-r--r--spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_gists_import/start_import_worker_spec.rb2
-rw-r--r--spec/workers/gitlab/github_import/import_protected_branch_worker_spec.rb3
-rw-r--r--spec/workers/merge_requests/create_pipeline_worker_spec.rb6
-rw-r--r--spec/workers/pages/invalidate_domain_cache_worker_spec.rb66
-rw-r--r--spec/workers/personal_access_tokens/expired_notification_worker_spec.rb4
-rw-r--r--spec/workers/pipeline_schedule_worker_spec.rb29
-rw-r--r--spec/workers/projects/delete_branch_worker_spec.rb26
-rw-r--r--spec/workers/projects/finalize_project_statistics_refresh_worker_spec.rb41
-rw-r--r--spec/workers/projects/git_garbage_collect_worker_spec.rb5
-rw-r--r--spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb32
-rw-r--r--spec/workers/repository_import_worker_spec.rb104
-rw-r--r--spec/workers/run_pipeline_schedule_worker_spec.rb101
-rw-r--r--spec/workers/wait_for_cluster_creation_worker_spec.rb47
-rw-r--r--spec/workers/wikis/git_garbage_collect_worker_spec.rb2
1252 files changed, 25603 insertions, 14425 deletions
diff --git a/spec/commands/metrics_server/metrics_server_spec.rb b/spec/commands/metrics_server/metrics_server_spec.rb
index f93be1d9f88..310e31da045 100644
--- a/spec/commands/metrics_server/metrics_server_spec.rb
+++ b/spec/commands/metrics_server/metrics_server_spec.rb
@@ -70,7 +70,8 @@ RSpec.describe 'GitLab metrics server', :aggregate_failures do
before do
if use_golang_server
stub_env('GITLAB_GOLANG_METRICS_SERVER', '1')
- allow(Settings).to receive(:monitoring).and_return(config.dig('test', 'monitoring'))
+ allow(Settings).to receive(:monitoring).and_return(
+ Settingslogic.new(config.dig('test', 'monitoring')))
else
config_file.write(YAML.dump(config))
config_file.close
diff --git a/spec/commands/sidekiq_cluster/cli_spec.rb b/spec/commands/sidekiq_cluster/cli_spec.rb
index c2ea9455de6..0c32fa2571a 100644
--- a/spec/commands/sidekiq_cluster/cli_spec.rb
+++ b/spec/commands/sidekiq_cluster/cli_spec.rb
@@ -245,9 +245,15 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
it 'expands multiple queue groups correctly' do
expected_workers =
if Gitlab.ee?
- [%w[chat_notification], %w[project_export projects_import_export_parallel_project_export projects_import_export_relation_export project_template_export]]
+ [
+ %w[cronjob:clusters_integrations_check_prometheus_health incident_management_close_incident status_page_publish],
+ %w[project_export projects_import_export_parallel_project_export projects_import_export_relation_export project_template_export]
+ ]
else
- [%w[chat_notification], %w[project_export projects_import_export_parallel_project_export projects_import_export_relation_export]]
+ [
+ %w[cronjob:clusters_integrations_check_prometheus_health incident_management_close_incident],
+ %w[project_export projects_import_export_parallel_project_export projects_import_export_relation_export]
+ ]
end
expect(Gitlab::SidekiqCluster)
@@ -255,7 +261,7 @@ RSpec.describe Gitlab::SidekiqCluster::CLI, stub_settings_source: true do # rubo
.with(expected_workers, default_options)
.and_return([])
- cli.run(%w(--queue-selector feature_category=chatops&has_external_dependencies=true resource_boundary=memory&feature_category=importers))
+ cli.run(%w(--queue-selector feature_category=incident_management&has_external_dependencies=true resource_boundary=memory&feature_category=importers))
end
it 'allows the special * selector' do
diff --git a/spec/components/previews/pajamas/button_component_preview.rb b/spec/components/previews/pajamas/button_component_preview.rb
index c07d898d9cd..13a04dc0d63 100644
--- a/spec/components/previews/pajamas/button_component_preview.rb
+++ b/spec/components/previews/pajamas/button_component_preview.rb
@@ -3,7 +3,7 @@ module Pajamas
class ButtonComponentPreview < ViewComponent::Preview
# Button
# ----
- # See its design reference [here](https://design.gitlab.com/components/banner).
+ # See its design reference [here](https://design.gitlab.com/components/button).
#
# @param category select {{ Pajamas::ButtonComponent::CATEGORY_OPTIONS }}
# @param variant select {{ Pajamas::ButtonComponent::VARIANT_OPTIONS }}
@@ -13,7 +13,7 @@ module Pajamas
# @param loading toggle
# @param block toggle
# @param selected toggle
- # @param icon text
+ # @param icon select [~, star-o, issue-closed, tanuki]
# @param text text
def default( # rubocop:disable Metrics/ParameterLists
category: :primary,
@@ -24,7 +24,7 @@ module Pajamas
loading: false,
block: false,
selected: false,
- icon: "pencil",
+ icon: nil,
text: "Edit"
)
render(Pajamas::ButtonComponent.new(
diff --git a/spec/config/inject_enterprise_edition_module_spec.rb b/spec/config/inject_enterprise_edition_module_spec.rb
index 47cb36c569e..e8c0905ff89 100644
--- a/spec/config/inject_enterprise_edition_module_spec.rb
+++ b/spec/config/inject_enterprise_edition_module_spec.rb
@@ -2,7 +2,7 @@
require 'fast_spec_helper'
-RSpec.describe InjectEnterpriseEditionModule, feature_category: :fulfillment_developer_productivity do
+RSpec.describe InjectEnterpriseEditionModule, feature_category: :not_owned do
let(:extension_name) { 'FF' }
let(:extension_namespace) { Module.new }
let(:fish_name) { 'Fish' }
diff --git a/spec/config/mail_room_spec.rb b/spec/config/mail_room_spec.rb
index cf2146bdf77..a3806fb3cb6 100644
--- a/spec/config/mail_room_spec.rb
+++ b/spec/config/mail_room_spec.rb
@@ -27,11 +27,6 @@ RSpec.describe 'mail_room.yml', feature_category: :service_desk do
before do
stub_env('GITLAB_REDIS_QUEUES_CONFIG_FILE', absolute_path(queues_config_path))
- redis_clear_raw_config!(Gitlab::Redis::Queues)
- end
-
- after do
- redis_clear_raw_config!(Gitlab::Redis::Queues)
end
context 'when incoming email is disabled' do
@@ -57,6 +52,7 @@ RSpec.describe 'mail_room.yml', feature_category: :service_desk do
password: '[REDACTED]',
name: 'inbox',
idle_timeout: 60,
+ delete_after_delivery: true,
expunge_deleted: true
}
expected_options = {
@@ -81,7 +77,7 @@ RSpec.describe 'mail_room.yml', feature_category: :service_desk do
email: 'gitlab-incoming@gmail.com',
name: 'inbox',
idle_timeout: 60,
- expunge_deleted: true
+ delete_after_delivery: false
}
expected_options = {
redis_url: gitlab_redis_queues.url,
diff --git a/spec/contracts/provider/helpers/contract_source_helper.rb b/spec/contracts/provider/helpers/contract_source_helper.rb
index 5fc2e3ffc0d..f59f228722d 100644
--- a/spec/contracts/provider/helpers/contract_source_helper.rb
+++ b/spec/contracts/provider/helpers/contract_source_helper.rb
@@ -4,18 +4,22 @@ module Provider
module ContractSourceHelper
QA_PACT_BROKER_HOST = "http://localhost:9292/pacts"
PREFIX_PATHS = {
- rake: "../../../contracts/contracts/project",
+ rake: {
+ ce: "../../contracts/project",
+ ee: "../../../../ee/spec/contracts/contracts/project"
+ },
spec: "../contracts/project"
}.freeze
SUB_PATH_REGEX = %r{project/(?<file_path>.*?)_helper.rb}.freeze
class << self
- def contract_location(requester, file_path)
+ def contract_location(requester:, file_path:, edition: :ce)
raise ArgumentError, 'requester must be :rake or :spec' unless [:rake, :spec].include? requester
+ raise ArgumentError, 'edition must be :ce or :ee' unless [:ce, :ee].include? edition
relevant_path = file_path.match(SUB_PATH_REGEX)[:file_path].split('/')
- ENV["PACT_BROKER"] ? pact_broker_url(relevant_path) : local_contract_location(requester, relevant_path)
+ ENV["PACT_BROKER"] ? pact_broker_url(relevant_path) : local_contract_location(requester, relevant_path, edition)
end
def pact_broker_url(file_path)
@@ -36,9 +40,10 @@ module Provider
"#{file_path[0].split('_').map(&:capitalize).join}%23#{file_path[1]}"
end
- def local_contract_location(requester, file_path)
+ def local_contract_location(requester, file_path, edition)
contract_path = construct_local_contract_path(file_path)
- prefix_path = requester == :rake ? File.expand_path(PREFIX_PATHS[requester], __dir__) : PREFIX_PATHS[requester]
+ prefix_path = PREFIX_PATHS[requester]
+ prefix_path = File.expand_path(prefix_path[edition], __dir__) if requester == :rake
"#{prefix_path}#{contract_path}"
end
diff --git a/spec/contracts/provider/pact_helpers/project/merge_requests/show/get_diffs_batch_helper.rb b/spec/contracts/provider/pact_helpers/project/merge_requests/show/get_diffs_batch_helper.rb
index aa97a07c07b..2d7486562c2 100644
--- a/spec/contracts/provider/pact_helpers/project/merge_requests/show/get_diffs_batch_helper.rb
+++ b/spec/contracts/provider/pact_helpers/project/merge_requests/show/get_diffs_batch_helper.rb
@@ -11,7 +11,7 @@ module Provider
app { Environments::Test.app }
honours_pact_with "MergeRequests#show" do
- pact_uri Provider::ContractSourceHelper.contract_location(:spec, __FILE__)
+ pact_uri Provider::ContractSourceHelper.contract_location(requester: :spec, file_path: __FILE__)
end
Provider::PublishContractHelper.publish_contract_setup.call(
diff --git a/spec/contracts/provider/pact_helpers/project/merge_requests/show/get_diffs_metadata_helper.rb b/spec/contracts/provider/pact_helpers/project/merge_requests/show/get_diffs_metadata_helper.rb
index 891585b0066..4cb358f6e32 100644
--- a/spec/contracts/provider/pact_helpers/project/merge_requests/show/get_diffs_metadata_helper.rb
+++ b/spec/contracts/provider/pact_helpers/project/merge_requests/show/get_diffs_metadata_helper.rb
@@ -11,7 +11,7 @@ module Provider
app { Environments::Test.app }
honours_pact_with "MergeRequests#show" do
- pact_uri Provider::ContractSourceHelper.contract_location(:spec, __FILE__)
+ pact_uri Provider::ContractSourceHelper.contract_location(requester: :spec, file_path: __FILE__)
end
Provider::PublishContractHelper.publish_contract_setup.call(
diff --git a/spec/contracts/provider/pact_helpers/project/merge_requests/show/get_discussions_helper.rb b/spec/contracts/provider/pact_helpers/project/merge_requests/show/get_discussions_helper.rb
index 229818366ca..4dea90fc6b7 100644
--- a/spec/contracts/provider/pact_helpers/project/merge_requests/show/get_discussions_helper.rb
+++ b/spec/contracts/provider/pact_helpers/project/merge_requests/show/get_discussions_helper.rb
@@ -11,7 +11,7 @@ module Provider
app { Environments::Test.app }
honours_pact_with "MergeRequests#show" do
- pact_uri Provider::ContractSourceHelper.contract_location(:spec, __FILE__)
+ pact_uri Provider::ContractSourceHelper.contract_location(requester: :spec, file_path: __FILE__)
end
Provider::PublishContractHelper.publish_contract_setup.call(
diff --git a/spec/contracts/provider/pact_helpers/project/pipeline_schedules/edit/put_edit_a_pipeline_schedule_helper.rb b/spec/contracts/provider/pact_helpers/project/pipeline_schedules/edit/put_edit_a_pipeline_schedule_helper.rb
index 62702fd5f92..1d9c1331d3b 100644
--- a/spec/contracts/provider/pact_helpers/project/pipeline_schedules/edit/put_edit_a_pipeline_schedule_helper.rb
+++ b/spec/contracts/provider/pact_helpers/project/pipeline_schedules/edit/put_edit_a_pipeline_schedule_helper.rb
@@ -11,7 +11,7 @@ module Provider
app { Environments::Test.app }
honours_pact_with "PipelineSchedules#edit" do
- pact_uri Provider::ContractSourceHelper.contract_location(:spec, __FILE__)
+ pact_uri Provider::ContractSourceHelper.contract_location(requester: :spec, file_path: __FILE__)
end
Provider::PublishContractHelper.publish_contract_setup.call(
diff --git a/spec/contracts/provider/pact_helpers/project/pipelines/index/get_list_project_pipelines_helper.rb b/spec/contracts/provider/pact_helpers/project/pipelines/index/get_list_project_pipelines_helper.rb
index 03708db2eb2..2263723b123 100644
--- a/spec/contracts/provider/pact_helpers/project/pipelines/index/get_list_project_pipelines_helper.rb
+++ b/spec/contracts/provider/pact_helpers/project/pipelines/index/get_list_project_pipelines_helper.rb
@@ -11,7 +11,7 @@ module Provider
app { Environments::Test.app }
honours_pact_with "Pipelines#index" do
- pact_uri Provider::ContractSourceHelper.contract_location(:spec, __FILE__)
+ pact_uri Provider::ContractSourceHelper.contract_location(requester: :spec, file_path: __FILE__)
end
Provider::PublishContractHelper.publish_contract_setup.call(
diff --git a/spec/contracts/provider/pact_helpers/project/pipelines/new/post_create_a_new_pipeline_helper.rb b/spec/contracts/provider/pact_helpers/project/pipelines/new/post_create_a_new_pipeline_helper.rb
index 53e5ab61a20..8c2b0278ad1 100644
--- a/spec/contracts/provider/pact_helpers/project/pipelines/new/post_create_a_new_pipeline_helper.rb
+++ b/spec/contracts/provider/pact_helpers/project/pipelines/new/post_create_a_new_pipeline_helper.rb
@@ -11,7 +11,7 @@ module Provider
app { Environments::Test.app }
honours_pact_with "Pipelines#new" do
- pact_uri Provider::ContractSourceHelper.contract_location(:spec, __FILE__)
+ pact_uri Provider::ContractSourceHelper.contract_location(requester: :spec, file_path: __FILE__)
end
Provider::PublishContractHelper.publish_contract_setup.call(
diff --git a/spec/contracts/provider/pact_helpers/project/pipelines/show/delete_pipeline_helper.rb b/spec/contracts/provider/pact_helpers/project/pipelines/show/delete_pipeline_helper.rb
index 1801e989c99..01b57388d70 100644
--- a/spec/contracts/provider/pact_helpers/project/pipelines/show/delete_pipeline_helper.rb
+++ b/spec/contracts/provider/pact_helpers/project/pipelines/show/delete_pipeline_helper.rb
@@ -11,7 +11,7 @@ module Provider
app { Environments::Test.app }
honours_pact_with "Pipelines#show" do
- pact_uri Provider::ContractSourceHelper.contract_location(:spec, __FILE__)
+ pact_uri Provider::ContractSourceHelper.contract_location(requester: :spec, file_path: __FILE__)
end
Provider::PublishContractHelper.publish_contract_setup.call(
diff --git a/spec/contracts/provider/pact_helpers/project/pipelines/show/get_pipeline_header_data_helper.rb b/spec/contracts/provider/pact_helpers/project/pipelines/show/get_pipeline_header_data_helper.rb
index 1f3ba9dd007..aac8d25dbd1 100644
--- a/spec/contracts/provider/pact_helpers/project/pipelines/show/get_pipeline_header_data_helper.rb
+++ b/spec/contracts/provider/pact_helpers/project/pipelines/show/get_pipeline_header_data_helper.rb
@@ -11,7 +11,7 @@ module Provider
app { Environments::Test.app }
honours_pact_with "Pipelines#show" do
- pact_uri Provider::ContractSourceHelper.contract_location(:spec, __FILE__)
+ pact_uri Provider::ContractSourceHelper.contract_location(requester: :spec, file_path: __FILE__)
end
Provider::PublishContractHelper.publish_contract_setup.call(
diff --git a/spec/contracts/provider_specs/helpers/provider/contract_source_helper_spec.rb b/spec/contracts/provider_specs/helpers/provider/contract_source_helper_spec.rb
index 8bb3b577135..39537aa153d 100644
--- a/spec/contracts/provider_specs/helpers/provider/contract_source_helper_spec.rb
+++ b/spec/contracts/provider_specs/helpers/provider/contract_source_helper_spec.rb
@@ -11,21 +11,26 @@ RSpec.describe Provider::ContractSourceHelper, feature_category: :not_owned do
describe '#contract_location' do
it 'raises an error when an invalid requester is given' do
- expect { subject.contract_location(:foo, pact_helper_path) }
+ expect { subject.contract_location(requester: :foo, file_path: pact_helper_path) }
.to raise_error(ArgumentError, 'requester must be :rake or :spec')
end
+ it 'raises an error when an invalid edition is given' do
+ expect { subject.contract_location(requester: :spec, file_path: pact_helper_path, edition: :zz) }
+ .to raise_error(ArgumentError, 'edition must be :ce or :ee')
+ end
+
context 'when the PACT_BROKER environment variable is not set' do
it 'extracts the relevant path from the pact_helper path' do
- expect(subject).to receive(:local_contract_location).with(:rake, split_pact_helper_path)
+ expect(subject).to receive(:local_contract_location).with(:rake, split_pact_helper_path, :ce)
- subject.contract_location(:rake, pact_helper_path)
+ subject.contract_location(requester: :rake, file_path: pact_helper_path)
end
it 'does not construct the pact broker url' do
expect(subject).not_to receive(:pact_broker_url)
- subject.contract_location(:rake, pact_helper_path)
+ subject.contract_location(requester: :rake, file_path: pact_helper_path)
end
end
@@ -37,13 +42,13 @@ RSpec.describe Provider::ContractSourceHelper, feature_category: :not_owned do
it 'extracts the relevant path from the pact_helper path' do
expect(subject).to receive(:pact_broker_url).with(split_pact_helper_path)
- subject.contract_location(:spec, pact_helper_path)
+ subject.contract_location(requester: :spec, file_path: pact_helper_path)
end
it 'does not construct the pact broker url' do
expect(subject).not_to receive(:local_contract_location)
- subject.contract_location(:spec, pact_helper_path)
+ subject.contract_location(requester: :spec, file_path: pact_helper_path)
end
end
end
@@ -51,7 +56,7 @@ RSpec.describe Provider::ContractSourceHelper, feature_category: :not_owned do
describe '#pact_broker_url' do
it 'returns the full url to the contract that the provider test is verifying' do
contract_url_path = "http://localhost:9292/pacts/provider/" \
- "#{provider_url_path}/consumer/#{consumer_url_path}/latest"
+ "#{provider_url_path}/consumer/#{consumer_url_path}/latest"
expect(subject.pact_broker_url(split_pact_helper_path)).to eq(contract_url_path)
end
@@ -73,7 +78,7 @@ RSpec.describe Provider::ContractSourceHelper, feature_category: :not_owned do
it 'returns the contract file path with the prefix path for a rake task' do
rake_task_relative_path = '/spec/contracts/contracts/project'
- rake_task_path = subject.local_contract_location(:rake, split_pact_helper_path)
+ rake_task_path = subject.local_contract_location(:rake, split_pact_helper_path, :ce)
expect(rake_task_path).to include(rake_task_relative_path)
expect(rake_task_path).not_to include('../')
@@ -82,7 +87,7 @@ RSpec.describe Provider::ContractSourceHelper, feature_category: :not_owned do
it 'returns the contract file path with the prefix path for a spec' do
spec_relative_path = '../contracts/project'
- expect(subject.local_contract_location(:spec, split_pact_helper_path)).to include(spec_relative_path)
+ expect(subject.local_contract_location(:spec, split_pact_helper_path, :ce)).to include(spec_relative_path)
end
end
diff --git a/spec/contracts/publish-contracts.sh b/spec/contracts/publish-contracts.sh
index f20cc43e258..8b9d4b6ecc6 100644
--- a/spec/contracts/publish-contracts.sh
+++ b/spec/contracts/publish-contracts.sh
@@ -2,22 +2,50 @@ LATEST_SHA=$(git rev-parse HEAD)
GIT_BRANCH=$(git rev-parse --abbrev-ref HEAD)
BROKER_BASE_URL="http://localhost:9292"
-CONTRACTS=$(find ./contracts -name "*.json")
-ERROR=0
-
-trap 'catch' ERR
+cd "${0%/*}" || exit 1
function catch() {
printf "\e[31mAn error occured while trying to publish the pact.\033[0m\n"
ERROR=1
}
-for contract in $CONTRACTS
-do
- printf "\e[32mPublishing ${contract}...\033[0m\n"
- pact-broker publish $contract --consumer-app-version $LATEST_SHA --branch $GIT_BRANCH --broker-base-url $BROKER_BASE_URL --output json
-done
+function publish_contract () {
+ CONTRACTS=$(find ./contracts -name "*.json")
+ ERROR=0
+
+ trap 'catch' ERR
+
+ for contract in $CONTRACTS
+ do
+ printf "\e[32mPublishing %s...\033[0m\n" "$contract"
+ pact-broker publish "$contract" --consumer-app-version "$LATEST_SHA" --branch "$GIT_BRANCH" --broker-base-url "$BROKER_BASE_URL" --output json
+ done
+
+ if [ ${ERROR} = 1 ]; then
+ exit 1;
+ fi
+}
+
+function publish_ce_contracts () {
+ publish_contract
+}
+
+function publish_ee_contracts () {
+ cd "../../ee/spec/contracts" || exit 1
+ publish_contract
+}
-if [ ${ERROR} = 1 ]; then
+if [ $1 = "ce" ]; then
+ printf "\e[32mPublishing CE contracts...\033[0m\n"
+ publish_ce_contracts
+elif [ $1 = "ee" ]; then
+ printf "\e[32mPublishing EE contracts...\033[0m\n"
+ publish_ee_contracts
+elif [ $1 = "all" ]; then
+ printf "\e[32mPublishing all contracts...\033[0m\n"
+ publish_ce_contracts
+ publish_ee_contracts
+else
+ printf "\e[31mInvalid argument. Please choose either \"ce\", \"ee\", or \"all\".\033[0m\n"
exit 1;
-fi \ No newline at end of file
+fi
diff --git a/spec/controllers/admin/application_settings/appearances_controller_spec.rb b/spec/controllers/admin/application_settings/appearances_controller_spec.rb
index 5978381a926..78dce4558c3 100644
--- a/spec/controllers/admin/application_settings/appearances_controller_spec.rb
+++ b/spec/controllers/admin/application_settings/appearances_controller_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe Admin::ApplicationSettings::AppearancesController do
let(:create_params) do
{
title: 'Foo',
- short_title: 'F',
+ pwa_short_name: 'F',
description: 'Bar',
header_message: header_message,
footer_message: footer_message
diff --git a/spec/controllers/admin/application_settings_controller_spec.rb b/spec/controllers/admin/application_settings_controller_spec.rb
index 49c40ecee8b..32ac0f8dc07 100644
--- a/spec/controllers/admin/application_settings_controller_spec.rb
+++ b/spec/controllers/admin/application_settings_controller_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
stub_env('IN_MEMORY_APPLICATION_SETTINGS', 'false')
end
- describe 'GET #integrations' do
+ describe 'GET #integrations', feature_category: :integrations do
before do
sign_in(admin)
end
@@ -46,7 +46,7 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
end
end
- describe 'GET #usage_data with no access' do
+ describe 'GET #usage_data with no access', feature_category: :service_ping do
before do
stub_usage_data_connections
sign_in(user)
@@ -59,7 +59,7 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
end
end
- describe 'GET #usage_data' do
+ describe 'GET #usage_data', feature_category: :service_ping do
before do
stub_usage_data_connections
stub_database_flavor_check
@@ -120,13 +120,6 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
sign_in(admin)
end
- it 'updates the require_admin_approval_after_user_signup setting' do
- put :update, params: { application_setting: { require_admin_approval_after_user_signup: true } }
-
- expect(response).to redirect_to(general_admin_application_settings_path)
- expect(ApplicationSetting.current.require_admin_approval_after_user_signup).to eq(true)
- end
-
it 'updates the password_authentication_enabled_for_git setting' do
put :update, params: { application_setting: { password_authentication_enabled_for_git: "0" } }
@@ -204,13 +197,6 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
expect(ApplicationSetting.current.default_branch_name).to eq("example_branch_name")
end
- it "updates admin_mode setting" do
- put :update, params: { application_setting: { admin_mode: true } }
-
- expect(response).to redirect_to(general_admin_application_settings_path)
- expect(ApplicationSetting.current.admin_mode).to be(true)
- end
-
it 'updates valid_runner_registrars setting' do
put :update, params: { application_setting: { valid_runner_registrars: ['project', ''] } }
@@ -218,11 +204,23 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
expect(ApplicationSetting.current.valid_runner_registrars).to eq(['project'])
end
- it 'updates can_create_group setting' do
- put :update, params: { application_setting: { can_create_group: false } }
+ context 'boolean attributes' do
+ shared_examples_for 'updates boolean attribute' do |attribute|
+ specify do
+ existing_value = ApplicationSetting.current.public_send(attribute)
+ new_value = !existing_value
- expect(response).to redirect_to(general_admin_application_settings_path)
- expect(ApplicationSetting.current.can_create_group).to eq(false)
+ put :update, params: { application_setting: { attribute => new_value } }
+
+ expect(response).to redirect_to(general_admin_application_settings_path)
+ expect(ApplicationSetting.current.public_send(attribute)).to eq(new_value)
+ end
+ end
+
+ it_behaves_like 'updates boolean attribute', :user_defaults_to_private_profile
+ it_behaves_like 'updates boolean attribute', :can_create_group
+ it_behaves_like 'updates boolean attribute', :admin_mode
+ it_behaves_like 'updates boolean attribute', :require_admin_approval_after_user_signup
end
context "personal access token prefix settings" do
@@ -402,7 +400,7 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
end
end
- describe 'PUT #reset_registration_token' do
+ describe 'PUT #reset_registration_token', feature_category: :credential_management do
before do
sign_in(admin)
end
@@ -420,7 +418,7 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
end
end
- describe 'PUT #reset_error_tracking_access_token' do
+ describe 'PUT #reset_error_tracking_access_token', feature_category: :error_tracking do
before do
sign_in(admin)
end
@@ -456,7 +454,7 @@ RSpec.describe Admin::ApplicationSettingsController, :do_not_mock_admin_mode_set
end
end
- describe 'GET #service_usage_data' do
+ describe 'GET #service_usage_data', feature_category: :service_ping do
before do
stub_usage_data_connections
stub_database_flavor_check
diff --git a/spec/controllers/admin/clusters_controller_spec.rb b/spec/controllers/admin/clusters_controller_spec.rb
index c432adb6ae3..86a4ac61194 100644
--- a/spec/controllers/admin/clusters_controller_spec.rb
+++ b/spec/controllers/admin/clusters_controller_spec.rb
@@ -159,8 +159,6 @@ RSpec.describe Admin::ClustersController do
describe 'functionality' do
context 'when creates a cluster' do
it 'creates a new cluster' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
-
expect { post_create_user }.to change { Clusters::Cluster.count }
.and change { Clusters::Platforms::Kubernetes.count }
@@ -187,8 +185,6 @@ RSpec.describe Admin::ClustersController do
end
it 'creates a new cluster' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
-
expect { post_create_user }.to change { Clusters::Cluster.count }
.and change { Clusters::Platforms::Kubernetes.count }
diff --git a/spec/controllers/concerns/check_rate_limit_spec.rb b/spec/controllers/concerns/check_rate_limit_spec.rb
index 75776acd520..25574aa295b 100644
--- a/spec/controllers/concerns/check_rate_limit_spec.rb
+++ b/spec/controllers/concerns/check_rate_limit_spec.rb
@@ -33,8 +33,8 @@ RSpec.describe CheckRateLimit do
end
describe '#check_rate_limit!' do
- it 'calls ApplicationRateLimiter#throttled? with the right arguments' do
- expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(key, scope: scope).and_return(false)
+ it 'calls ApplicationRateLimiter#throttled_request? with the right arguments' do
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled_request?).with(request, user, key, scope: scope).and_return(false)
expect(subject).not_to receive(:render)
subject.check_rate_limit!(key, scope: scope)
diff --git a/spec/controllers/concerns/content_security_policy_patch_spec.rb b/spec/controllers/concerns/content_security_policy_patch_spec.rb
new file mode 100644
index 00000000000..6322950977c
--- /dev/null
+++ b/spec/controllers/concerns/content_security_policy_patch_spec.rb
@@ -0,0 +1,116 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+# Based on https://github.com/rails/rails/pull/45115/files#diff-35ef6d1bd8b8d3b037ec819a704cd78db55db916a57abfc2859882826fc679b6
+RSpec.describe ContentSecurityPolicyPatch, feature_category: :not_owned do
+ include Rack::Test::Methods
+
+ let(:routes) do
+ ActionDispatch::Routing::RouteSet.new.tap do |routes|
+ routes.draw do
+ # Using Testing module defined below
+ scope module: "testing" do
+ get "/", to: "policy#index"
+ end
+ end
+ end
+ end
+
+ let(:csp) do
+ ActionDispatch::ContentSecurityPolicy.new do |p|
+ p.default_src -> { :self }
+ p.script_src -> { :https }
+ end
+ end
+
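+ # Minimal middleware stub that injects the CSP policy into the Rack env so the real
+ # ActionDispatch::ContentSecurityPolicy::Middleware, mounted after it, can emit the header.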
+ let(:policy_middleware) do
+ Module.new do
+ def self.new(app, policy)
+ ->(env) do
+ env["action_dispatch.content_security_policy"] = policy
+
+ app.call(env)
+ end
+ end
+ end
+ end
+
+ subject(:app) do
+ build_app(routes) do |middleware|
+ middleware.use policy_middleware, csp
+ middleware.use ActionDispatch::ContentSecurityPolicy::Middleware
+ end
+ end
+
+ def setup_controller
+ application_controller = Class.new(ActionController::Base) do # rubocop:disable Rails/ApplicationController
+ helper_method :sky_is_blue?
+ def sky_is_blue?
+ true
+ end
+ end
+
+ policy_controller = Class.new(application_controller) do
+ extend ContentSecurityPolicyPatch
+
+ content_security_policy_with_context do |p|
+ p.default_src "https://example.com"
+ p.script_src "https://example.com" if helpers.sky_is_blue?
+ end
+
+ def index
+ head :ok
+ end
+ end
+
+ stub_const("Testing::ApplicationController", application_controller)
+ stub_const("Testing::PolicyController", policy_controller)
+ end
+
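+ # Assembles a minimal Rack/ActionDispatch stack around the given routes so the CSP header
+ # can be asserted without booting the full Rails middleware stack.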
+ def build_app(routes)
+ stack = ActionDispatch::MiddlewareStack.new do |middleware|
+ middleware.use ActionDispatch::DebugExceptions
+ middleware.use ActionDispatch::ActionableExceptions
+ middleware.use ActionDispatch::Callbacks
+ middleware.use ActionDispatch::Cookies
+ middleware.use ActionDispatch::Flash
+ middleware.use Rack::MethodOverride
+ middleware.use Rack::Head
+
+ yield(middleware) if block_given?
+ end
+
+ app = stack.build(routes)
+
+ ->(env) { app.call(env) }
+ end
+
+ it "calls helper method" do
+ setup_controller
+
+ response = get "/"
+
+ csp_header = response.headers["Content-Security-Policy"]
+
+ expect(csp_header).to include "default-src https://example.com"
+ expect(csp_header).to include "script-src https://example.com"
+ end
+
+ it "does not emit any warnings" do
+ expect { setup_controller }.not_to output.to_stderr
+ end
+
+ context "with Rails version 7.2" do
+ before do
+ version = Gem::Version.new("7.2.0")
+ allow(Rails).to receive(:gem_version).and_return(version)
+ end
+
+ it "emits a deprecation warning" do
+ expect { setup_controller }
+ .to output(/Use content_security_policy instead/)
+ .to_stderr
+ end
+ end
+end
diff --git a/spec/controllers/groups/clusters_controller_spec.rb b/spec/controllers/groups/clusters_controller_spec.rb
index eb3fe4bc330..46f507c34ba 100644
--- a/spec/controllers/groups/clusters_controller_spec.rb
+++ b/spec/controllers/groups/clusters_controller_spec.rb
@@ -180,8 +180,6 @@ RSpec.describe Groups::ClustersController do
describe 'functionality' do
context 'when creates a cluster' do
it 'creates a new cluster' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
-
expect { go }.to change { Clusters::Cluster.count }
.and change { Clusters::Platforms::Kubernetes.count }
@@ -210,8 +208,6 @@ RSpec.describe Groups::ClustersController do
end
it 'creates a new cluster' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
-
expect { go }.to change { Clusters::Cluster.count }
.and change { Clusters::Platforms::Kubernetes.count }
diff --git a/spec/controllers/groups/imports_controller_spec.rb b/spec/controllers/groups/imports_controller_spec.rb
index 7372c2e9575..24dc33b2cf1 100644
--- a/spec/controllers/groups/imports_controller_spec.rb
+++ b/spec/controllers/groups/imports_controller_spec.rb
@@ -45,7 +45,7 @@ RSpec.describe Groups::ImportsController do
it 'sets a flash error' do
get :show, params: { group_id: group }
- expect(flash[:alert]).to eq 'Failed to import group.'
+ expect(flash[:alert]).to eq 'Failed to import group: '
end
end
diff --git a/spec/controllers/import/available_namespaces_controller_spec.rb b/spec/controllers/import/available_namespaces_controller_spec.rb
deleted file mode 100644
index 26ea1d92189..00000000000
--- a/spec/controllers/import/available_namespaces_controller_spec.rb
+++ /dev/null
@@ -1,109 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Import::AvailableNamespacesController do
- let_it_be(:user) { create(:user) }
-
- before do
- sign_in(user)
- end
-
- describe "GET index" do
- context "when having group with role never allowed to create projects" do
- using RSpec::Parameterized::TableSyntax
-
- where(
- role: [:guest, :reporter],
- default_project_creation_access: [::Gitlab::Access::MAINTAINER_PROJECT_ACCESS, ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS],
- group_project_creation_level: [nil, ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS, ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS])
-
- with_them do
- before do
- stub_application_setting(default_project_creation: default_project_creation_access)
- end
-
- it "does not include group with access level #{params[:role]} in list" do
- group = create(:group, project_creation_level: group_project_creation_level)
- group.add_member(user, role)
- get :index
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).not_to include({
- 'id' => group.id,
- 'full_path' => group.full_path
- })
- end
- end
- end
-
- context "when having group with role always allowed to create projects" do
- using RSpec::Parameterized::TableSyntax
-
- where(
- role: [:maintainer, :owner],
- default_project_creation_access: [::Gitlab::Access::MAINTAINER_PROJECT_ACCESS, ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS],
- group_project_creation_level: [nil, ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS, ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS])
-
- with_them do
- before do
- stub_application_setting(default_project_creation: default_project_creation_access)
- end
-
- it "does not include group with access level #{params[:role]} in list" do
- group = create(:group, project_creation_level: group_project_creation_level)
- group.add_member(user, role)
- get :index
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).to include({
- 'id' => group.id,
- 'full_path' => group.full_path
- })
- end
- end
- end
-
- context "when having developer role" do
- using RSpec::Parameterized::TableSyntax
-
- where(:default_project_creation_access, :project_creation_level, :is_visible) do
- ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS | nil | false
- ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS | ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS | true
- ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS | nil | true
- ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS | ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS | false
- end
-
- with_them do
- before do
- stub_application_setting(default_project_creation: default_project_creation_access)
- end
-
- it "#{params[:is_visible] ? 'includes' : 'does not include'} group with access level #{params[:role]} in list" do
- group = create(:group, project_creation_level: project_creation_level)
- group.add_member(user, :developer)
-
- get :index
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response).send(is_visible ? 'to' : 'not_to', include({
- 'id' => group.id,
- 'full_path' => group.full_path
- }))
- end
- end
- end
-
- context "with an anonymous user" do
- before do
- sign_out(user)
- end
-
- it "redirects to sign-in page" do
- get :index
-
- expect(response).to redirect_to(new_user_session_path)
- end
- end
- end
-end
diff --git a/spec/controllers/import/bulk_imports_controller_spec.rb b/spec/controllers/import/bulk_imports_controller_spec.rb
index a0bb39f3e98..a0d5b576e74 100644
--- a/spec/controllers/import/bulk_imports_controller_spec.rb
+++ b/spec/controllers/import/bulk_imports_controller_spec.rb
@@ -2,10 +2,12 @@
require 'spec_helper'
-RSpec.describe Import::BulkImportsController do
+RSpec.describe Import::BulkImportsController, feature_category: :importers do
let_it_be(:user) { create(:user) }
before do
+ stub_application_setting(bulk_import_enabled: true)
+
sign_in(user)
end
@@ -16,6 +18,13 @@ RSpec.describe Import::BulkImportsController do
end
describe 'POST configure' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
+ allow(instance).to receive(:validate_instance_version!).and_return(true)
+ allow(instance).to receive(:validate_import_scopes!).and_return(true)
+ end
+ end
+
context 'when no params are passed in' do
it 'clears out existing session' do
post :configure
@@ -28,8 +37,57 @@ RSpec.describe Import::BulkImportsController do
end
end
+ context 'when URL is invalid' do
+ it 'redirects to initial import page' do
+ token = 'token'
+ url = 'http://192.168.0.1'
+
+ post :configure, params: { bulk_import_gitlab_access_token: token, bulk_import_gitlab_url: url }
+
+ expect(response).to redirect_to new_group_path(anchor: 'import-group-pane')
+ expect(flash[:alert]).to include('Specified URL cannot be used')
+ end
+ end
+
+ context 'when token scope is invalid' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
+ allow(instance).to receive(:validate_instance_version!).and_return(true)
+ allow(instance).to receive(:validate_import_scopes!).and_raise(BulkImports::Error.new('Error!'))
+ end
+ end
+
+ it 'redirects to initial import page' do
+ token = 'token'
+ url = 'https://gitlab.example'
+
+ post :configure, params: { bulk_import_gitlab_access_token: token, bulk_import_gitlab_url: url }
+
+ expect(response).to redirect_to new_group_path(anchor: 'import-group-pane')
+ expect(flash[:alert]).to include('Error!')
+ end
+ end
+
+ context 'when instance version is incompatible' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
+ allow(instance).to receive(:validate_instance_version!).and_raise(BulkImports::Error.new('Error!'))
+ end
+ end
+
+ it 'redirects to initial import page' do
+ token = 'token'
+ url = 'https://gitlab.example'
+
+ post :configure, params: { bulk_import_gitlab_access_token: token, bulk_import_gitlab_url: url }
+
+ expect(response).to redirect_to new_group_path(anchor: 'import-group-pane')
+ expect(flash[:alert]).to include('Error!')
+ end
+ end
+
it 'sets the session variables' do
- token = 'token'
+ token = 'invalid token'
url = 'https://gitlab.example'
post :configure, params: { bulk_import_gitlab_access_token: token, bulk_import_gitlab_url: url }
@@ -100,6 +158,18 @@ RSpec.describe Import::BulkImportsController do
)
end
+ let(:source_version) do
+ Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION,
+ ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)
+ end
+
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
+ allow(instance).to receive(:instance_version).and_return(source_version)
+ allow(instance).to receive(:instance_enterprise).and_return(false)
+ end
+ end
+
it 'returns serialized group data' do
get_status
@@ -201,8 +271,15 @@ RSpec.describe Import::BulkImportsController do
end
context 'when connection error occurs' do
+ let(:source_version) do
+ Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION,
+ ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)
+ end
+
before do
allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
+ allow(instance).to receive(:instance_version).and_return(source_version)
+ allow(instance).to receive(:instance_enterprise).and_return(false)
allow(instance).to receive(:get).and_raise(BulkImports::Error)
end
end
@@ -326,9 +403,9 @@ RSpec.describe Import::BulkImportsController do
end
end
- context 'when bulk_import feature flag is disabled' do
+ context 'when feature is disabled' do
before do
- stub_feature_flags(bulk_import: false)
+ stub_application_setting(bulk_import_enabled: false)
end
context 'POST configure' do
diff --git a/spec/controllers/import/github_controller_spec.rb b/spec/controllers/import/github_controller_spec.rb
index a85af89b262..c1a61a78d80 100644
--- a/spec/controllers/import/github_controller_spec.rb
+++ b/spec/controllers/import/github_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Import::GithubController do
+RSpec.describe Import::GithubController, feature_category: :import do
include ImportSpecHelper
let(:provider) { :github }
@@ -138,7 +138,7 @@ RSpec.describe Import::GithubController do
it 'calls repos list from provider with expected args' do
expect_next_instance_of(Gitlab::GithubImport::Clients::Proxy) do |client|
expect(client).to receive(:repos)
- .with(expected_filter, expected_pagination_options)
+ .with(expected_filter, expected_options)
.and_return({ repos: [], page_info: {} })
end
@@ -155,11 +155,16 @@ RSpec.describe Import::GithubController do
let(:provider_token) { 'asdasd12345' }
let(:client_auth_success) { true }
let(:client_stub) { instance_double(Gitlab::GithubImport::Client, user: { login: 'user' }) }
- let(:expected_pagination_options) { pagination_params.merge(first: 25, page: 1, per_page: 25) }
- let(:expected_filter) { nil }
let(:params) { nil }
let(:pagination_params) { { before: nil, after: nil } }
+ let(:relation_params) { { relation_type: nil, organization_login: '' } }
let(:provider_repos) { [] }
+ let(:expected_filter) { '' }
+ let(:expected_options) do
+ pagination_params.merge(relation_params).merge(
+ first: 25, page: 1, per_page: 25
+ )
+ end
before do
allow_next_instance_of(Gitlab::GithubImport::Clients::Proxy) do |proxy|
@@ -277,8 +282,34 @@ RSpec.describe Import::GithubController do
context 'when page is specified' do
let(:pagination_params) { { before: nil, after: nil, page: 2 } }
- let(:expected_pagination_options) { pagination_params.merge(first: 25, page: 2, per_page: 25) }
let(:params) { pagination_params }
+ let(:expected_options) do
+ pagination_params.merge(relation_params).merge(first: 25, page: 2, per_page: 25)
+ end
+
+ it_behaves_like 'calls repos through Clients::Proxy with expected args'
+ end
+ end
+
+ context 'when relation type params present' do
+ let(:organization_login) { 'test-login' }
+ let(:params) { pagination_params.merge(relation_type: 'organization', organization_login: organization_login) }
+ let(:pagination_defaults) { { first: 25, page: 1, per_page: 25 } }
+ let(:expected_options) do
+ pagination_defaults.merge(pagination_params).merge(
+ relation_type: 'organization', organization_login: organization_login
+ )
+ end
+
+ it_behaves_like 'calls repos through Clients::Proxy with expected args'
+
+ context 'when organization_login is too long and starts with ":"' do
+ let(:organization_login) { ":#{Array.new(270) { ('a'..'z').to_a.sample }.join}" }
+ let(:expected_options) do
+ pagination_defaults.merge(pagination_params).merge(
+ relation_type: 'organization', organization_login: organization_login.slice(1, 254)
+ )
+ end
it_behaves_like 'calls repos through Clients::Proxy with expected args'
end
diff --git a/spec/controllers/import/phabricator_controller_spec.rb b/spec/controllers/import/phabricator_controller_spec.rb
index 9827a6d077c..9be85a40d82 100644
--- a/spec/controllers/import/phabricator_controller_spec.rb
+++ b/spec/controllers/import/phabricator_controller_spec.rb
@@ -14,25 +14,14 @@ RSpec.describe Import::PhabricatorController do
context 'when the import source is not available' do
before do
- stub_feature_flags(phabricator_import: true)
stub_application_setting(import_sources: [])
end
it { is_expected.to have_gitlab_http_status(:not_found) }
end
- context 'when the feature is disabled' do
+ context 'when the import source is available' do
before do
- stub_feature_flags(phabricator_import: false)
- stub_application_setting(import_sources: ['phabricator'])
- end
-
- it { is_expected.to have_gitlab_http_status(:not_found) }
- end
-
- context 'when the import is available' do
- before do
- stub_feature_flags(phabricator_import: true)
stub_application_setting(import_sources: ['phabricator'])
end
diff --git a/spec/controllers/projects/artifacts_controller_spec.rb b/spec/controllers/projects/artifacts_controller_spec.rb
index 00efd7d7b56..3d12926c07a 100644
--- a/spec/controllers/projects/artifacts_controller_spec.rb
+++ b/spec/controllers/projects/artifacts_controller_spec.rb
@@ -26,14 +26,34 @@ RSpec.describe Projects::ArtifactsController do
subject { get :index, params: { namespace_id: project.namespace, project_id: project } }
context 'when feature flag is on' do
+ render_views
+
before do
stub_feature_flags(artifacts_management_page: true)
end
- it 'renders the page' do
+ it 'renders the page with data for the artifacts app' do
subject
expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template('projects/artifacts/index')
+
+ app = Nokogiri::HTML.parse(response.body).at_css('div#js-artifact-management')
+
+ expect(app.attributes['data-project-path'].value).to eq(project.full_path)
+ expect(app.attributes['data-can-destroy-artifacts'].value).to eq('true')
+ end
+
+ describe 'when user does not have permission to delete artifacts' do
+ let(:user) { create(:user) }
+
+ it 'passes false to the artifacts app' do
+ subject
+
+ app = Nokogiri::HTML.parse(response.body).at_css('div#js-artifact-management')
+
+ expect(app.attributes['data-can-destroy-artifacts'].value).to eq('false')
+ end
end
end
@@ -423,6 +443,16 @@ RSpec.describe Projects::ArtifactsController do
end
end
+ context 'when artifacts archive is missing' do
+ let!(:job) { create(:ci_build, :success, pipeline: pipeline) }
+
+ it 'returns 404' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
context 'fetching an artifact of different type' do
before do
job.job_artifacts.each(&:destroy)
diff --git a/spec/controllers/projects/clusters_controller_spec.rb b/spec/controllers/projects/clusters_controller_spec.rb
index 12202518e1e..894f0f8354d 100644
--- a/spec/controllers/projects/clusters_controller_spec.rb
+++ b/spec/controllers/projects/clusters_controller_spec.rb
@@ -181,8 +181,6 @@ RSpec.describe Projects::ClustersController do
describe 'functionality' do
context 'when creates a cluster' do
it 'creates a new cluster' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
-
expect { go }.to change { Clusters::Cluster.count }
.and change { Clusters::Platforms::Kubernetes.count }
@@ -210,8 +208,6 @@ RSpec.describe Projects::ClustersController do
end
it 'creates a new cluster' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
-
expect { go }.to change { Clusters::Cluster.count }
.and change { Clusters::Platforms::Kubernetes.count }
diff --git a/spec/controllers/projects/deploy_keys_controller_spec.rb b/spec/controllers/projects/deploy_keys_controller_spec.rb
index fd844808d81..ec63bad22b5 100644
--- a/spec/controllers/projects/deploy_keys_controller_spec.rb
+++ b/spec/controllers/projects/deploy_keys_controller_spec.rb
@@ -102,7 +102,7 @@ RSpec.describe Projects::DeployKeysController do
it 'shows an alert with the validations errors' do
post :create, params: create_params(nil)
- expect(flash[:alert]).to eq("Title can't be blank, Deploy keys projects deploy key title can't be blank")
+ expect(flash[:alert]).to eq("Title can't be blank")
end
end
@@ -126,8 +126,7 @@ RSpec.describe Projects::DeployKeysController do
it 'shows an alert with the validations errors' do
post :create, params: create_params
- expect(flash[:alert]).to eq("Fingerprint sha256 has already been taken, " \
- "Deploy keys projects deploy key fingerprint sha256 has already been taken")
+ expect(flash[:alert]).to eq("Fingerprint sha256 has already been taken")
end
end
end
diff --git a/spec/controllers/projects/design_management/designs/resized_image_controller_spec.rb b/spec/controllers/projects/design_management/designs/resized_image_controller_spec.rb
index cc0f4a426f4..5cc6e1b1bb4 100644
--- a/spec/controllers/projects/design_management/designs/resized_image_controller_spec.rb
+++ b/spec/controllers/projects/design_management/designs/resized_image_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::DesignManagement::Designs::ResizedImageController do
+RSpec.describe Projects::DesignManagement::Designs::ResizedImageController, feature_category: :design_management do
include DesignManagementTestHelpers
let_it_be(:project) { create(:project, :private) }
@@ -19,7 +19,7 @@ RSpec.describe Projects::DesignManagement::Designs::ResizedImageController do
end
describe 'GET #show' do
- subject do
+ subject(:response) do
get(:show,
params: {
namespace_id: project.namespace,
@@ -27,12 +27,12 @@ RSpec.describe Projects::DesignManagement::Designs::ResizedImageController do
design_id: design_id,
sha: sha,
id: size
- })
+ }
+ )
end
before do
sign_in(viewer)
- subject
end
context 'when the user does not have permission' do
@@ -68,8 +68,6 @@ RSpec.describe Projects::DesignManagement::Designs::ResizedImageController do
let(:design_id) { 'foo' }
specify do
- subject
-
expect(response).to have_gitlab_http_status(:not_found)
end
end
@@ -136,6 +134,24 @@ RSpec.describe Projects::DesignManagement::Designs::ResizedImageController do
expect(response).to have_gitlab_http_status(:not_found)
end
end
+
+ context 'when multiple design versions have the same sha hash' do
+ let(:sha) { newest_version.sha }
+
+ before do
+ create(:design, :with_smaller_image_versions,
+ issue: create(:issue, project: project),
+ versions_count: 1,
+ versions_sha: sha)
+ end
+
+ it 'serves the newest image' do
+ action = newest_version.actions.first
+
+ expect(response.header['ETag']).to eq(etag(action))
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
end
context 'when design does not have a smaller image size available' do
diff --git a/spec/controllers/projects/environments_controller_spec.rb b/spec/controllers/projects/environments_controller_spec.rb
index 2334521b8a8..dddefbac163 100644
--- a/spec/controllers/projects/environments_controller_spec.rb
+++ b/spec/controllers/projects/environments_controller_spec.rb
@@ -998,6 +998,94 @@ RSpec.describe Projects::EnvironmentsController do
end
end
+ describe '#append_info_to_payload' do
+ let(:search_param) { 'my search param' }
+
+ context 'when search_environment_logging feature is disabled' do
+ before do
+ stub_feature_flags(environments_search_logging: false)
+ end
+
+ it 'does not log search params in meta.environment.search' do
+ expect(controller).to receive(:append_info_to_payload).and_wrap_original do |method, payload|
+ method.call(payload)
+
+ expect(payload[:metadata]).not_to have_key('meta.environment.search')
+ expect(payload[:action]).to eq("search")
+ expect(payload[:controller]).to eq("Projects::EnvironmentsController")
+ end
+
+ get :search, params: environment_params(format: :json, search: search_param)
+ end
+
+ it 'logs params correctly when search params are missing' do
+ expect(controller).to receive(:append_info_to_payload).and_wrap_original do |method, payload|
+ method.call(payload)
+
+ expect(payload[:metadata]).not_to have_key('meta.environment.search')
+ expect(payload[:action]).to eq("search")
+ expect(payload[:controller]).to eq("Projects::EnvironmentsController")
+ end
+
+ get :search, params: environment_params(format: :json)
+ end
+
+ it 'logs params correctly when search param is an empty string' do
+ expect(controller).to receive(:append_info_to_payload).and_wrap_original do |method, payload|
+ method.call(payload)
+
+ expect(payload[:metadata]).not_to have_key('meta.environment.search')
+ expect(payload[:action]).to eq("search")
+ expect(payload[:controller]).to eq("Projects::EnvironmentsController")
+ end
+
+ get :search, params: environment_params(format: :json, search: "")
+ end
+ end
+
+ context 'when search_environment_logging feature is enabled' do
+ before do
+ stub_feature_flags(environments_search_logging: true)
+ end
+
+ it 'logs search params in meta.environment.search' do
+ expect(controller).to receive(:append_info_to_payload).and_wrap_original do |method, payload|
+ method.call(payload)
+
+ expect(payload[:metadata]['meta.environment.search']).to eq(search_param)
+ expect(payload[:action]).to eq("search")
+ expect(payload[:controller]).to eq("Projects::EnvironmentsController")
+ end
+
+ get :search, params: environment_params(format: :json, search: search_param)
+ end
+
+ it 'logs params correctly when search params are missing' do
+ expect(controller).to receive(:append_info_to_payload).and_wrap_original do |method, payload|
+ method.call(payload)
+
+ expect(payload[:metadata]).not_to have_key('meta.environment.search')
+ expect(payload[:action]).to eq("search")
+ expect(payload[:controller]).to eq("Projects::EnvironmentsController")
+ end
+
+ get :search, params: environment_params(format: :json)
+ end
+
+ it 'logs params correctly when search param is an empty string' do
+ expect(controller).to receive(:append_info_to_payload).and_wrap_original do |method, payload|
+ method.call(payload)
+
+ expect(payload[:metadata]).not_to have_key('meta.environment.search')
+ expect(payload[:action]).to eq("search")
+ expect(payload[:controller]).to eq("Projects::EnvironmentsController")
+ end
+
+ get :search, params: environment_params(format: :json, search: "")
+ end
+ end
+ end
+
def environment_params(opts = {})
opts.reverse_merge(namespace_id: project.namespace,
project_id: project,
diff --git a/spec/controllers/projects/group_links_controller_spec.rb b/spec/controllers/projects/group_links_controller_spec.rb
index 96705d82ac5..a5c00d24e30 100644
--- a/spec/controllers/projects/group_links_controller_spec.rb
+++ b/spec/controllers/projects/group_links_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::GroupLinksController do
+RSpec.describe Projects::GroupLinksController, feature_category: :authentication_and_authorization do
let_it_be(:group) { create(:group, :private) }
let_it_be(:group2) { create(:group, :private) }
let_it_be(:project) { create(:project, :private, group: group2) }
@@ -60,4 +60,79 @@ RSpec.describe Projects::GroupLinksController do
end
end
end
+
+ describe '#destroy' do
+ let(:group_owner) { create(:user) }
+
+ let(:link) do
+ create(:project_group_link, project: project, group: group, group_access: Gitlab::Access::DEVELOPER)
+ end
+
+ subject(:destroy_link) do
+ post(:destroy, params: { namespace_id: project.namespace.to_param,
+ project_id: project.to_param,
+ id: link.id })
+ end
+
+ shared_examples 'success response' do
+ it 'deletes the project group link' do
+ destroy_link
+
+ expect(response).to redirect_to(project_project_members_path(project))
+ expect(response).to have_gitlab_http_status(:found)
+ end
+ end
+
+ context 'when user is group owner' do
+ before do
+ link.group.add_owner(group_owner)
+ sign_in(group_owner)
+ end
+
+ context 'when user is not project maintainer' do
+ it 'deletes the project group link and redirects to group show page' do
+ destroy_link
+
+ expect(response).to redirect_to(group_path(group))
+ expect(response).to have_gitlab_http_status(:found)
+ end
+ end
+
+ context 'when user is a project maintainer' do
+ before do
+ project.add_maintainer(group_owner)
+ end
+
+ it 'deletes the project group link and redirects to group show page' do
+ destroy_link
+
+ expect(response).to redirect_to(group_path(group))
+ expect(response).to have_gitlab_http_status(:found)
+ end
+ end
+ end
+
+ context 'when user is not a group owner' do
+ context 'when user is a project maintainer' do
+ before do
+ sign_in(user)
+ end
+
+ it_behaves_like 'success response'
+ end
+
+ context 'when user is not a project maintainer' do
+ before do
+ project.add_developer(user)
+ sign_in(user)
+ end
+
+ it 'renders 404' do
+ destroy_link
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
+ end
end
diff --git a/spec/controllers/projects/merge_requests/creations_controller_spec.rb b/spec/controllers/projects/merge_requests/creations_controller_spec.rb
index ace8c04b819..7db708e0e78 100644
--- a/spec/controllers/projects/merge_requests/creations_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/creations_controller_spec.rb
@@ -307,7 +307,7 @@ RSpec.describe Projects::MergeRequests::CreationsController do
end
end
- describe 'GET target_projects', feature_category: :code_review do
+ describe 'GET target_projects', feature_category: :code_review_workflow do
it 'returns target projects JSON' do
get :target_projects, params: { namespace_id: project.namespace.to_param, project_id: project }
diff --git a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
index 613d82efd06..4de724fd6d6 100644
--- a/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests/diffs_controller_spec.rb
@@ -213,7 +213,6 @@ RSpec.describe Projects::MergeRequests::DiffsController do
commit: nil,
latest_diff: true,
only_context_commits: false,
- merge_conflicts_in_diff: true,
merge_ref_head_diff: false
}
end
@@ -281,7 +280,6 @@ RSpec.describe Projects::MergeRequests::DiffsController do
commit: nil,
latest_diff: true,
only_context_commits: false,
- merge_conflicts_in_diff: true,
merge_ref_head_diff: nil
}
end
@@ -303,33 +301,6 @@ RSpec.describe Projects::MergeRequests::DiffsController do
commit: merge_request.diff_head_commit,
latest_diff: nil,
only_context_commits: false,
- merge_conflicts_in_diff: true,
- merge_ref_head_diff: nil
- }
- end
- end
- end
-
- context 'when display_merge_conflicts_in_diff is disabled' do
- subject { go }
-
- before do
- stub_feature_flags(display_merge_conflicts_in_diff: false)
- end
-
- it_behaves_like 'serializes diffs metadata with expected arguments' do
- let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiff }
- let(:expected_options) do
- {
- merge_request: merge_request,
- merge_request_diff: merge_request.merge_request_diff,
- merge_request_diffs: merge_request.merge_request_diffs,
- start_version: nil,
- start_sha: nil,
- commit: nil,
- latest_diff: true,
- only_context_commits: false,
- merge_conflicts_in_diff: false,
merge_ref_head_diff: nil
}
end
@@ -430,6 +401,16 @@ RSpec.describe Projects::MergeRequests::DiffsController do
expect(response).to have_gitlab_http_status(:ok)
end
+ it 'measures certain parts of the request' do
+ allow(Gitlab::Metrics).to receive(:measure).and_call_original
+ expect(Gitlab::Metrics).to receive(:measure).with(:diffs_unfoldable_positions).and_call_original
+ expect(Gitlab::Metrics).to receive(:measure).with(:diffs_unfold).and_call_original
+ expect(Gitlab::Metrics).to receive(:measure).with(:diffs_write_cache).and_call_original
+ expect(Gitlab::Metrics).to receive(:measure).with(:diffs_render).and_call_original
+
+ subject
+ end
+
it 'tracks mr_diffs event' do
expect(Gitlab::UsageDataCounters::MergeRequestActivityUniqueCounter)
.to receive(:track_mr_diffs_action)
@@ -488,7 +469,6 @@ RSpec.describe Projects::MergeRequests::DiffsController do
commit: nil,
diff_view: :inline,
merge_ref_head_diff: nil,
- merge_conflicts_in_diff: true,
pagination_data: {
total_pages: nil
}.merge(pagination_data)
@@ -607,21 +587,6 @@ RSpec.describe Projects::MergeRequests::DiffsController do
it_behaves_like 'successful request'
end
- context 'when display_merge_conflicts_in_diff is disabled' do
- before do
- stub_feature_flags(display_merge_conflicts_in_diff: false)
- end
-
- subject { go }
-
- it_behaves_like 'serializes diffs with expected arguments' do
- let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
- let(:expected_options) { collection_arguments(total_pages: 20).merge(merge_conflicts_in_diff: false) }
- end
-
- it_behaves_like 'successful request'
- end
-
it_behaves_like 'forked project with submodules'
it_behaves_like 'cached diff collection'
diff --git a/spec/controllers/projects/merge_requests_controller_spec.rb b/spec/controllers/projects/merge_requests_controller_spec.rb
index a93dc806283..095775b0ddd 100644
--- a/spec/controllers/projects/merge_requests_controller_spec.rb
+++ b/spec/controllers/projects/merge_requests_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::MergeRequestsController, feature_category: :code_review do
+RSpec.describe Projects::MergeRequestsController, feature_category: :code_review_workflow do
include ProjectForksHelper
include Gitlab::Routing
using RSpec::Parameterized::TableSyntax
@@ -229,6 +229,16 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-diff:")
end
+
+ context 'when there is no diff' do
+ it 'renders 404' do
+ merge_request.merge_request_diff.destroy!
+
+ go(format: :diff)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
describe "as patch" do
@@ -237,6 +247,16 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
expect(response.headers[Gitlab::Workhorse::SEND_DATA_HEADER]).to start_with("git-format-patch:")
end
+
+ context 'when there is no diff' do
+ it 'renders 404' do
+ merge_request.merge_request_diff.destroy!
+
+ go(format: :patch)
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
end
end
@@ -2132,12 +2152,13 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
create(:protected_branch, project: project, name: merge_request.source_branch, allow_force_push: false)
end
- it 'returns 404' do
+ it 'returns 403' do
expect_rebase_worker_for(user).never
post_rebase
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['merge_error']).to eq('Source branch is protected from force push')
end
end
@@ -2153,12 +2174,13 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :code_review
forked_project.add_reporter(user)
end
- it 'returns 404' do
+ it 'returns 403' do
expect_rebase_worker_for(user).never
post_rebase
- expect(response).to have_gitlab_http_status(:not_found)
+ expect(response).to have_gitlab_http_status(:forbidden)
+ expect(json_response['merge_error']).to eq('Cannot push to source branch')
end
end
diff --git a/spec/controllers/projects/pages_domains_controller_spec.rb b/spec/controllers/projects/pages_domains_controller_spec.rb
index b29bbef0c40..9cc740fcbef 100644
--- a/spec/controllers/projects/pages_domains_controller_spec.rb
+++ b/spec/controllers/projects/pages_domains_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::PagesDomainsController do
+RSpec.describe Projects::PagesDomainsController, feature_category: :pages do
let(:user) { create(:user) }
let(:project) { create(:project) }
let!(:pages_domain) { create(:pages_domain, project: project) }
@@ -70,6 +70,7 @@ RSpec.describe Projects::PagesDomainsController do
project_id: project.id,
namespace_id: project.namespace.id,
root_namespace_id: project.root_namespace.id,
+ domain_id: kind_of(Numeric),
domain: pages_domain_params[:domain]
)
@@ -119,6 +120,7 @@ RSpec.describe Projects::PagesDomainsController do
project_id: project.id,
namespace_id: project.namespace.id,
root_namespace_id: project.root_namespace.id,
+ domain_id: pages_domain.id,
domain: pages_domain.domain
)
end
@@ -226,6 +228,7 @@ RSpec.describe Projects::PagesDomainsController do
project_id: project.id,
namespace_id: project.namespace.id,
root_namespace_id: project.root_namespace.id,
+ domain_id: pages_domain.id,
domain: pages_domain.domain
)
@@ -251,6 +254,7 @@ RSpec.describe Projects::PagesDomainsController do
project_id: project.id,
namespace_id: project.namespace.id,
root_namespace_id: project.root_namespace.id,
+ domain_id: pages_domain.id,
domain: pages_domain.domain
)
end
diff --git a/spec/controllers/projects/pipelines_controller_spec.rb b/spec/controllers/projects/pipelines_controller_spec.rb
index f66e4b133ca..3d1d28945f7 100644
--- a/spec/controllers/projects/pipelines_controller_spec.rb
+++ b/spec/controllers/projects/pipelines_controller_spec.rb
@@ -84,6 +84,13 @@ RSpec.describe Projects::PipelinesController do
end
context 'when performing gitaly calls', :request_store do
+ before do
+ # Prevent double writes / fallback reads caused by MultiStore, which fail the
+ # `Gitlab::GitalyClient.get_request_count` expectation below.
+ stub_feature_flags(use_primary_store_as_default_for_repository_cache: false)
+ stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: false)
+ end
+
it 'limits the Gitaly requests' do
# Isolate from test preparation (Repository#exists? is also cached in RequestStore)
RequestStore.end!
diff --git a/spec/controllers/projects/protected_branches_controller_spec.rb b/spec/controllers/projects/protected_branches_controller_spec.rb
index 14728618633..6778d4100b8 100644
--- a/spec/controllers/projects/protected_branches_controller_spec.rb
+++ b/spec/controllers/projects/protected_branches_controller_spec.rb
@@ -33,21 +33,26 @@ RSpec.describe Projects::ProtectedBranchesController do
let(:create_params) { attributes_for(:protected_branch).merge(access_level_params) }
- it 'creates the protected branch rule' do
- expect do
- post(:create, params: project_params.merge(protected_branch: create_params))
- end.to change(ProtectedBranch, :count).by(1)
- end
+ describe "created successfully" do
+ using RSpec::Parameterized::TableSyntax
- context 'when repository is empty' do
- let(:project) { empty_project }
+ let(:protected_branch) { create(:protected_branch, project: ref_project) }
+ let(:project_params) { { namespace_id: ref_project.namespace.to_param, project_id: ref_project } }
+
+ subject { post(:create, params: project_params.merge(protected_branch: create_params), format: format) }
- it 'creates the protected branch rule' do
- expect do
- post(:create, params: project_params.merge(protected_branch: create_params))
- end.to change(ProtectedBranch, :count).by(1)
+ where(:format, :ref_project, :response_status) do
+ :html | ref(:project) | :found
+ :html | ref(:empty_project) | :found
+ :json | ref(:project) | :ok
+ :json | ref(:empty_project) | :ok
+ end
- expect(response).to have_gitlab_http_status(:found)
+ with_them do
+ it 'creates a protected branch' do
+ expect { subject }.to change(ProtectedBranch, :count).by(1)
+ expect(response).to have_gitlab_http_status(response_status)
+ end
end
end
diff --git a/spec/controllers/projects/releases/evidences_controller_spec.rb b/spec/controllers/projects/releases/evidences_controller_spec.rb
index 68433969d69..879cbc543e9 100644
--- a/spec/controllers/projects/releases/evidences_controller_spec.rb
+++ b/spec/controllers/projects/releases/evidences_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::Releases::EvidencesController do
+RSpec.describe Projects::Releases::EvidencesController, :with_license do
let!(:project) { create(:project, :repository, :public) }
let_it_be(:private_project) { create(:project, :repository, :private) }
let_it_be(:developer) { create(:user) }
diff --git a/spec/controllers/registrations/welcome_controller_spec.rb b/spec/controllers/registrations/welcome_controller_spec.rb
index a3b246fbedd..b5416d226e1 100644
--- a/spec/controllers/registrations/welcome_controller_spec.rb
+++ b/spec/controllers/registrations/welcome_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Registrations::WelcomeController do
+RSpec.describe Registrations::WelcomeController, feature_category: :authentication_and_authorization do
let(:user) { create(:user) }
describe '#welcome' do
@@ -47,7 +47,7 @@ RSpec.describe Registrations::WelcomeController do
it { is_expected.to render_template(:show) }
end
- context '2FA is required from group' do
+ context 'when 2FA is required from group' do
before do
user = create(:user, require_two_factor_authentication_from_group: true)
sign_in(user)
@@ -99,7 +99,7 @@ RSpec.describe Registrations::WelcomeController do
end
context 'when tasks to be done are assigned' do
- let!(:member1) { create(:group_member, user: user, tasks_to_be_done: %w(ci code)) }
+ let!(:member1) { create(:group_member, user: user, tasks_to_be_done: %w[ci code]) }
it { is_expected.to redirect_to(issues_dashboard_path(assignee_username: user.username)) }
end
diff --git a/spec/controllers/registrations_controller_spec.rb b/spec/controllers/registrations_controller_spec.rb
index 699052fe37a..d0439a18158 100644
--- a/spec/controllers/registrations_controller_spec.rb
+++ b/spec/controllers/registrations_controller_spec.rb
@@ -137,6 +137,21 @@ RSpec.describe RegistrationsController do
end
end
+ context 'private profile' do
+ context 'when the `user_defaults_to_private_profile` setting is turned on' do
+ before do
+ stub_application_setting(user_defaults_to_private_profile: true)
+ end
+
+ it 'creates new user with profile set to private' do
+ subject
+ user = User.find_by(email: base_user_params[:email], private_profile: true)
+
+ expect(user).to be_present
+ end
+ end
+ end
+
context 'email confirmation' do
before do
stub_feature_flags(identity_verification: false)
diff --git a/spec/controllers/uploads_controller_spec.rb b/spec/controllers/uploads_controller_spec.rb
index 3e9c56d3274..8015136d1e0 100644
--- a/spec/controllers/uploads_controller_spec.rb
+++ b/spec/controllers/uploads_controller_spec.rb
@@ -658,19 +658,17 @@ RSpec.describe UploadsController do
end
context 'Appearance' do
- context 'when viewing a custom header logo' do
- let!(:appearance) { create :appearance, header_logo: fixture_file_upload('spec/fixtures/dk.png', 'image/png') }
-
+ shared_examples 'view custom logo' do |mounted_as|
context 'when not signed in' do
it 'responds with status 200' do
- get :show, params: { model: 'appearance', mounted_as: 'header_logo', id: appearance.id, filename: 'dk.png' }
+ get :show, params: { model: 'appearance', mounted_as: mounted_as, id: appearance.id, filename: 'dk.png' }
expect(response).to have_gitlab_http_status(:ok)
end
it_behaves_like 'content publicly cached' do
subject do
- get :show, params: { model: 'appearance', mounted_as: 'header_logo', id: appearance.id, filename: 'dk.png' }
+ get :show, params: { model: 'appearance', mounted_as: mounted_as, id: appearance.id, filename: 'dk.png' }
response
end
@@ -678,24 +676,22 @@ RSpec.describe UploadsController do
end
end
- context 'when viewing a custom logo' do
- let!(:appearance) { create :appearance, logo: fixture_file_upload('spec/fixtures/dk.png', 'image/png') }
+ context 'when viewing a custom pwa icon' do
+ let!(:appearance) { create :appearance, pwa_icon: fixture_file_upload('spec/fixtures/dk.png', 'image/png') }
- context 'when not signed in' do
- it 'responds with status 200' do
- get :show, params: { model: 'appearance', mounted_as: 'logo', id: appearance.id, filename: 'dk.png' }
+ it_behaves_like 'view custom logo', 'pwa_icon'
+ end
- expect(response).to have_gitlab_http_status(:ok)
- end
+ context 'when viewing a custom header logo' do
+ let!(:appearance) { create :appearance, header_logo: fixture_file_upload('spec/fixtures/dk.png', 'image/png') }
- it_behaves_like 'content publicly cached' do
- subject do
- get :show, params: { model: 'appearance', mounted_as: 'logo', id: appearance.id, filename: 'dk.png' }
+ it_behaves_like 'view custom logo', 'header_logo'
+ end
- response
- end
- end
- end
+ context 'when viewing a custom logo' do
+ let!(:appearance) { create :appearance, logo: fixture_file_upload('spec/fixtures/dk.png', 'image/png') }
+
+ it_behaves_like 'view custom logo', 'logo'
end
end
@@ -740,6 +736,46 @@ RSpec.describe UploadsController do
expect(response).to have_gitlab_http_status(:ok)
end
end
+
+ context "when viewing an achievement" do
+ let!(:achievement) { create(:achievement, avatar: fixture_file_upload("spec/fixtures/dk.png", "image/png")) }
+
+ context "when signed in" do
+ before do
+ sign_in(user)
+ end
+
+ it "responds with status 200" do
+ get :show, params: { model: "achievements/achievement", mounted_as: "avatar", id: achievement.id, filename: "dk.png" }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it_behaves_like 'content publicly cached' do
+ subject do
+ get :show, params: { model: "achievements/achievement", mounted_as: "avatar", id: achievement.id, filename: "dk.png" }
+
+ response
+ end
+ end
+ end
+
+ context "when not signed in" do
+ it "responds with status 200" do
+ get :show, params: { model: "achievements/achievement", mounted_as: "avatar", id: achievement.id, filename: "dk.png" }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it_behaves_like 'content publicly cached' do
+ subject do
+ get :show, params: { model: "achievements/achievement", mounted_as: "avatar", id: achievement.id, filename: "dk.png" }
+
+ response
+ end
+ end
+ end
+ end
end
def post_authorize(verified: true)
diff --git a/spec/db/docs_spec.rb b/spec/db/docs_spec.rb
index 6cfff725988..5960b8bebcc 100644
--- a/spec/db/docs_spec.rb
+++ b/spec/db/docs_spec.rb
@@ -2,12 +2,20 @@
require 'spec_helper'
+# This list is used to provide temporary exceptions for feature categories
+# that are transitioning and not yet in the feature_categories.yml file.
+# Any additions here should be accompanied by a link to an issue.
+VALID_FEATURE_CATEGORIES = [
+ 'jihu' # https://gitlab.com/gitlab-org/database-team/team-tasks/-/issues/192
+].freeze
+
RSpec.shared_examples 'validate dictionary' do |objects, directory_path, required_fields|
context 'for each object' do
let(:directory_path) { directory_path }
let(:metadata_allowed_fields) do
required_fields + %i[
+ feature_categories
classes
description
introduced_by_url
@@ -40,6 +48,10 @@ RSpec.shared_examples 'validate dictionary' do |objects, directory_path, require
metadata.select { |_, t| t.has_key?(:missing_required_fields) }.keys
end
+ let(:objects_with_invalid_feature_category) do
+ metadata.select { |_, t| t.has_key?(:invalid_feature_category) }.keys
+ end
+
it 'has a metadata file' do
expect(objects_without_metadata).to be_empty, multiline_error(
'Missing metadata files',
@@ -55,6 +67,14 @@ RSpec.shared_examples 'validate dictionary' do |objects, directory_path, require
)
end
+ it 'has a valid feature category' do
+ expect(objects_with_invalid_feature_category).to be_empty, object_metadata_errors(
+ 'Table metadata files with an invalid feature category',
+ :error,
+ objects_with_invalid_feature_category
+ )
+ end
+
it 'has a valid metadata file with allowed fields' do
expect(objects_with_disallowed_fields).to be_empty, object_metadata_errors(
'Table metadata files with disallowed fields',
@@ -82,6 +102,13 @@ RSpec.shared_examples 'validate dictionary' do |objects, directory_path, require
Rails.root.join(object_metadata_file(object_name))
end
+ def feature_categories_valid?(object_feature_categories)
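+ # Categories are valid when present and each one appears either in
+ # config/feature_categories.yml or in the temporary VALID_FEATURE_CATEGORIES exceptions above.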
+ return false unless object_feature_categories.present?
+
+ all_feature_categories = YAML.load_file(Rails.root.join('config/feature_categories.yml')) + VALID_FEATURE_CATEGORIES
+ object_feature_categories.all? { |category| all_feature_categories.include?(category) }
+ end
+
def load_object_metadata(required_fields, object_name)
result = {}
begin
@@ -94,6 +121,16 @@ RSpec.shared_examples 'validate dictionary' do |objects, directory_path, require
unless missing_required_fields.empty?
result[:missing_required_fields] = "missing required fields: #{missing_required_fields.join(', ')}"
end
+
+ if required_fields.include?(:feature_categories)
+ object_feature_categories = result.dig(:metadata, :feature_categories)
+
+ unless feature_categories_valid?(object_feature_categories)
+ result[:invalid_feature_category] =
+ "invalid feature category: #{object_feature_categories}. " \
+ "Please use a category from https://about.gitlab.com/handbook/product/categories/#categories-a-z"
+ end
+ end
rescue Psych::SyntaxError => ex
result[:error] = ex.message
end
@@ -139,3 +176,19 @@ RSpec.describe 'Tables documentation', feature_category: :database do
include_examples 'validate dictionary', tables, directory_path, required_fields
end
+
+RSpec.describe 'Deleted tables documentation', feature_category: :database do
+ directory_path = File.join('db', 'docs', 'deleted_tables')
+ tables = Dir.glob(File.join(directory_path, '*.yml')).map { |f| File.basename(f, '.yml') }.sort.uniq
+ required_fields = %i[table_name gitlab_schema removed_by_url removed_in_milestone]
+
+ include_examples 'validate dictionary', tables, directory_path, required_fields
+end
+
+RSpec.describe 'Deleted views documentation', feature_category: :database do
+ directory_path = File.join('db', 'docs', 'deleted_views')
+ views = Dir.glob(File.join(directory_path, '*.yml')).map { |f| File.basename(f, '.yml') }.sort.uniq
+ required_fields = %i[view_name gitlab_schema removed_by_url removed_in_milestone]
+
+ include_examples 'validate dictionary', views, directory_path, required_fields
+end
diff --git a/spec/db/migration_spec.rb b/spec/db/migration_spec.rb
index b5f6192233f..a5449c6dccd 100644
--- a/spec/db/migration_spec.rb
+++ b/spec/db/migration_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Migrations Validation' do
+RSpec.describe 'Migrations Validation', feature_category: :database do
using RSpec::Parameterized::TableSyntax
# The range describes the timestamps that given migration helper can be used
diff --git a/spec/db/schema_spec.rb b/spec/db/schema_spec.rb
index 9e23cca7c3f..7f3cab55d5a 100644
--- a/spec/db/schema_spec.rb
+++ b/spec/db/schema_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require Rails.root.join('ee', 'spec', 'db', 'schema_support') if Gitlab.ee?
-RSpec.describe 'Database schema' do
+RSpec.describe 'Database schema', feature_category: :database do
prepend_mod_with('DB::SchemaSupport')
let(:tables) { connection.tables }
@@ -30,7 +30,7 @@ RSpec.describe 'Database schema' do
award_emoji: %w[awardable_id user_id],
aws_roles: %w[role_external_id],
boards: %w[milestone_id iteration_id],
- chat_names: %w[chat_id team_id user_id],
+ chat_names: %w[chat_id team_id user_id integration_id],
chat_teams: %w[team_id],
ci_build_needs: %w[partition_id],
ci_build_pending_states: %w[partition_id],
@@ -39,7 +39,7 @@ RSpec.describe 'Database schema' do
ci_build_trace_metadata: %w[partition_id],
ci_builds: %w[erased_by_id trigger_request_id partition_id],
ci_builds_runner_session: %w[partition_id],
- p_ci_builds_metadata: %w[partition_id],
+ p_ci_builds_metadata: %w[partition_id runner_machine_id], # NOTE: FK will be added in follow-up https://gitlab.com/gitlab-org/gitlab/-/merge_requests/108167
ci_job_artifacts: %w[partition_id],
ci_job_variables: %w[partition_id],
ci_namespace_monthly_usages: %w[namespace_id],
@@ -166,7 +166,7 @@ RSpec.describe 'Database schema' do
context 'columns ending with _id' do
let(:column_names) { columns.map(&:name) }
let(:column_names_with_id) { column_names.select { |column_name| column_name.ends_with?('_id') } }
- let(:foreign_keys_columns) { all_foreign_keys.map(&:column).uniq } # we can have FK and loose FK present at the same time
+ let(:foreign_keys_columns) { all_foreign_keys.reject { |fk| fk.name&.end_with?("_p") }.map(&:column).uniq } # we can have FK and loose FK present at the same time
let(:ignored_columns) { ignored_fk_columns(table) }
it 'do have the foreign keys' do
@@ -184,7 +184,7 @@ RSpec.describe 'Database schema' do
# These pre-existing enums have limits > 2 bytes
IGNORED_LIMIT_ENUMS = {
- 'Analytics::CycleAnalytics::GroupStage' => %w[start_event_identifier end_event_identifier],
+ 'Analytics::CycleAnalytics::Stage' => %w[start_event_identifier end_event_identifier],
'Analytics::CycleAnalytics::ProjectStage' => %w[start_event_identifier end_event_identifier],
'Ci::Bridge' => %w[failure_reason],
'Ci::Build' => %w[failure_reason],
diff --git a/spec/factories/abuse_reports.rb b/spec/factories/abuse_reports.rb
index 4174faae1ed..4ae9b4def8e 100644
--- a/spec/factories/abuse_reports.rb
+++ b/spec/factories/abuse_reports.rb
@@ -5,5 +5,6 @@ FactoryBot.define do
reporter factory: :user
user
message { 'User sends spam' }
+ reported_from_url { 'http://gitlab.com' }
end
end
diff --git a/spec/factories/analytics/cycle_analytics/aggregations.rb b/spec/factories/analytics/cycle_analytics/aggregations.rb
index 78e82f166d0..99f0e34ede7 100644
--- a/spec/factories/analytics/cycle_analytics/aggregations.rb
+++ b/spec/factories/analytics/cycle_analytics/aggregations.rb
@@ -2,7 +2,7 @@
FactoryBot.define do
factory :cycle_analytics_aggregation, class: 'Analytics::CycleAnalytics::Aggregation' do
- group
+ namespace { association(:group) }
enabled { true }
diff --git a/spec/factories/appearances.rb b/spec/factories/appearances.rb
index 8101cd8d8bf..321c31d7565 100644
--- a/spec/factories/appearances.rb
+++ b/spec/factories/appearances.rb
@@ -18,6 +18,10 @@ FactoryBot.define do
header_logo { fixture_file_upload('spec/fixtures/dk.png') }
end
+ trait :with_pwa_icon do
+ pwa_icon { fixture_file_upload('spec/fixtures/dk.png') }
+ end
+
trait :with_favicon do
favicon { fixture_file_upload('spec/fixtures/dk.png') }
end
diff --git a/spec/factories/bulk_import/entities.rb b/spec/factories/bulk_import/entities.rb
index eeb4f8325ae..66d212daaae 100644
--- a/spec/factories/bulk_import/entities.rb
+++ b/spec/factories/bulk_import/entities.rb
@@ -10,6 +10,7 @@ FactoryBot.define do
sequence(:destination_namespace) { |n| "destination-path-#{n}" }
destination_name { 'Imported Entity' }
sequence(:source_xid)
+ migrate_projects { true }
trait(:group_entity) do
source_type { :group_entity }
diff --git a/spec/factories/ci/builds.rb b/spec/factories/ci/builds.rb
index 15a88955e05..78398fd7f20 100644
--- a/spec/factories/ci/builds.rb
+++ b/spec/factories/ci/builds.rb
@@ -421,9 +421,17 @@ FactoryBot.define do
end
trait :artifacts do
- after(:create) do |build|
- create(:ci_job_artifact, :archive, job: build, expire_at: build.artifacts_expire_at)
- create(:ci_job_artifact, :metadata, job: build, expire_at: build.artifacts_expire_at)
+ after(:create) do |build, evaluator|
+ create(:ci_job_artifact, :archive, :public, job: build, expire_at: build.artifacts_expire_at)
+ create(:ci_job_artifact, :metadata, :public, job: build, expire_at: build.artifacts_expire_at)
+ build.reload
+ end
+ end
+
+ trait :private_artifacts do
+ after(:create) do |build, evaluator|
+ create(:ci_job_artifact, :archive, :private, job: build, expire_at: build.artifacts_expire_at)
+ create(:ci_job_artifact, :metadata, :private, job: build, expire_at: build.artifacts_expire_at)
build.reload
end
end
diff --git a/spec/factories/ci/job_artifacts.rb b/spec/factories/ci/job_artifacts.rb
index 7569e832c60..5e049e0375b 100644
--- a/spec/factories/ci/job_artifacts.rb
+++ b/spec/factories/ci/job_artifacts.rb
@@ -174,6 +174,14 @@ FactoryBot.define do
end
end
+ trait :private do
+ accessibility { 'private' }
+ end
+
+ trait :public do
+ accessibility { 'public' }
+ end
+
trait :accessibility do
file_type { :accessibility }
file_format { :raw }
diff --git a/spec/factories/ci/runner_machines.rb b/spec/factories/ci/runner_machines.rb
new file mode 100644
index 00000000000..09bf5d0844e
--- /dev/null
+++ b/spec/factories/ci/runner_machines.rb
@@ -0,0 +1,8 @@
+# frozen_string_literal: true
+
+FactoryBot.define do
+ factory :ci_runner_machine, class: 'Ci::RunnerMachine' do
+ runner factory: :ci_runner
+ machine_xid { "r_#{SecureRandom.hex.slice(0, 10)}" }
+ end
+end
diff --git a/spec/factories/design_management/designs.rb b/spec/factories/design_management/designs.rb
index 3d95c754a96..d16fd0c297b 100644
--- a/spec/factories/design_management/designs.rb
+++ b/spec/factories/design_management/designs.rb
@@ -100,8 +100,9 @@ FactoryBot.define do
trait :with_file do
transient do
deleted { false }
- versions_count { 1 }
file { File.join(Rails.root, 'spec/fixtures/dk.png') }
+ versions_count { 1 }
+ versions_sha { nil }
end
after :create do |design, evaluator|
@@ -109,6 +110,8 @@ FactoryBot.define do
repository = project.design_repository
commit_version = ->(action) do
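+ # When versions_sha is given, reuse it as the version sha instead of committing new files
+ # (lets multiple designs share one version sha).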
+ return evaluator.versions_sha if evaluator.versions_sha
+
repository.commit_files(
evaluator.author,
branch_name: 'master',
diff --git a/spec/factories/groups.rb b/spec/factories/groups.rb
index f4d47b9ff8c..5b4839df2d3 100644
--- a/spec/factories/groups.rb
+++ b/spec/factories/groups.rb
@@ -74,7 +74,7 @@ FactoryBot.define do
allow_descendants_override_disabled_shared_runners { false }
end
- trait :disabled_with_override do
+ trait :disabled_and_overridable do
shared_runners_disabled
allow_descendants_override_disabled_shared_runners
end
diff --git a/spec/factories/integrations.rb b/spec/factories/integrations.rb
index ebbf1b560e5..7740b2da911 100644
--- a/spec/factories/integrations.rb
+++ b/spec/factories/integrations.rb
@@ -254,6 +254,16 @@ FactoryBot.define do
password { 'harborpassword' }
end
+ factory :apple_app_store_integration, class: 'Integrations::AppleAppStore' do
+ project
+ active { true }
+ type { 'Integrations::AppleAppStore' }
+
+ app_store_issuer_id { 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee' }
+ app_store_key_id { 'ABC1' }
+ app_store_private_key { File.read('spec/fixtures/ssl_key.pem') }
+ end
+
# this is for testing storing values inside properties, which is deprecated and will be removed in
# https://gitlab.com/gitlab-org/gitlab/issues/29404
trait :without_properties_callback do
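A spec-side sketch of the new Apple App Store integration factory (not part of this diff; values come from the factory defaults above):

    integration = create(:apple_app_store_integration)
    integration.app_store_key_id      # => "ABC1"
    integration.app_store_private_key # contents of spec/fixtures/ssl_key.pem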
diff --git a/spec/factories/ml/candidates.rb b/spec/factories/ml/candidates.rb
index 2daed36d777..1b41e39d711 100644
--- a/spec/factories/ml/candidates.rb
+++ b/spec/factories/ml/candidates.rb
@@ -16,5 +16,14 @@ FactoryBot.define do
candidate.metadata = FactoryBot.create_list(:ml_candidate_metadata, 2, candidate: candidate )
end
end
+
+ trait :with_artifact do
+ after(:create) do |candidate|
+ FactoryBot.create(:generic_package,
+ name: candidate.package_name,
+ version: candidate.package_version,
+ project: candidate.project)
+ end
+ end
end
end
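A sketch of the new :with_artifact trait in use (spec-side; the :ml_candidate factory name and the Packages::Package lookup are assumptions, not shown in this hunk):

    candidate = create(:ml_candidate, :with_artifact)
    # the trait creates a generic package named and versioned after the candidate
    Packages::Package.find_by(name: candidate.package_name, version: candidate.package_version) # => present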
diff --git a/spec/factories/personal_access_tokens.rb b/spec/factories/personal_access_tokens.rb
index 9625fdc195d..a140011941f 100644
--- a/spec/factories/personal_access_tokens.rb
+++ b/spec/factories/personal_access_tokens.rb
@@ -27,6 +27,12 @@ FactoryBot.define do
token_digest { nil }
end
+ trait :admin_mode do
+ before(:create) do |personal_access_token|
+ personal_access_token.scopes.append(Gitlab::Auth::ADMIN_MODE_SCOPE) if personal_access_token.user.admin?
+ end
+ end
+
trait :no_prefix do
after(:build) { |personal_access_token| personal_access_token.set_token(Devise.friendly_token) }
end
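A usage sketch for the new :admin_mode trait (spec-side, not part of this diff):

    # the scope is appended only when the token's user is an admin (see the before(:create) hook above)
    token = create(:personal_access_token, :admin_mode, user: create(:admin))
    token.scopes # includes Gitlab::Auth::ADMIN_MODE_SCOPE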
diff --git a/spec/factories/projects/build_artifacts_size_refreshes.rb b/spec/factories/projects/build_artifacts_size_refreshes.rb
index b05f5dfab1c..b00cecfa705 100644
--- a/spec/factories/projects/build_artifacts_size_refreshes.rb
+++ b/spec/factories/projects/build_artifacts_size_refreshes.rb
@@ -18,6 +18,10 @@ FactoryBot.define do
refresh_started_at { Time.zone.now }
end
+ trait :finalizing do
+ state { Projects::BuildArtifactsSizeRefresh::STATES[:finalizing] }
+ end
+
trait :stale do
running
refresh_started_at { 30.days.ago }
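A sketch of the new :finalizing trait (spec-side; the :project_build_artifacts_size_refresh factory name is assumed from the file path, not shown in this hunk):

    refresh = create(:project_build_artifacts_size_refresh, :finalizing)
    refresh.state # => Projects::BuildArtifactsSizeRefresh::STATES[:finalizing]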
diff --git a/spec/factories/wiki_pages.rb b/spec/factories/wiki_pages.rb
index 6f912a183e8..9b4c8a4fced 100644
--- a/spec/factories/wiki_pages.rb
+++ b/spec/factories/wiki_pages.rb
@@ -28,7 +28,7 @@ FactoryBot.define do
# Clear our default @page, except when using build_stubbed
after(:build) do |page|
- page.instance_variable_set('@page', nil)
+ page.instance_variable_set(:@page, nil)
end
to_create do |page, evaluator|
diff --git a/spec/features/abuse_report_spec.rb b/spec/features/abuse_report_spec.rb
index fdd11b59938..e0a61656a88 100644
--- a/spec/features/abuse_report_spec.rb
+++ b/spec/features/abuse_report_spec.rb
@@ -2,25 +2,155 @@
require 'spec_helper'
-RSpec.describe 'Abuse reports', feature_category: :not_owned do
- let(:another_user) { create(:user) }
+RSpec.describe 'Abuse reports', :js, feature_category: :insider_threat do
+ let_it_be(:abusive_user) { create(:user) }
+
+ let_it_be(:reporter1) { create(:user) }
+
+ let_it_be(:project) { create(:project, :public, :repository) }
+ let_it_be(:issue) { create(:issue, project: project, author: abusive_user) }
before do
- sign_in(create(:user))
+ sign_in(reporter1)
end
- it 'report abuse' do
- visit user_path(another_user)
+ describe 'report abuse to administrator' do
+ shared_examples 'reports the user with an abuse category' do
+ it do
+ fill_and_submit_abuse_category_form
+ fill_and_submit_report_abuse_form
- click_link 'Report abuse'
+ expect(page).to have_content 'Thank you for your report'
+ end
+ end
- fill_in 'abuse_report_message', with: 'This user sends spam'
- click_button 'Send report'
+ shared_examples 'reports the user without an abuse category' do
+ it do
+ click_link 'Report abuse to administrator'
+
+ fill_and_submit_report_abuse_form
+
+ expect(page).to have_content 'Thank you for your report'
+ end
+ end
+
+ context 'when reporting an issue for abuse' do
+ before do
+ visit project_issue_path(project, issue)
+
+ click_button 'Issue actions'
+ end
+
+ it_behaves_like 'reports the user with an abuse category'
+
+ it 'redirects back to the issue when cancel button is clicked' do
+ fill_and_submit_abuse_category_form
+
+ click_link 'Cancel'
+
+ expect(page).to have_current_path(project_issue_path(project, issue))
+ end
+ end
+
+ context 'when reporting an incident for abuse' do
+ let_it_be(:incident) { create(:incident, project: project, author: abusive_user) }
+
+ before do
+ visit project_issues_incident_path(project, incident)
+ click_button 'Incident actions'
+ end
+
+ it_behaves_like 'reports the user with an abuse category'
+ end
+
+ context 'when reporting a user profile for abuse' do
+ let_it_be(:reporter2) { create(:user) }
+
+ before do
+ visit user_path(abusive_user)
+ end
+
+ it_behaves_like 'reports the user with an abuse category'
+
+ it 'allows the reporter to report the same user for different abuse categories' do
+ visit user_path(abusive_user)
+
+ fill_and_submit_abuse_category_form
+ fill_and_submit_report_abuse_form
+
+ expect(page).to have_content 'Thank you for your report'
+
+ visit user_path(abusive_user)
- expect(page).to have_content 'Thank you for your report'
+ fill_and_submit_abuse_category_form("They're being offensive or abusive.")
+ fill_and_submit_report_abuse_form
- visit user_path(another_user)
+ expect(page).to have_content 'Thank you for your report'
+ end
- expect(page).to have_button("Already reported for abuse")
+ it 'allows multiple users to report the same user' do
+ fill_and_submit_abuse_category_form
+ fill_and_submit_report_abuse_form
+
+ expect(page).to have_content 'Thank you for your report'
+
+ gitlab_sign_out
+ gitlab_sign_in(reporter2)
+
+ visit user_path(abusive_user)
+
+ fill_and_submit_abuse_category_form
+ fill_and_submit_report_abuse_form
+
+ expect(page).to have_content 'Thank you for your report'
+ end
+
+ it 'redirects back to user profile when cancel button is clicked' do
+ fill_and_submit_abuse_category_form
+
+ click_link 'Cancel'
+
+ expect(page).to have_current_path(user_path(abusive_user))
+ end
+ end
+
+ context 'when reporting a merge request for abuse' do
+ let_it_be(:merge_request) { create(:merge_request, source_project: project, author: abusive_user) }
+
+ before do
+ visit project_merge_request_path(project, merge_request)
+ find('[data-testid="merge-request-actions"]').click
+ end
+
+ it_behaves_like 'reports the user with an abuse category'
+ end
+
+ context 'when reporting a comment' do
+ let_it_be(:issue) { create(:issue, project: project, author: abusive_user) }
+ let_it_be(:comment) do
+ create(:discussion_note_on_issue, author: abusive_user, project: project, noteable: issue, note: 'some note')
+ end
+
+ before do
+ visit project_issue_path(project, issue)
+ click_button 'More actions'
+ end
+
+ it_behaves_like 'reports the user without an abuse category'
+ end
+ end
+
+ private
+
+ def fill_and_submit_abuse_category_form(category = "They're posting spam.")
+ click_button 'Report abuse to administrator'
+
+ choose category
+ click_button 'Next'
+ end
+
+ def fill_and_submit_report_abuse_form
+ fill_in 'abuse_report_message', with: 'This user sends spam'
+ click_button 'Send report'
end
end
diff --git a/spec/features/admin/admin_broadcast_messages_spec.rb b/spec/features/admin/admin_broadcast_messages_spec.rb
deleted file mode 100644
index a6bbdd70fc3..00000000000
--- a/spec/features/admin/admin_broadcast_messages_spec.rb
+++ /dev/null
@@ -1,98 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Admin Broadcast Messages', feature_category: :onboarding do
- before do
- admin = create(:admin)
- sign_in(admin)
- stub_feature_flags(vue_broadcast_messages: false)
- gitlab_enable_admin_mode_sign_in(admin)
- create(
- :broadcast_message,
- :expired,
- message: 'Migration to new server',
- target_access_levels: [Gitlab::Access::DEVELOPER]
- )
- visit admin_broadcast_messages_path
- end
-
- it 'see broadcast messages list' do
- expect(page).to have_content 'Migration to new server'
- end
-
- it 'creates a customized broadcast banner message' do
- fill_in 'broadcast_message_message', with: 'Application update from **4:00 CST to 5:00 CST**'
- fill_in 'broadcast_message_target_path', with: '*/user_onboarded'
- select 'light-indigo', from: 'broadcast_message_theme'
- select Date.today.next_year.year, from: 'broadcast_message_ends_at_1i'
- check 'Guest'
- check 'Owner'
- click_button 'Add broadcast message'
-
- expect(page).to have_current_path admin_broadcast_messages_path, ignore_query: true
- expect(page).to have_content 'Application update from 4:00 CST to 5:00 CST'
- expect(page).to have_content 'Guest, Owner'
- expect(page).to have_content '*/user_onboarded'
- expect(page).to have_selector 'strong', text: '4:00 CST to 5:00 CST'
- expect(page).to have_selector %(.light-indigo[role=alert])
- end
-
- it 'creates a customized broadcast notification message' do
- fill_in 'broadcast_message_message', with: 'Application update from **4:00 CST to 5:00 CST**'
- fill_in 'broadcast_message_target_path', with: '*/user_onboarded'
- select 'Notification', from: 'broadcast_message_broadcast_type'
- select Date.today.next_year.year, from: 'broadcast_message_ends_at_1i'
- check 'Reporter'
- check 'Developer'
- check 'Maintainer'
- click_button 'Add broadcast message'
-
- expect(page).to have_current_path admin_broadcast_messages_path, ignore_query: true
- expect(page).to have_content 'Application update from 4:00 CST to 5:00 CST'
- expect(page).to have_content 'Reporter, Developer, Maintainer'
- expect(page).to have_content '*/user_onboarded'
- expect(page).to have_content 'Notification'
- expect(page).to have_selector 'strong', text: '4:00 CST to 5:00 CST'
- end
-
- it 'edit an existing broadcast message' do
- click_link 'Edit'
- fill_in 'broadcast_message_message', with: 'Application update RIGHT NOW'
- check 'Reporter'
- click_button 'Update broadcast message'
-
- expect(page).to have_current_path admin_broadcast_messages_path, ignore_query: true
- expect(page).to have_content 'Application update RIGHT NOW'
-
- page.within('.table-responsive') do
- expect(page).to have_content 'Reporter, Developer'
- end
- end
-
- it 'remove an existing broadcast message' do
- click_link 'Remove'
-
- expect(page).to have_current_path admin_broadcast_messages_path, ignore_query: true
- expect(page).not_to have_content 'Migration to new server'
- end
-
- it 'updates a preview of a customized broadcast banner message', :js do
- fill_in 'broadcast_message_message', with: "Live **Markdown** previews. :tada:"
-
- page.within('.js-broadcast-banner-message-preview') do
- expect(page).to have_selector('strong', text: 'Markdown')
- expect(page).to have_emoji('tada')
- end
- end
-
- it 'updates a preview of a customized broadcast notification message', :js do
- fill_in 'broadcast_message_message', with: "Live **Markdown** previews. :tada:"
- select 'Notification', from: 'broadcast_message_broadcast_type'
-
- page.within('#broadcast-message-preview') do
- expect(page).to have_selector('strong', text: 'Markdown')
- expect(page).to have_emoji('tada')
- end
- end
-end
diff --git a/spec/features/admin/admin_groups_spec.rb b/spec/features/admin/admin_groups_spec.rb
index c36a742af6b..119e09f9b09 100644
--- a/spec/features/admin/admin_groups_spec.rb
+++ b/spec/features/admin/admin_groups_spec.rb
@@ -204,6 +204,17 @@ RSpec.describe 'Admin Groups', feature_category: :subgroups do
expect(page).to have_content(new_admin_note_text)
end
+
+ it 'hides removed note' do
+ group = create(:group, :private)
+ group.create_admin_note(note: 'A note by an administrator')
+
+ visit admin_group_edit_path(group)
+ fill_in 'group_admin_note_attributes_note', with: ''
+ click_button 'Save changes'
+
+ expect(page).not_to have_content(s_('Admin|Admin notes'))
+ end
end
describe 'add user into a group', :js do
@@ -258,9 +269,12 @@ RSpec.describe 'Admin Groups', feature_category: :subgroups do
expect(page).to have_content('Developer')
end
- find_member_row(current_user).click_button(title: 'Leave')
+ show_actions_for_username(current_user)
+ click_button _('Leave group')
- accept_gl_confirm(button_text: 'Leave')
+ within_modal do
+ click_button _('Leave')
+ end
wait_for_all_requests
diff --git a/spec/features/admin/admin_projects_spec.rb b/spec/features/admin/admin_projects_spec.rb
index 0cb813c40f4..3c7eba2cc97 100644
--- a/spec/features/admin/admin_projects_spec.rb
+++ b/spec/features/admin/admin_projects_spec.rb
@@ -151,12 +151,11 @@ RSpec.describe "Admin::Projects", feature_category: :projects do
expect(find_member_row(current_user)).to have_content('Developer')
- page.within find_member_row(current_user) do
- click_button 'Leave'
- end
+ show_actions_for_username(current_user)
+ click_button _('Leave group')
within_modal do
- click_button('Leave')
+ click_button _('Leave')
end
expect(page).to have_current_path(dashboard_projects_path, ignore_query: true, url: false)
diff --git a/spec/features/admin/admin_sees_background_migrations_spec.rb b/spec/features/admin/admin_sees_background_migrations_spec.rb
index e1746dad196..4b8636da6b4 100644
--- a/spec/features/admin/admin_sees_background_migrations_spec.rb
+++ b/spec/features/admin/admin_sees_background_migrations_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe "Admin > Admin sees background migrations", feature_category: :database do
+ include ListboxHelpers
+
let_it_be(:admin) { create(:admin) }
let(:job_class) { Gitlab::BackgroundMigration::CopyColumnUsingBackgroundMigrationJob }
@@ -204,7 +206,7 @@ RSpec.describe "Admin > Admin sees background migrations", feature_category: :da
it 'does not render the database listbox' do
visit admin_background_migrations_path
- expect(page).not_to have_selector('[data-testid="database-listbox"]')
+ expect(page).not_to have_button('main')
end
end
@@ -215,41 +217,26 @@ RSpec.describe "Admin > Admin sees background migrations", feature_category: :da
allow(Gitlab::Database).to receive(:db_config_names).and_return(%w[main ci])
end
- it 'does render the database listbox' do
- visit admin_background_migrations_path
-
- expect(page).to have_selector('[data-testid="database-listbox"]')
- end
-
- it 'defaults to main when no parameter is passed' do
+ it 'renders the database listbox' do
visit admin_background_migrations_path
- listbox = page.find('[data-testid="database-listbox"]')
-
- expect(listbox).to have_text('main')
+ expect(page).to have_button('main')
end
it 'shows correct database when a parameter is passed' do
visit admin_background_migrations_path(database: 'ci')
- listbox = page.find('[data-testid="database-listbox"]')
-
- expect(listbox).to have_text('ci')
+ expect(page).to have_button('ci')
end
it 'updates the path to correct database when clicking on listbox option' do
visit admin_background_migrations_path
- listbox = page.find('[data-testid="database-listbox"]')
- expect(listbox).to have_text('main')
-
- listbox.find('button').click
- listbox.find('li', text: 'ci').click
- wait_for_requests
+ click_button 'main'
+ select_listbox_item('ci')
expect(page).to have_current_path(admin_background_migrations_path(database: 'ci'))
- listbox = page.find('[data-testid="database-listbox"]')
- expect(listbox).to have_text('ci')
+ expect(page).to have_button('ci')
end
end
end
diff --git a/spec/features/admin/admin_users_spec.rb b/spec/features/admin/admin_users_spec.rb
index 1f40f1f1bce..ca08bc9e577 100644
--- a/spec/features/admin/admin_users_spec.rb
+++ b/spec/features/admin/admin_users_spec.rb
@@ -69,13 +69,9 @@ RSpec.describe "Admin::Users", feature_category: :user_management do
expect(page).not_to have_content(message)
end
- context 'with no license and service ping disabled' do
+ context 'with no license and service ping disabled', :without_license do
before do
stub_application_setting(usage_ping_enabled: false)
-
- if Gitlab.ee?
- allow(License).to receive(:current).and_return(nil)
- end
end
it 'renders registration features CTA' do
diff --git a/spec/features/admin/dashboard_spec.rb b/spec/features/admin/dashboard_spec.rb
index baca60134b9..06f9c531e74 100644
--- a/spec/features/admin/dashboard_spec.rb
+++ b/spec/features/admin/dashboard_spec.rb
@@ -49,8 +49,7 @@ RSpec.describe 'admin visits dashboard' do
end
expect(page).to have_content('Blocked users 7')
- expect(page).to have_content('Total users 78')
- expect(page).to have_content('Active users 71')
+ expect(page).to have_content('Total users (active users + blocked users) 78')
end
end
diff --git a/spec/features/admin/users/users_spec.rb b/spec/features/admin/users/users_spec.rb
index 4b49e8f4bc6..975af84969d 100644
--- a/spec/features/admin/users/users_spec.rb
+++ b/spec/features/admin/users/users_spec.rb
@@ -28,7 +28,7 @@ RSpec.describe 'Admin::Users', feature_category: :user_management do
expect(page).to have_content(current_user.email)
expect(page).to have_content(current_user.name)
- expect(page).to have_content(current_user.created_at.strftime('%e %b, %Y'))
+ expect(page).to have_content(current_user.created_at.strftime('%b %d, %Y'))
expect(page).to have_content(user.email)
expect(page).to have_content(user.name)
expect(page).to have_content('Projects')
@@ -367,6 +367,8 @@ RSpec.describe 'Admin::Users', feature_category: :user_management do
.to eq(Gitlab.config.gitlab.default_projects_limit)
expect(user.can_create_group)
.to eq(Gitlab::CurrentSettings.can_create_group)
+ expect(user.private_profile)
+ .to eq(Gitlab::CurrentSettings.user_defaults_to_private_profile)
end
it 'creates user with valid data' do
@@ -564,6 +566,7 @@ RSpec.describe 'Admin::Users', feature_category: :user_management do
fill_in 'user_password', with: 'AValidPassword1'
fill_in 'user_password_confirmation', with: 'AValidPassword1'
choose 'user_access_level_admin'
+ check 'Private profile'
click_button 'Save changes'
end
@@ -577,6 +580,7 @@ RSpec.describe 'Admin::Users', feature_category: :user_management do
expect(user.name).to eq('Big Bang')
expect(user.admin?).to be_truthy
expect(user.password_expires_at).to be <= Time.zone.now
+ expect(user.private_profile).to eq(true)
end
end
diff --git a/spec/features/callouts/registration_enabled_spec.rb b/spec/features/callouts/registration_enabled_spec.rb
index 1ea52dbf12a..ac7b68876da 100644
--- a/spec/features/callouts/registration_enabled_spec.rb
+++ b/spec/features/callouts/registration_enabled_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'Registration enabled callout', feature_category: :authentication
stub_application_setting(signup_enabled: true)
end
- context 'when an admin is logged in' do
+ context 'when an admin is logged in', :do_not_mock_admin_mode_setting do
before do
sign_in(admin)
end
diff --git a/spec/features/commit_spec.rb b/spec/features/commit_spec.rb
index 649b67e7fd0..a9672569a4a 100644
--- a/spec/features/commit_spec.rb
+++ b/spec/features/commit_spec.rb
@@ -22,6 +22,8 @@ RSpec.describe 'Commit', feature_category: :source_code_management do
end
describe "commit details" do
+ subject { page }
+
before do
visit project_commit_path(project, commit)
end
@@ -37,6 +39,8 @@ RSpec.describe 'Commit', feature_category: :source_code_management do
it 'renders diff stats', :js do
expect(page).to have_selector(".diff-stats")
end
+
+ it_behaves_like 'code highlight'
end
describe "pagination" do
diff --git a/spec/features/dashboard/activity_spec.rb b/spec/features/dashboard/activity_spec.rb
index b1734cb353b..edb3dacc2cc 100644
--- a/spec/features/dashboard/activity_spec.rb
+++ b/spec/features/dashboard/activity_spec.rb
@@ -9,6 +9,8 @@ RSpec.describe 'Dashboard > Activity', feature_category: :users do
sign_in(user)
end
+ it_behaves_like 'a dashboard page with sidebar', :activity_dashboard_path, :activity
+
context 'tabs' do
it 'shows Your Projects' do
visit activity_dashboard_path
diff --git a/spec/features/dashboard/groups_list_spec.rb b/spec/features/dashboard/groups_list_spec.rb
index b28e2ccf787..a45e0a58ed6 100644
--- a/spec/features/dashboard/groups_list_spec.rb
+++ b/spec/features/dashboard/groups_list_spec.rb
@@ -19,6 +19,8 @@ RSpec.describe 'Dashboard Groups page', :js, feature_category: :subgroups do
page.find("[data-testid='group-#{group.id}-dropdown-button'").click
end
+ it_behaves_like 'a dashboard page with sidebar', :dashboard_groups_path, :groups
+
it 'shows groups user is member of' do
group.add_owner(user)
nested_group.add_owner(user)
diff --git a/spec/features/dashboard/issuables_counter_spec.rb b/spec/features/dashboard/issuables_counter_spec.rb
index 5c7285f0491..5dc59cfa841 100644
--- a/spec/features/dashboard/issuables_counter_spec.rb
+++ b/spec/features/dashboard/issuables_counter_spec.rb
@@ -12,7 +12,6 @@ RSpec.describe 'Navigation bar counter', :use_clean_rails_memory_store_caching,
issue.assignees = [user]
merge_request.update!(assignees: [user])
sign_in(user)
- stub_feature_flags(limit_assigned_issues_count: false)
end
it 'reflects dashboard issues count' do
@@ -20,9 +19,9 @@ RSpec.describe 'Navigation bar counter', :use_clean_rails_memory_store_caching,
expect_counters('issues', '1', n_("%d assigned issue", "%d assigned issues", 1) % 1)
- issue.assignees = []
+ issue.update!(assignees: [])
- user.invalidate_cache_counts
+ Users::AssignedIssuesCountService.new(current_user: user).delete_cache
travel_to(3.minutes.from_now) do
visit issues_path
@@ -31,28 +30,6 @@ RSpec.describe 'Navigation bar counter', :use_clean_rails_memory_store_caching,
end
end
- context 'when :limit_assigned_issues_count FF is used' do
- before do
- stub_feature_flags(limit_assigned_issues_count: true)
- end
-
- it 'reflects dashboard issues count' do
- visit issues_path
-
- expect_counters('issues', '1', n_("%d assigned issue", "%d assigned issues", 1) % 1)
-
- issue.update!(assignees: [])
-
- Users::AssignedIssuesCountService.new(current_user: user).delete_cache
-
- travel_to(3.minutes.from_now) do
- visit issues_path
-
- expect_counters('issues', '0', n_("%d assigned issue", "%d assigned issues", 0) % 0)
- end
- end
- end
-
it 'reflects dashboard merge requests count', :js do
visit merge_requests_path
diff --git a/spec/features/dashboard/issues_spec.rb b/spec/features/dashboard/issues_spec.rb
index d74965f58fa..ae375bd3e13 100644
--- a/spec/features/dashboard/issues_spec.rb
+++ b/spec/features/dashboard/issues_spec.rb
@@ -21,6 +21,8 @@ RSpec.describe 'Dashboard Issues', feature_category: :team_planning do
visit issues_dashboard_path(assignee_username: current_user.username)
end
+ it_behaves_like 'a dashboard page with sidebar', :issues_dashboard_path, :issues
+
describe 'issues' do
it 'shows issues assigned to current user' do
expect(page).to have_content(assigned_issue.title)
diff --git a/spec/features/dashboard/merge_requests_spec.rb b/spec/features/dashboard/merge_requests_spec.rb
index 56d7c45de5d..a146a6987bc 100644
--- a/spec/features/dashboard/merge_requests_spec.rb
+++ b/spec/features/dashboard/merge_requests_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Dashboard Merge Requests', feature_category: :code_review do
+RSpec.describe 'Dashboard Merge Requests', feature_category: :code_review_workflow do
include Spec::Support::Helpers::Features::SortingHelpers
include FilteredSearchHelpers
include ProjectForksHelper
@@ -19,6 +19,8 @@ RSpec.describe 'Dashboard Merge Requests', feature_category: :code_review do
sign_in(current_user)
end
+ it_behaves_like 'a dashboard page with sidebar', :merge_requests_dashboard_path, :merge_requests
+
it 'disables target branch filter' do
visit merge_requests_dashboard_path
diff --git a/spec/features/dashboard/milestones_spec.rb b/spec/features/dashboard/milestones_spec.rb
index b4d0d9c5812..a9f23f90bb1 100644
--- a/spec/features/dashboard/milestones_spec.rb
+++ b/spec/features/dashboard/milestones_spec.rb
@@ -26,6 +26,8 @@ RSpec.describe 'Dashboard > Milestones', feature_category: :team_planning do
visit dashboard_milestones_path
end
+ it_behaves_like 'a dashboard page with sidebar', :dashboard_milestones_path, :milestones
+
it 'sees milestones' do
expect(page).to have_current_path dashboard_milestones_path, ignore_query: true
expect(page).to have_content(milestone.title)
diff --git a/spec/features/dashboard/navbar_spec.rb b/spec/features/dashboard/navbar_spec.rb
new file mode 100644
index 00000000000..ff0ff899fc2
--- /dev/null
+++ b/spec/features/dashboard/navbar_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe '"Your work" navbar', feature_category: :navigation do
+ include_context 'dashboard navbar structure'
+
+ let_it_be(:user) { create(:user) }
+
+ it_behaves_like 'verified navigation bar' do
+ before do
+ sign_in(user)
+ visit root_path
+ end
+ end
+end
diff --git a/spec/features/dashboard/projects_spec.rb b/spec/features/dashboard/projects_spec.rb
index 2b89f16bbff..779fbb48ddb 100644
--- a/spec/features/dashboard/projects_spec.rb
+++ b/spec/features/dashboard/projects_spec.rb
@@ -18,6 +18,8 @@ RSpec.describe 'Dashboard Projects', feature_category: :projects do
end
end
+ it_behaves_like "a dashboard page with sidebar", :dashboard_projects_path, :projects
+
context 'when user has access to the project' do
it 'shows role badge' do
visit dashboard_projects_path
diff --git a/spec/features/dashboard/snippets_spec.rb b/spec/features/dashboard/snippets_spec.rb
index ab2cfc0573e..ba40290d866 100644
--- a/spec/features/dashboard/snippets_spec.rb
+++ b/spec/features/dashboard/snippets_spec.rb
@@ -5,6 +5,8 @@ require 'spec_helper'
RSpec.describe 'Dashboard snippets', feature_category: :source_code_management do
let_it_be(:user) { create(:user) }
+ it_behaves_like 'a dashboard page with sidebar', :dashboard_snippets_path, :snippets
+
context 'when the project has snippets' do
let(:project) { create(:project, :public, creator: user) }
let!(:snippets) { create_list(:project_snippet, 2, :public, author: project.first_owner, project: project) }
diff --git a/spec/features/dashboard/todos/todos_spec.rb b/spec/features/dashboard/todos/todos_spec.rb
index 606bc82a7bb..59bb1a452c9 100644
--- a/spec/features/dashboard/todos/todos_spec.rb
+++ b/spec/features/dashboard/todos/todos_spec.rb
@@ -15,6 +15,8 @@ RSpec.describe 'Dashboard Todos', feature_category: :team_planning do
project.add_developer(user)
end
+ it_behaves_like 'a dashboard page with sidebar', :dashboard_todos_path, :todos
+
context 'User does not have todos' do
before do
sign_in(user)
@@ -152,6 +154,22 @@ RSpec.describe 'Dashboard Todos', feature_category: :team_planning do
it_behaves_like 'deleting the todo'
it_behaves_like 'deleting and restoring the todo'
end
+
+ context 'when todo has a note' do
+ let(:note) { create(:note, project: project, note: "Check out stuff", noteable: create(:issue, project: project)) }
+ let!(:todo) { create(:todo, :mentioned, user: user, project: project, author: author, note: note, target: note.noteable) }
+
+ before do
+ sign_in(user)
+ visit dashboard_todos_path
+ end
+
+ it 'shows note preview' do
+ expect(page).to have_no_content('mentioned you:')
+ expect(page).to have_no_content('"Check out stuff"')
+ expect(page).to have_content('Check out stuff')
+ end
+ end
end
context 'User created todos for themself' do
@@ -446,27 +464,30 @@ RSpec.describe 'Dashboard Todos', feature_category: :team_planning do
end
end
- context 'User has a todo for an access requested raised for group membership' do
- let_it_be(:group) { create(:group, :public) }
+ context 'User requested access' do
+ shared_examples 'has todo present with access request content' do
+ specify do
+ create(:todo, :member_access_requested,
+ user: user,
+ target: target,
+ author: author
+ )
+ target.add_owner(user)
- let_it_be(:todo) do
- create(:todo, :member_access_requested,
- user: user,
- target: group,
- author: author,
- group: group)
- end
-
- before do
- group.add_owner(user)
- sign_in(user)
+ sign_in(user)
+ visit dashboard_todos_path
- visit dashboard_todos_path
+ expect(page).to have_selector('.todos-list .todo', count: 1)
+ expect(page).to have_content "#{author.name} has requested access to #{target.class.name.downcase} #{target.name}"
+ end
end
- it 'has todo present with access request content' do
- expect(page).to have_selector('.todos-list .todo', count: 1)
- expect(page).to have_content "#{author.name} has requested access to group #{group.name}"
+ context 'when user requests access to project or group' do
+ %i[project group].each do |target_type|
+ it_behaves_like 'has todo present with access request content' do
+ let_it_be(:target) { create(target_type, :public) }
+ end
+ end
end
end
end
diff --git a/spec/features/dashboard/user_filters_projects_spec.rb b/spec/features/dashboard/user_filters_projects_spec.rb
index 1168a6827fd..8ec9b98c3b3 100644
--- a/spec/features/dashboard/user_filters_projects_spec.rb
+++ b/spec/features/dashboard/user_filters_projects_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe 'Dashboard > User filters projects', feature_category: :projects
it 'returns message when starred projects fitler returns no results' do
fill_in 'project-filter-form-field', with: 'Beta\n'
- expect(page).to have_content('This user doesn\'t have any personal projects')
+ expect(page).to have_content('There are no projects available to be displayed here.')
expect(page).not_to have_content('You don\'t have starred projects yet')
end
end
diff --git a/spec/features/error_tracking/user_sees_error_index_spec.rb b/spec/features/error_tracking/user_sees_error_index_spec.rb
index b7dfb6afc18..f83c8ffe439 100644
--- a/spec/features/error_tracking/user_sees_error_index_spec.rb
+++ b/spec/features/error_tracking/user_sees_error_index_spec.rb
@@ -50,7 +50,7 @@ feature_category: :error_tracking do
end
it 'renders call to action' do
- expect(page).to have_content('Enable error tracking')
+ expect(page).to have_content('Monitor your errors directly in GitLab.')
end
end
diff --git a/spec/features/global_search_spec.rb b/spec/features/global_search_spec.rb
index 7c55551e9c3..15393ec4cd6 100644
--- a/spec/features/global_search_spec.rb
+++ b/spec/features/global_search_spec.rb
@@ -72,10 +72,6 @@ RSpec.describe 'Global search', :js, feature_category: :global_search do
# TODO: Remove this along with feature flag #339348
stub_feature_flags(new_header_search: true)
visit dashboard_projects_path
-
- # initialize javascript loaded input search input field
- find('#search').click
- find('body').click
end
it 'renders updated search bar' do
diff --git a/spec/features/groups/import_export/connect_instance_spec.rb b/spec/features/groups/import_export/connect_instance_spec.rb
index 11cc4bb9b37..8aea18a268b 100644
--- a/spec/features/groups/import_export/connect_instance_spec.rb
+++ b/spec/features/groups/import_export/connect_instance_spec.rb
@@ -25,7 +25,7 @@ RSpec.describe 'Import/Export - Connect to another instance', :js, feature_categ
it 'successfully connects to remote instance' do
pat = 'demo-pat'
- expect(page).to have_content 'Import groups from another instance of GitLab'
+ expect(page).to have_content 'Import groups by direct transfer'
expect(page).to have_content 'Not all related objects are migrated'
fill_in :bulk_import_gitlab_url, with: source_url
@@ -64,7 +64,7 @@ RSpec.describe 'Import/Export - Connect to another instance', :js, feature_categ
click_on 'Connect instance'
- expect(page).to have_content 'Please fill in GitLab source URL'
+ expect(page).to have_content 'Enter the URL for the source instance'
end
end
@@ -89,7 +89,7 @@ RSpec.describe 'Import/Export - Connect to another instance', :js, feature_categ
end
it 'renders fields and button disabled' do
- expect(page).to have_field('GitLab source URL', disabled: true)
+ expect(page).to have_field('GitLab source instance URL', disabled: true)
expect(page).to have_field('Personal access token', disabled: true)
expect(page).to have_button('Connect instance', disabled: true)
end
diff --git a/spec/features/groups/import_export/migration_history_spec.rb b/spec/features/groups/import_export/migration_history_spec.rb
index f851c5e2ec5..9fc9c7898d1 100644
--- a/spec/features/groups/import_export/migration_history_spec.rb
+++ b/spec/features/groups/import_export/migration_history_spec.rb
@@ -12,6 +12,8 @@ RSpec.describe 'Import/Export - GitLab migration history', :js, feature_category
let_it_be(:failed_entity_2) { create(:bulk_import_entity, :failed, bulk_import: user_import_2) }
before do
+ stub_application_setting(bulk_import_enabled: true)
+
gitlab_sign_in(user)
visit new_group_path
@@ -24,7 +26,7 @@ RSpec.describe 'Import/Export - GitLab migration history', :js, feature_category
wait_for_requests
- expect(page).to have_content 'Group import history'
+ expect(page).to have_content 'GitLab Migration history'
expect(page.find('tbody')).to have_css('tr', count: 2)
end
end
diff --git a/spec/features/groups/labels/sort_labels_spec.rb b/spec/features/groups/labels/sort_labels_spec.rb
index c2410246fe1..e177461701e 100644
--- a/spec/features/groups/labels/sort_labels_spec.rb
+++ b/spec/features/groups/labels/sort_labels_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Sort labels', :js, feature_category: :team_planning do
+ include ListboxHelpers
+
let(:user) { create(:user) }
let(:group) { create(:group) }
let!(:label1) { create(:group_label, title: 'Foo', description: 'Lorem ipsum', group: group) }
@@ -28,16 +30,16 @@ RSpec.describe 'Sort labels', :js, feature_category: :team_planning do
it 'sorts by date' do
click_button 'Name'
- sort_options = find('ul[role="listbox"]').all('li').collect(&:text)
-
- expect(sort_options[0]).to eq('Name')
- expect(sort_options[1]).to eq('Name, descending')
- expect(sort_options[2]).to eq('Last created')
- expect(sort_options[3]).to eq('Oldest created')
- expect(sort_options[4]).to eq('Updated date')
- expect(sort_options[5]).to eq('Oldest updated')
+ expect_listbox_items([
+ 'Name',
+ 'Name, descending',
+ 'Last created',
+ 'Oldest created',
+ 'Updated date',
+ 'Oldest updated'
+ ])
- find('li', text: 'Name, descending').click
+ select_listbox_item('Name, descending')
# assert default sorting
within '.other-labels' do
diff --git a/spec/features/groups/members/manage_members_spec.rb b/spec/features/groups/members/manage_members_spec.rb
index 4211f2b6265..5cd5908b359 100644
--- a/spec/features/groups/members/manage_members_spec.rb
+++ b/spec/features/groups/members/manage_members_spec.rb
@@ -50,12 +50,13 @@ RSpec.describe 'Groups > Members > Manage members', feature_category: :subgroups
# Open modal
page.within(second_row) do
- click_button 'Remove member'
+ show_actions
+ click_button _('Remove member')
end
within_modal do
expect(page).to have_unchecked_field 'Also unassign this user from related issues and merge requests'
- click_button('Remove member')
+ click_button _('Remove member')
end
wait_for_requests
diff --git a/spec/features/groups/members/sort_members_spec.rb b/spec/features/groups/members/sort_members_spec.rb
index 4e9adda5f2b..5634122ec16 100644
--- a/spec/features/groups/members/sort_members_spec.rb
+++ b/spec/features/groups/members/sort_members_spec.rb
@@ -56,7 +56,7 @@ RSpec.describe 'Groups > Members > Sort members', :js, feature_category: :subgro
expect(first_row.text).to include(owner.name)
expect(second_row.text).to include(developer.name)
- expect_sort_by('Created on', :asc)
+ expect_sort_by('User created', :asc)
end
it 'sorts by user created on descending' do
@@ -65,7 +65,7 @@ RSpec.describe 'Groups > Members > Sort members', :js, feature_category: :subgro
expect(first_row.text).to include(developer.name)
expect(second_row.text).to include(owner.name)
- expect_sort_by('Created on', :desc)
+ expect_sort_by('User created', :desc)
end
it 'sorts by last activity ascending' do
diff --git a/spec/features/groups/merge_requests_spec.rb b/spec/features/groups/merge_requests_spec.rb
index 87f1f422e90..8a3401d0572 100644
--- a/spec/features/groups/merge_requests_spec.rb
+++ b/spec/features/groups/merge_requests_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group merge requests page', feature_category: :code_review do
+RSpec.describe 'Group merge requests page', feature_category: :code_review_workflow do
include FilteredSearchHelpers
let(:path) { merge_requests_group_path(group) }
diff --git a/spec/features/groups/milestones_sorting_spec.rb b/spec/features/groups/milestones_sorting_spec.rb
index 5543938957a..0f0ecb8f5f3 100644
--- a/spec/features/groups/milestones_sorting_spec.rb
+++ b/spec/features/groups/milestones_sorting_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Milestones sorting', :js, feature_category: :team_planning do
+ include ListboxHelpers
+
let(:group) { create(:group) }
let!(:project) { create(:project_empty_repo, group: group) }
let!(:other_project) { create(:project_empty_repo, group: group) }
@@ -27,13 +29,13 @@ RSpec.describe 'Milestones sorting', :js, feature_category: :team_planning do
expect(page.all('ul.content-list > li strong > a').map(&:text)).to eq(['v2.0', 'v2.0', 'v3.0', 'v1.0', 'v1.0'])
end
- within '[data-testid=milestone_sort_by_dropdown]' do
- click_button 'Due soon'
- expect(find('ul[role="listbox"]').all('li').map(&:text)).to eq(['Due soon', 'Due later', 'Start soon', 'Start later', 'Name, ascending', 'Name, descending'])
+ click_button 'Due soon'
- find('li', text: 'Due later').click
- expect(page).to have_button('Due later')
- end
+ expect_listbox_items(['Due soon', 'Due later', 'Start soon', 'Start later', 'Name, ascending', 'Name, descending'])
+
+ select_listbox_item('Due later')
+
+ expect(page).to have_button('Due later')
# assert descending sorting
within '.milestones' do
diff --git a/spec/features/groups/navbar_spec.rb b/spec/features/groups/navbar_spec.rb
index 180ccab78bc..a52e2d95fed 100644
--- a/spec/features/groups/navbar_spec.rb
+++ b/spec/features/groups/navbar_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group navbar', feature_category: :navigation do
+RSpec.describe 'Group navbar', :with_license, feature_category: :navigation do
include NavbarStructureHelper
include WikiHelpers
diff --git a/spec/features/groups/new_group_page_spec.rb b/spec/features/groups/new_group_page_spec.rb
index 662ef734299..a07c27331d9 100644
--- a/spec/features/groups/new_group_page_spec.rb
+++ b/spec/features/groups/new_group_page_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe 'New group page', :js, feature_category: :subgroups do
sign_in(user)
end
+ it_behaves_like 'a dashboard page with sidebar', :new_group_path, :groups
+
describe 'new top level group alert' do
context 'when a user visits the new group page' do
it 'shows the new top level group alert' do
diff --git a/spec/features/incidents/user_views_incident_spec.rb b/spec/features/incidents/user_views_incident_spec.rb
index 8216aca787a..49041d187dd 100644
--- a/spec/features/incidents/user_views_incident_spec.rb
+++ b/spec/features/incidents/user_views_incident_spec.rb
@@ -57,7 +57,7 @@ RSpec.describe "User views incident", feature_category: :incident_management do
it 'shows incident actions', :js do
click_button 'Incident actions'
- expect(page).to have_link 'Report abuse to administrator'
+ expect(page).to have_button 'Report abuse to administrator'
end
end
end
diff --git a/spec/features/issues/group_label_sidebar_spec.rb b/spec/features/issues/group_label_sidebar_spec.rb
index b26030fe8d0..41450ba3373 100644
--- a/spec/features/issues/group_label_sidebar_spec.rb
+++ b/spec/features/issues/group_label_sidebar_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Group label on issue', feature_category: :team_planning do
+RSpec.describe 'Group label on issue', :with_license, feature_category: :team_planning do
it 'renders link to the project issues page', :js do
group = create(:group)
project = create(:project, :public, namespace: group)
diff --git a/spec/features/issues/issue_header_spec.rb b/spec/features/issues/issue_header_spec.rb
index 090067fc4ac..6c5daa29631 100644
--- a/spec/features/issues/issue_header_spec.rb
+++ b/spec/features/issues/issue_header_spec.rb
@@ -27,7 +27,7 @@ RSpec.describe 'issue header', :js, feature_category: :team_planning do
it 'shows the "New related issue", "Report abuse", and "Delete issue" items', :aggregate_failures do
expect(page).to have_link 'New related issue'
- expect(page).to have_link 'Report abuse to administrator'
+ expect(page).to have_button 'Report abuse to administrator'
expect(page).to have_button 'Delete issue'
expect(page).not_to have_link 'Submit as spam'
end
@@ -68,10 +68,10 @@ RSpec.describe 'issue header', :js, feature_category: :team_planning do
visit project_issue_path(project, authored_issue)
end
- it 'does not show "Report abuse" link in dropdown' do
+ it 'does not show "Report abuse" button in dropdown' do
click_button 'Issue actions'
- expect(page).not_to have_link 'Report abuse to administrator'
+ expect(page).not_to have_button 'Report abuse to administrator'
end
end
end
@@ -116,7 +116,7 @@ RSpec.describe 'issue header', :js, feature_category: :team_planning do
it 'only shows the "New related issue" and "Report abuse" items', :aggregate_failures do
expect(page).to have_link 'New related issue'
- expect(page).to have_link 'Report abuse to administrator'
+ expect(page).to have_button 'Report abuse to administrator'
expect(page).not_to have_link 'Submit as spam'
expect(page).not_to have_button 'Delete issue'
end
@@ -157,10 +157,10 @@ RSpec.describe 'issue header', :js, feature_category: :team_planning do
visit project_issue_path(project, authored_issue)
end
- it 'does not show "Report abuse" link in dropdown' do
+ it 'does not show "Report abuse" button in dropdown' do
click_button 'Issue actions'
- expect(page).not_to have_link 'Report abuse to administrator'
+ expect(page).not_to have_button 'Report abuse to administrator'
end
end
end
diff --git a/spec/features/issues/user_creates_issue_spec.rb b/spec/features/issues/user_creates_issue_spec.rb
index a4b8cb91999..df039493cec 100644
--- a/spec/features/issues/user_creates_issue_spec.rb
+++ b/spec/features/issues/user_creates_issue_spec.rb
@@ -161,6 +161,11 @@ RSpec.describe "User creates issue", feature_category: :team_planning do
let(:project) { create(:project, :public, :repository) }
before do
+ # With multistore feature flags enabled (using an actual Redis store instead of NullStore),
+ # it somehow writes invalid content to Redis and the specs would fail.
+ stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: false)
+ stub_feature_flags(use_primary_store_as_default_for_repository_cache: false)
+
project.repository.create_file(
user,
'.gitlab/issue_templates/bug.md',
diff --git a/spec/features/issues/user_edits_issue_spec.rb b/spec/features/issues/user_edits_issue_spec.rb
index 223832a6ede..19b2633969d 100644
--- a/spec/features/issues/user_edits_issue_spec.rb
+++ b/spec/features/issues/user_edits_issue_spec.rb
@@ -107,14 +107,14 @@ RSpec.describe "Issues > User edits issue", :js, feature_category: :team_plannin
end
it 'places focus on the web editor' do
- toggle_editing_mode_selector = '[data-testid="toggle-editing-mode-button"] label'
content_editor_focused_selector = '[data-testid="content-editor"].is-focused'
markdown_field_focused_selector = 'textarea:focus'
click_edit_issue_description
expect(page).to have_selector(markdown_field_focused_selector)
- find(toggle_editing_mode_selector, text: 'Rich text').click
+ click_on _('View rich text')
+ click_on _('Rich text')
expect(page).not_to have_selector(content_editor_focused_selector)
@@ -124,7 +124,8 @@ RSpec.describe "Issues > User edits issue", :js, feature_category: :team_plannin
expect(page).to have_selector(content_editor_focused_selector)
- find(toggle_editing_mode_selector, text: 'Source').click
+ click_on _('View markdown')
+ click_on _('Markdown')
expect(page).not_to have_selector(markdown_field_focused_selector)
end
diff --git a/spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb b/spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb
index b9a25f47da9..91b18454af5 100644
--- a/spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb
+++ b/spec/features/issues/user_sees_sidebar_updates_in_realtime_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Issues > Real-time sidebar', :js, feature_category: :team_planning do
+RSpec.describe 'Issues > Real-time sidebar', :js, :with_license, feature_category: :team_planning do
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:user) { create(:user) }
diff --git a/spec/features/jira_connect/branches_spec.rb b/spec/features/jira_connect/branches_spec.rb
index 489d3743a2a..8cf07f2ade2 100644
--- a/spec/features/jira_connect/branches_spec.rb
+++ b/spec/features/jira_connect/branches_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Create GitLab branches from Jira', :js, feature_category: :integrations do
+ include ListboxHelpers
+
let_it_be(:alice) { create(:user, name: 'Alice') }
let_it_be(:bob) { create(:user, name: 'Bob') }
@@ -42,7 +44,7 @@ RSpec.describe 'Create GitLab branches from Jira', :js, feature_category: :integ
expect(page).not_to have_text('Alice / bar')
- click_on 'Alice / foo'
+ find('span', text: 'Alice / foo', match: :first).click
end
expect(page).to have_field('Branch name', with: 'ACME-123-my-issue-title')
@@ -57,11 +59,11 @@ RSpec.describe 'Create GitLab branches from Jira', :js, feature_category: :integ
# Switch to project2
- click_on 'Alice / foo'
+ find('span', text: 'Alice / foo', match: :first).click
within_dropdown do
fill_in 'Search', with: ''
- click_on 'Alice / bar'
+ find('span', text: 'Alice / bar', match: :first).click
end
click_on 'master'
@@ -70,9 +72,7 @@ RSpec.describe 'Create GitLab branches from Jira', :js, feature_category: :integ
fill_in 'Search', with: source_branch
wait_for_requests
- within '[role="listbox"]' do
- find('li', text: source_branch).click
- end
+ select_listbox_item(source_branch)
fill_in 'Branch name', with: new_branch
click_on 'Create branch'
diff --git a/spec/features/markdown/observability_spec.rb b/spec/features/markdown/observability_spec.rb
index 0c7d8cc006b..86caf3eb1b1 100644
--- a/spec/features/markdown/observability_spec.rb
+++ b/spec/features/markdown/observability_spec.rb
@@ -80,4 +80,78 @@ RSpec.describe 'Observability rendering', :js do
end
end
end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(observability_group_tab: false)
+ end
+
+ context 'when embedding in an issue' do
+ let(:issue) do
+ create(:issue, project: project, description: observable_url)
+ end
+
+ before do
+ visit project_issue_path(project, issue)
+ wait_for_requests
+ end
+
+ it 'does not render iframe in description' do
+ page.within('.description') do
+ expect(page.html).not_to include(expected)
+ expect(page.html).to include(observable_url)
+ end
+ end
+
+ it 'does not render iframe in comment' do
+ expect(page).not_to have_css('.note-text')
+
+ page.within('.js-main-target-form') do
+ fill_in('note[note]', with: observable_url)
+ click_button('Comment')
+ end
+
+ wait_for_requests
+
+ page.within('.note-text') do
+ expect(page.html).not_to include(expected)
+ expect(page.html).to include(observable_url)
+ end
+ end
+ end
+
+ context 'when embedding in an MR' do
+ let(:merge_request) do
+ create(:merge_request, source_project: project, target_project: project, description: observable_url)
+ end
+
+ before do
+ visit merge_request_path(merge_request)
+ wait_for_requests
+ end
+
+ it 'does not render iframe in description' do
+ page.within('.description') do
+ expect(page.html).not_to include(expected)
+ expect(page.html).to include(observable_url)
+ end
+ end
+
+ it 'does not render iframe in comment' do
+ expect(page).not_to have_css('.note-text')
+
+ page.within('.js-main-target-form') do
+ fill_in('note[note]', with: observable_url)
+ click_button('Comment')
+ end
+
+ wait_for_requests
+
+ page.within('.note-text') do
+ expect(page.html).not_to include(expected)
+ expect(page.html).to include(observable_url)
+ end
+ end
+ end
+ end
end
diff --git a/spec/features/markdown/sandboxed_mermaid_spec.rb b/spec/features/markdown/sandboxed_mermaid_spec.rb
index 26b397a1fd5..0282d02d809 100644
--- a/spec/features/markdown/sandboxed_mermaid_spec.rb
+++ b/spec/features/markdown/sandboxed_mermaid_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe 'Sandboxed Mermaid rendering', :js, feature_category: :team_plann
context 'in an issue' do
let(:issue) { create(:issue, project: project, description: description) }
- it 'includes mermaid frame correctly' do
+ it 'includes mermaid frame correctly', :with_license do
visit project_issue_path(project, issue)
wait_for_requests
diff --git a/spec/features/merge_request/admin_views_hidden_merge_request_spec.rb b/spec/features/merge_request/admin_views_hidden_merge_request_spec.rb
new file mode 100644
index 00000000000..0dbb42a633b
--- /dev/null
+++ b/spec/features/merge_request/admin_views_hidden_merge_request_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Admin views hidden merge request', feature_category: :insider_threat do
+ context 'when signed in as admin and viewing a hidden merge request', :js do
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:author) { create(:user, :banned) }
+ let_it_be(:project) { create(:project, :repository) }
+ let!(:merge_request) { create(:merge_request, source_project: project, author: author) }
+
+ before do
+ sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
+ visit(project_merge_request_path(project, merge_request))
+ end
+
+ it 'shows a hidden merge request icon' do
+ page.within('.detail-page-header-body') do
+ tooltip = format(_('This %{issuable} is hidden because its author has been banned'),
+ issuable: _('merge request'))
+ expect(page).to have_css("div[data-testid='hidden'][title='#{tooltip}']")
+ expect(page).to have_css('svg[data-testid="spam-icon"]')
+ end
+ end
+ end
+end
diff --git a/spec/features/merge_request/batch_comments_spec.rb b/spec/features/merge_request/batch_comments_spec.rb
index e16c1ae094b..736c986d0fe 100644
--- a/spec/features/merge_request/batch_comments_spec.rb
+++ b/spec/features/merge_request/batch_comments_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > Batch comments', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > Batch comments', :js, feature_category: :code_review_workflow do
include MergeRequestDiffHelpers
include RepoHelpers
diff --git a/spec/features/merge_request/close_reopen_report_toggle_spec.rb b/spec/features/merge_request/close_reopen_report_toggle_spec.rb
index 63ed355b16e..9b8e50a31e3 100644
--- a/spec/features/merge_request/close_reopen_report_toggle_spec.rb
+++ b/spec/features/merge_request/close_reopen_report_toggle_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_review do
+RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_review_workflow do
include IssuablesHelper
let(:user) { create(:user) }
@@ -27,12 +27,12 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_r
find('[data-testid="merge-request-actions"]').click
expect(container).to have_link("Close merge request")
- expect(container).to have_link('Report abuse to administrator')
+ expect(container).to have_button('Report abuse to administrator')
end
it 'links to Report Abuse' do
find('[data-testid="merge-request-actions"]').click
- click_link 'Report abuse to administrator'
+ click_button 'Report abuse to administrator'
expect(page).to have_content('Report abuse to administrator')
end
@@ -47,7 +47,7 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_r
expect(container).to have_link('Edit')
expect(container).to have_link('Mark as draft')
expect(container).to have_link('Close merge request')
- expect(container).to have_link('Report abuse to administrator')
+ expect(container).to have_button('Report abuse to administrator')
expect(container).not_to have_link('Reopen merge request')
end
end
@@ -59,7 +59,7 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_r
find('[data-testid="merge-request-actions"]').click
expect(container).to have_link('Edit')
- expect(container).to have_link('Report abuse to administrator')
+ expect(container).to have_button('Report abuse to administrator')
expect(container).to have_link('Reopen merge request')
expect(container).not_to have_link('Close merge request')
end
@@ -73,7 +73,7 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_r
expect(container).to have_link('Edit')
expect(container).to have_link('Reopen merge request')
expect(container).not_to have_link('Close merge request')
- expect(container).not_to have_link('Report abuse to administrator')
+ expect(container).not_to have_button('Report abuse to administrator')
end
end
end
@@ -83,7 +83,7 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_r
it 'shows only the `Edit` button' do
expect(container).to have_link(exact_text: 'Edit')
- expect(container).not_to have_link('Report abuse to administrator')
+ expect(container).not_to have_button('Report abuse to administrator')
expect(container).not_to have_button('Close merge request')
expect(container).not_to have_button('Reopen merge request')
end
@@ -93,7 +93,7 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_r
it 'shows only the `Edit` button' do
expect(container).to have_link(exact_text: 'Edit')
- expect(container).not_to have_link('Report abuse to administrator')
+ expect(container).not_to have_button('Report abuse to administrator')
expect(container).not_to have_button('Close merge request')
expect(container).not_to have_button('Reopen merge request')
end
@@ -101,7 +101,7 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_r
end
end
- context 'when user doesnt have permission to update' do
+ context "when user doesn't have permission to update", :js do
let(:cant_project) { create(:project, :repository) }
let(:cant_issuable) { create(:merge_request, source_project: cant_project) }
@@ -112,7 +112,9 @@ RSpec.describe 'Issuables Close/Reopen/Report toggle', feature_category: :code_r
end
it 'only shows a `Report abuse` button' do
- expect(container).to have_link('Report abuse to administrator')
+ find('[data-testid="merge-request-actions"]').click
+
+ expect(container).to have_button('Report abuse to administrator')
expect(container).not_to have_button('Close merge request')
expect(container).not_to have_button('Reopen merge request')
expect(container).not_to have_link(exact_text: 'Edit')
diff --git a/spec/features/merge_request/maintainer_edits_fork_spec.rb b/spec/features/merge_request/maintainer_edits_fork_spec.rb
index bd040a5b894..b8dc3af8a6a 100644
--- a/spec/features/merge_request/maintainer_edits_fork_spec.rb
+++ b/spec/features/merge_request/maintainer_edits_fork_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'a maintainer edits files on a source-branch of an MR from a fork', :js, :sidekiq_might_not_need_inline,
-feature_category: :code_review do
+feature_category: :code_review_workflow do
include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
include ProjectForksHelper
let(:user) { create(:user, username: 'the-maintainer') }
diff --git a/spec/features/merge_request/merge_request_discussion_lock_spec.rb b/spec/features/merge_request/merge_request_discussion_lock_spec.rb
index b48d4d80647..11ec2a86b43 100644
--- a/spec/features/merge_request/merge_request_discussion_lock_spec.rb
+++ b/spec/features/merge_request/merge_request_discussion_lock_spec.rb
@@ -4,7 +4,7 @@
require 'spec_helper'
-RSpec.describe 'Merge Request Discussion Lock', :js, feature_category: :code_review do
+RSpec.describe 'Merge Request Discussion Lock', :js, feature_category: :code_review_workflow do
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project, author: user) }
diff --git a/spec/features/merge_request/user_accepts_merge_request_spec.rb b/spec/features/merge_request/user_accepts_merge_request_spec.rb
index dda22abada0..8ff0c294b24 100644
--- a/spec/features/merge_request/user_accepts_merge_request_spec.rb
+++ b/spec/features/merge_request/user_accepts_merge_request_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inline, feature_category: :code_review do
+RSpec.describe 'User accepts a merge request', :js, :sidekiq_might_not_need_inline, feature_category: :code_review_workflow do
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:project) { create(:project, :public, :repository) }
let(:user) { create(:user) }
diff --git a/spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb b/spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb
index cf6836b544b..0ff773ef02d 100644
--- a/spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb
+++ b/spec/features/merge_request/user_allows_commits_from_memebers_who_can_merge_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'create a merge request, allowing commits from members who can merge to the target branch', :js,
-feature_category: :code_review do
+feature_category: :code_review_workflow do
include ProjectForksHelper
let(:user) { create(:user) }
let(:target_project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_request/user_approves_spec.rb b/spec/features/merge_request/user_approves_spec.rb
index bfb6a3ec8de..5b5ad4468ec 100644
--- a/spec/features/merge_request/user_approves_spec.rb
+++ b/spec/features/merge_request/user_approves_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User approves', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User approves', :js, feature_category: :code_review_workflow do
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
diff --git a/spec/features/merge_request/user_assigns_themselves_reviewer_spec.rb b/spec/features/merge_request/user_assigns_themselves_reviewer_spec.rb
index 2b93f88e96b..711b2db8a32 100644
--- a/spec/features/merge_request/user_assigns_themselves_reviewer_spec.rb
+++ b/spec/features/merge_request/user_assigns_themselves_reviewer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User assigns themselves as a reviewer', feature_category: :code_review do
+RSpec.describe 'Merge request > User assigns themselves as a reviewer', feature_category: :code_review_workflow do
let_it_be(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:merge_request) { create(:merge_request, :simple, source_project: project, author: user, description: "test mr") }
diff --git a/spec/features/merge_request/user_assigns_themselves_spec.rb b/spec/features/merge_request/user_assigns_themselves_spec.rb
index 826904bd165..ed4ea91f704 100644
--- a/spec/features/merge_request/user_assigns_themselves_spec.rb
+++ b/spec/features/merge_request/user_assigns_themselves_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User assigns themselves', feature_category: :code_review do
+RSpec.describe 'Merge request > User assigns themselves', feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:issue1) { create(:issue, project: project) }
diff --git a/spec/features/merge_request/user_awards_emoji_spec.rb b/spec/features/merge_request/user_awards_emoji_spec.rb
index dceac8d6a69..f43672942ff 100644
--- a/spec/features/merge_request/user_awards_emoji_spec.rb
+++ b/spec/features/merge_request/user_awards_emoji_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User awards emoji', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User awards emoji', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:merge_request) { create(:merge_request, source_project: project, author: create(:user)) }
diff --git a/spec/features/merge_request/user_clicks_merge_request_tabs_spec.rb b/spec/features/merge_request/user_clicks_merge_request_tabs_spec.rb
index 3e3ff91ad19..90cf07831ba 100644
--- a/spec/features/merge_request/user_clicks_merge_request_tabs_spec.rb
+++ b/spec/features/merge_request/user_clicks_merge_request_tabs_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User clicks on merge request tabs', :js, feature_category: :code_review do
+RSpec.describe 'User clicks on merge request tabs', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
diff --git a/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb b/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb
index c5ef6b912fe..537702df12d 100644
--- a/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb
+++ b/spec/features/merge_request/user_closes_reopens_merge_request_state_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'User closes/reopens a merge request', :js, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/297500',
- feature_category: :code_review do
+ feature_category: :code_review_workflow do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
diff --git a/spec/features/merge_request/user_comments_on_commit_spec.rb b/spec/features/merge_request/user_comments_on_commit_spec.rb
index 64fe144cd0d..0538f367022 100644
--- a/spec/features/merge_request/user_comments_on_commit_spec.rb
+++ b/spec/features/merge_request/user_comments_on_commit_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User comments on a commit', :js, feature_category: :code_review do
+RSpec.describe 'User comments on a commit', :js, feature_category: :code_review_workflow do
include MergeRequestDiffHelpers
include RepoHelpers
diff --git a/spec/features/merge_request/user_comments_on_diff_spec.rb b/spec/features/merge_request/user_comments_on_diff_spec.rb
index f1a942d5708..66b87148eb2 100644
--- a/spec/features/merge_request/user_comments_on_diff_spec.rb
+++ b/spec/features/merge_request/user_comments_on_diff_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User comments on a diff', :js, feature_category: :code_review do
+RSpec.describe 'User comments on a diff', :js, feature_category: :code_review_workflow do
include MergeRequestDiffHelpers
include RepoHelpers
diff --git a/spec/features/merge_request/user_comments_on_merge_request_spec.rb b/spec/features/merge_request/user_comments_on_merge_request_spec.rb
index d5ad78746f4..9335615b4c7 100644
--- a/spec/features/merge_request/user_comments_on_merge_request_spec.rb
+++ b/spec/features/merge_request/user_comments_on_merge_request_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User comments on a merge request', :js, feature_category: :code_review do
+RSpec.describe 'User comments on a merge request', :js, feature_category: :code_review_workflow do
include RepoHelpers
let(:project) { create(:project, :repository) }
diff --git a/spec/features/merge_request/user_creates_image_diff_notes_spec.rb b/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
index eb7894f4ef7..1d7a3fae371 100644
--- a/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_creates_image_diff_notes_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User creates image diff notes', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User creates image diff notes', :js, feature_category: :code_review_workflow do
include NoteInteractionHelpers
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_request/user_creates_merge_request_spec.rb b/spec/features/merge_request/user_creates_merge_request_spec.rb
index 50629f11959..1717069a259 100644
--- a/spec/features/merge_request/user_creates_merge_request_spec.rb
+++ b/spec/features/merge_request/user_creates_merge_request_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User creates a merge request', :js, feature_category: :code_review do
+RSpec.describe 'User creates a merge request', :js, feature_category: :code_review_workflow do
include ProjectForksHelper
shared_examples 'creates a merge request' do
diff --git a/spec/features/merge_request/user_creates_mr_spec.rb b/spec/features/merge_request/user_creates_mr_spec.rb
index 5effde234cd..523027582b3 100644
--- a/spec/features/merge_request/user_creates_mr_spec.rb
+++ b/spec/features/merge_request/user_creates_mr_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User creates MR', feature_category: :code_review do
+RSpec.describe 'Merge request > User creates MR', feature_category: :code_review_workflow do
include ProjectForksHelper
before do
diff --git a/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb b/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb
index 4f1119d6c33..3c30ef02a19 100644
--- a/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb
+++ b/spec/features/merge_request/user_customizes_merge_commit_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request < User customizes merge commit message', :js, feature_category: :code_review do
+RSpec.describe 'Merge request < User customizes merge commit message', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:issue_1) { create(:issue, project: project) }
diff --git a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
index c04040dd6fd..60631027d9d 100644
--- a/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
+++ b/spec/features/merge_request/user_edits_assignees_sidebar_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User edits assignees sidebar', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User edits assignees sidebar', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:protected_branch) { create(:protected_branch, :maintainers_can_push, name: 'master', project: project) }
let(:merge_request) { create(:merge_request, :simple, source_project: project, target_branch: protected_branch.name) }
diff --git a/spec/features/merge_request/user_edits_merge_request_spec.rb b/spec/features/merge_request/user_edits_merge_request_spec.rb
index 6701c7d91ae..839081d00dc 100644
--- a/spec/features/merge_request/user_edits_merge_request_spec.rb
+++ b/spec/features/merge_request/user_edits_merge_request_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User edits a merge request', :js, feature_category: :code_review do
+RSpec.describe 'User edits a merge request', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :repository) }
let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let(:user) { create(:user) }
diff --git a/spec/features/merge_request/user_edits_mr_spec.rb b/spec/features/merge_request/user_edits_mr_spec.rb
index 18e6827a872..6fcbfd309e2 100644
--- a/spec/features/merge_request/user_edits_mr_spec.rb
+++ b/spec/features/merge_request/user_edits_mr_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User edits MR', feature_category: :code_review do
+RSpec.describe 'Merge request > User edits MR', feature_category: :code_review_workflow do
include ProjectForksHelper
before do
diff --git a/spec/features/merge_request/user_edits_reviewers_sidebar_spec.rb b/spec/features/merge_request/user_edits_reviewers_sidebar_spec.rb
index 38c76314b9e..26a9b955e2d 100644
--- a/spec/features/merge_request/user_edits_reviewers_sidebar_spec.rb
+++ b/spec/features/merge_request/user_edits_reviewers_sidebar_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User edits reviewers sidebar', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User edits reviewers sidebar', :js, feature_category: :code_review_workflow do
context 'with invite members considerations' do
let_it_be(:merge_request) { create(:merge_request) }
let_it_be(:project) { merge_request.project }
diff --git a/spec/features/merge_request/user_expands_diff_spec.rb b/spec/features/merge_request/user_expands_diff_spec.rb
index 8adbdcd310c..9fbb5773eae 100644
--- a/spec/features/merge_request/user_expands_diff_spec.rb
+++ b/spec/features/merge_request/user_expands_diff_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User expands diff', :js, feature_category: :code_review do
+RSpec.describe 'User expands diff', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_branch: 'expand-collapse-files', source_project: project, target_project: project) }
diff --git a/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb b/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
index 1b9b3941714..a013666a496 100644
--- a/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
+++ b/spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Batch diffs', :js, feature_category: :code_review do
+RSpec.describe 'Batch diffs', :js, feature_category: :code_review_workflow do
include MergeRequestDiffHelpers
include RepoHelpers
diff --git a/spec/features/merge_request/user_locks_discussion_spec.rb b/spec/features/merge_request/user_locks_discussion_spec.rb
index 1bfd52d49e8..a603a5c1e0b 100644
--- a/spec/features/merge_request/user_locks_discussion_spec.rb
+++ b/spec/features/merge_request/user_locks_discussion_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User locks discussion', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User locks discussion', :js, feature_category: :code_review_workflow do
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
diff --git a/spec/features/merge_request/user_manages_subscription_spec.rb b/spec/features/merge_request/user_manages_subscription_spec.rb
index 16d869fc5a1..d4ccc4a93b5 100644
--- a/spec/features/merge_request/user_manages_subscription_spec.rb
+++ b/spec/features/merge_request/user_manages_subscription_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User manages subscription', :js, feature_category: :code_review do
+RSpec.describe 'User manages subscription', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let(:user) { create(:user) }
diff --git a/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb b/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb
index 201cdc94b56..8cbc2b975e4 100644
--- a/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb
+++ b/spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User marks merge request as draft', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User marks merge request as draft', :js, feature_category: :code_review_workflow do
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
diff --git a/spec/features/merge_request/user_merges_immediately_spec.rb b/spec/features/merge_request/user_merges_immediately_spec.rb
index b0aeea997f0..79c166434aa 100644
--- a/spec/features/merge_request/user_merges_immediately_spec.rb
+++ b/spec/features/merge_request/user_merges_immediately_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge requests > User merges immediately', :js, feature_category: :code_review do
+RSpec.describe 'Merge requests > User merges immediately', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let!(:merge_request) do
diff --git a/spec/features/merge_request/user_merges_merge_request_spec.rb b/spec/features/merge_request/user_merges_merge_request_spec.rb
index 4196fdd5dac..6ffb33603d5 100644
--- a/spec/features/merge_request/user_merges_merge_request_spec.rb
+++ b/spec/features/merge_request/user_merges_merge_request_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe "User merges a merge request", :js, feature_category: :code_review do
+RSpec.describe "User merges a merge request", :js, feature_category: :code_review_workflow do
let(:user) { project.first_owner }
before do
diff --git a/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb
index 447418b5a4b..c73ba1bdbe5 100644
--- a/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_merges_only_if_pipeline_succeeds_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User merges only if pipeline succeeds', :js, feature_category: :code_review_workflow do
let(:merge_request) { create(:merge_request_with_diffs) }
let(:project) { merge_request.target_project }
diff --git a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
index 78a21527794..6d2c8f15a82 100644
--- a/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
+++ b/spec/features/merge_request/user_merges_when_pipeline_succeeds_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User merges when pipeline succeeds', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:merge_request) do
diff --git a/spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb b/spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb
index 116de50f2a2..7cb1c95f6dc 100644
--- a/spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb
+++ b/spec/features/merge_request/user_opens_checkout_branch_modal_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User opens checkout branch modal', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User opens checkout branch modal', :js, feature_category: :code_review_workflow do
include ProjectForksHelper
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_request/user_opens_context_commits_modal_spec.rb b/spec/features/merge_request/user_opens_context_commits_modal_spec.rb
index f32a51cfcd4..99764e2c052 100644
--- a/spec/features/merge_request/user_opens_context_commits_modal_spec.rb
+++ b/spec/features/merge_request/user_opens_context_commits_modal_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > Context commits', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > Context commits', :js, feature_category: :code_review_workflow do
let(:user) { create(:user) }
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
diff --git a/spec/features/merge_request/user_posts_diff_notes_spec.rb b/spec/features/merge_request/user_posts_diff_notes_spec.rb
index f2ec0e2df6d..a74a8b1cd5a 100644
--- a/spec/features/merge_request/user_posts_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_diff_notes_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User posts diff notes', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User posts diff notes', :js, feature_category: :code_review_workflow do
include MergeRequestDiffHelpers
include Spec::Support::Helpers::ModalHelpers
diff --git a/spec/features/merge_request/user_posts_notes_spec.rb b/spec/features/merge_request/user_posts_notes_spec.rb
index 194e04a9544..f167ab8fe8a 100644
--- a/spec/features/merge_request/user_posts_notes_spec.rb
+++ b/spec/features/merge_request/user_posts_notes_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User posts notes', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User posts notes', :js, feature_category: :code_review_workflow do
include NoteInteractionHelpers
let_it_be(:project) { create(:project, :repository) }
diff --git a/spec/features/merge_request/user_rebases_merge_request_spec.rb b/spec/features/merge_request/user_rebases_merge_request_spec.rb
index c3ee5ddc3b1..1b46b4e1d7f 100644
--- a/spec/features/merge_request/user_rebases_merge_request_spec.rb
+++ b/spec/features/merge_request/user_rebases_merge_request_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe "User rebases a merge request", :js, feature_category: :code_review do
+RSpec.describe "User rebases a merge request", :js, feature_category: :code_review_workflow do
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:user) { project.first_owner }
diff --git a/spec/features/merge_request/user_resolves_conflicts_spec.rb b/spec/features/merge_request/user_resolves_conflicts_spec.rb
index d4c80c1e9e2..7b1afd786f7 100644
--- a/spec/features/merge_request/user_resolves_conflicts_spec.rb
+++ b/spec/features/merge_request/user_resolves_conflicts_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User resolves conflicts', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User resolves conflicts', :js, feature_category: :code_review_workflow do
include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
let(:project) { create(:project, :repository) }
diff --git a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
index f0507e94424..c3b9068d708 100644
--- a/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
+++ b/spec/features/merge_request/user_resolves_diff_notes_and_discussions_resolve_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User resolves diff notes and threads', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User resolves diff notes and threads', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:guest) { create(:user) }
diff --git a/spec/features/merge_request/user_resolves_outdated_diff_discussions_spec.rb b/spec/features/merge_request/user_resolves_outdated_diff_discussions_spec.rb
index a7508ede1a1..5c41ac79552 100644
--- a/spec/features/merge_request/user_resolves_outdated_diff_discussions_spec.rb
+++ b/spec/features/merge_request/user_resolves_outdated_diff_discussions_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User resolves outdated diff discussions', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User resolves outdated diff discussions',
+ :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :repository, :public) }
let(:merge_request) do
diff --git a/spec/features/merge_request/user_resolves_wip_mr_spec.rb b/spec/features/merge_request/user_resolves_wip_mr_spec.rb
index b7f20a16a3f..8a19a72f6ae 100644
--- a/spec/features/merge_request/user_resolves_wip_mr_spec.rb
+++ b/spec/features/merge_request/user_resolves_wip_mr_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User resolves Draft', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User resolves Draft', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:merge_request) do
diff --git a/spec/features/merge_request/user_reverts_merge_request_spec.rb b/spec/features/merge_request/user_reverts_merge_request_spec.rb
index edfa9267871..43ce473b407 100644
--- a/spec/features/merge_request/user_reverts_merge_request_spec.rb
+++ b/spec/features/merge_request/user_reverts_merge_request_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User reverts a merge request', :js, feature_category: :code_review do
+RSpec.describe 'User reverts a merge request', :js, feature_category: :code_review_workflow do
let(:merge_request) { create(:merge_request, :simple, source_project: project) }
let(:project) { create(:project, :public, :repository) }
let(:user) { create(:user) }
diff --git a/spec/features/merge_request/user_reviews_image_spec.rb b/spec/features/merge_request/user_reviews_image_spec.rb
index 5814dc6b58c..815b006d029 100644
--- a/spec/features/merge_request/user_reviews_image_spec.rb
+++ b/spec/features/merge_request/user_reviews_image_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > image review', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > image review', :js, feature_category: :code_review_workflow do
include MergeRequestDiffHelpers
include RepoHelpers
diff --git a/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb b/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb
index fdd2aeec274..1c8bb903f7d 100644
--- a/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb
+++ b/spec/features/merge_request/user_scrolls_to_note_on_load_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User scrolls to note on load', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User scrolls to note on load', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:merge_request) { create(:merge_request, source_project: project, author: user) }
diff --git a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
index 8c2fc62d16f..94393ea00e4 100644
--- a/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
+++ b/spec/features/merge_request/user_sees_avatar_on_diff_notes_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
include Spec::Support::Helpers::ModalHelpers # rubocop:disable Style/MixinUsage
-RSpec.describe 'Merge request > User sees avatars on diff notes', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User sees avatars on diff notes', :js, feature_category: :code_review_workflow do
include NoteInteractionHelpers
include Spec::Support::Helpers::ModalHelpers
include MergeRequestDiffHelpers
diff --git a/spec/features/merge_request/user_sees_breadcrumb_links_spec.rb b/spec/features/merge_request/user_sees_breadcrumb_links_spec.rb
index 0b6aefcdab6..4d91669f206 100644
--- a/spec/features/merge_request/user_sees_breadcrumb_links_spec.rb
+++ b/spec/features/merge_request/user_sees_breadcrumb_links_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'New merge request breadcrumb', feature_category: :code_review do
+RSpec.describe 'New merge request breadcrumb', feature_category: :code_review_workflow do
let(:project) { create(:project, :repository) }
let(:user) { project.creator }
diff --git a/spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb b/spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb
index bbfa2be47cc..ad2ceeb23e2 100644
--- a/spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb
+++ b/spec/features/merge_request/user_sees_check_out_branch_modal_spec.rb
@@ -2,10 +2,11 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees check out branch modal', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User sees check out branch modal', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:merge_request) { create(:merge_request, source_project: project) }
+ let(:modal_window_title) { 'Check out, review, and resolve locally' }
before do
sign_in(user)
@@ -17,12 +18,12 @@ RSpec.describe 'Merge request > User sees check out branch modal', :js, feature_
end
it 'shows the check out branch modal' do
- expect(page).to have_content('Check out, review, and merge locally')
+ expect(page).to have_content(modal_window_title)
end
it 'closes the check out branch modal with the close action' do
find('.modal button[aria-label="Close"]').click
- expect(page).not_to have_content('Check out, review, and merge locally')
+ expect(page).not_to have_content(modal_window_title)
end
end
diff --git a/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb b/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb
index 07b7cb1e8d8..411a2865b1a 100644
--- a/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb
+++ b/spec/features/merge_request/user_sees_cherry_pick_modal_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User cherry-picks', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User cherry-picks', :js, feature_category: :code_review_workflow do
let(:group) { create(:group) }
let(:project) { create(:project, :repository, namespace: group) }
let(:user) { project.creator }
diff --git a/spec/features/merge_request/user_sees_closing_issues_message_spec.rb b/spec/features/merge_request/user_sees_closing_issues_message_spec.rb
index 9a1d47a13b5..3d41d5e24b2 100644
--- a/spec/features/merge_request/user_sees_closing_issues_message_spec.rb
+++ b/spec/features/merge_request/user_sees_closing_issues_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees closing issues message', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User sees closing issues message', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:issue_1) { create(:issue, project: project) }
diff --git a/spec/features/merge_request/user_sees_deleted_target_branch_spec.rb b/spec/features/merge_request/user_sees_deleted_target_branch_spec.rb
index 16ae8b4304b..5011bd6de14 100644
--- a/spec/features/merge_request/user_sees_deleted_target_branch_spec.rb
+++ b/spec/features/merge_request/user_sees_deleted_target_branch_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees deleted target branch', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User sees deleted target branch', :js, feature_category: :code_review_workflow do
let(:merge_request) { create(:merge_request) }
let(:project) { merge_request.project }
let(:user) { project.creator }
diff --git a/spec/features/merge_request/user_sees_diff_spec.rb b/spec/features/merge_request/user_sees_diff_spec.rb
index 101ff8fc152..daeeaa1bd88 100644
--- a/spec/features/merge_request/user_sees_diff_spec.rb
+++ b/spec/features/merge_request/user_sees_diff_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees diff', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User sees diff', :js, feature_category: :code_review_workflow do
include ProjectForksHelper
include RepoHelpers
include MergeRequestDiffHelpers
diff --git a/spec/features/merge_request/user_sees_discussions_navigation_spec.rb b/spec/features/merge_request/user_sees_discussions_navigation_spec.rb
index a22fb2cff00..9d3046a9a72 100644
--- a/spec/features/merge_request/user_sees_discussions_navigation_spec.rb
+++ b/spec/features/merge_request/user_sees_discussions_navigation_spec.rb
@@ -2,14 +2,12 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees discussions navigation', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User sees discussions navigation', :js, feature_category: :code_review_workflow do
let_it_be(:project) { create(:project, :public, :repository) }
let_it_be(:user) { project.creator }
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
before do
- # FIXME: before removing this please fix discussions navigation with this flag enabled
- stub_feature_flags(moved_mr_sidebar: false)
project.add_maintainer(user)
sign_in(user)
end
@@ -194,29 +192,10 @@ RSpec.describe 'Merge request > User sees discussions navigation', :js, feature_
end
def goto_next_thread
- begin
- # this is required when moved_mr_sidebar is enabled
- page.within('.issue-sticky-header') do
- click_button 'Go to next unresolved thread'
- end
- rescue StandardError
- click_button 'Go to next unresolved thread'
- end
- wait_for_scroll_end
+ click_button 'Go to next unresolved thread', obscured: false
end
def goto_previous_thread
- begin
- page.within('.issue-sticky-header') do
- click_button 'Go to previous unresolved thread'
- end
- rescue StandardError
- click_button 'Go to previous unresolved thread'
- end
- wait_for_scroll_end
- end
-
- def wait_for_scroll_end
- sleep(1)
+ click_button 'Go to previous unresolved thread', obscured: false
end
end
diff --git a/spec/features/merge_request/user_sees_discussions_spec.rb b/spec/features/merge_request/user_sees_discussions_spec.rb
index 0eae6e39eec..3ca5ac23ddb 100644
--- a/spec/features/merge_request/user_sees_discussions_spec.rb
+++ b/spec/features/merge_request/user_sees_discussions_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees threads', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User sees threads', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:merge_request) { create(:merge_request, source_project: project) }
diff --git a/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb b/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb
index 6db5480abb4..b83580565e4 100644
--- a/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_button_depending_on_unresolved_discussions_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Merge request > User sees merge button depending on unresolved threads', :js,
-feature_category: :code_review do
+feature_category: :code_review_workflow do
let(:project) { create(:project, :repository) }
let(:user) { project.creator }
let!(:merge_request) { create(:merge_request_with_diff_notes, source_project: project, author: user) }
diff --git a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
index f7594c717d1..458746f0854 100644
--- a/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_request_pipelines_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees pipelines triggered by merge request', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User sees pipelines triggered by merge request', :js, feature_category: :code_review_workflow do
include ProjectForksHelper
include TestReportsHelper
diff --git a/spec/features/merge_request/user_sees_merge_widget_spec.rb b/spec/features/merge_request/user_sees_merge_widget_spec.rb
index e5aa0f6e64d..237f361bd72 100644
--- a/spec/features/merge_request/user_sees_merge_widget_spec.rb
+++ b/spec/features/merge_request/user_sees_merge_widget_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees merge widget', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User sees merge widget', :js, feature_category: :code_review_workflow do
include ProjectForksHelper
include TestReportsHelper
include ReactiveCachingHelpers
diff --git a/spec/features/merge_request/user_sees_mr_from_deleted_forked_project_spec.rb b/spec/features/merge_request/user_sees_mr_from_deleted_forked_project_spec.rb
index 4bfdce29c6a..fac0a84f155 100644
--- a/spec/features/merge_request/user_sees_mr_from_deleted_forked_project_spec.rb
+++ b/spec/features/merge_request/user_sees_mr_from_deleted_forked_project_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees MR from deleted forked project', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User sees MR from deleted forked project',
+ :js, feature_category: :code_review_workflow do
include ProjectForksHelper
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb b/spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb
index 8e6f6d04676..9b46cf37648 100644
--- a/spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb
+++ b/spec/features/merge_request/user_sees_mr_with_deleted_source_branch_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
# This test serves as a regression test for a bug that caused an error
# message to be shown by JavaScript when the source branch was deleted.
# Please do not remove ":js".
-RSpec.describe 'Merge request > User sees MR with deleted source branch', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User sees MR with deleted source branch',
+ :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:merge_request) { create(:merge_request, source_project: project) }
let(:user) { project.creator }
diff --git a/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb b/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb
index 8f011f5616b..ac195dd9873 100644
--- a/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb
+++ b/spec/features/merge_request/user_sees_notes_from_forked_project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees notes from forked project', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User sees notes from forked project', :js, feature_category: :code_review_workflow do
include ProjectForksHelper
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_request/user_sees_page_metadata_spec.rb b/spec/features/merge_request/user_sees_page_metadata_spec.rb
index f97732f91a7..50fc60e342e 100644
--- a/spec/features/merge_request/user_sees_page_metadata_spec.rb
+++ b/spec/features/merge_request/user_sees_page_metadata_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees page metadata', feature_category: :code_review do
+RSpec.describe 'Merge request > User sees page metadata', feature_category: :code_review_workflow do
let(:merge_request) { create(:merge_request, description: '**Lorem** _ipsum_ dolor sit [amet](https://example.com)') }
let(:project) { merge_request.target_project }
let(:user) { project.creator }
diff --git a/spec/features/merge_request/user_sees_pipelines_spec.rb b/spec/features/merge_request/user_sees_pipelines_spec.rb
index 8faaf6bf39b..cab940ba704 100644
--- a/spec/features/merge_request/user_sees_pipelines_spec.rb
+++ b/spec/features/merge_request/user_sees_pipelines_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees pipelines', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User sees pipelines', :js, feature_category: :code_review_workflow do
describe 'pipeline tab' do
let(:merge_request) { create(:merge_request) }
let(:project) { merge_request.target_project }
diff --git a/spec/features/merge_request/user_sees_system_notes_spec.rb b/spec/features/merge_request/user_sees_system_notes_spec.rb
index 40402c95d6f..d547aa84bdd 100644
--- a/spec/features/merge_request/user_sees_system_notes_spec.rb
+++ b/spec/features/merge_request/user_sees_system_notes_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees system notes', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User sees system notes', :js, feature_category: :code_review_workflow do
let(:public_project) { create(:project, :public, :repository) }
let(:private_project) { create(:project, :private, :repository) }
let(:user) { private_project.creator }
diff --git a/spec/features/merge_request/user_sees_versions_spec.rb b/spec/features/merge_request/user_sees_versions_spec.rb
index f0ff6e1769a..f94b288300a 100644
--- a/spec/features/merge_request/user_sees_versions_spec.rb
+++ b/spec/features/merge_request/user_sees_versions_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees versions', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User sees versions', :js, feature_category: :code_review_workflow do
include MergeRequestDiffHelpers
let(:merge_request) do
diff --git a/spec/features/merge_request/user_sees_wip_help_message_spec.rb b/spec/features/merge_request/user_sees_wip_help_message_spec.rb
index 1a751af6ded..fdefe5ffb06 100644
--- a/spec/features/merge_request/user_sees_wip_help_message_spec.rb
+++ b/spec/features/merge_request/user_sees_wip_help_message_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees draft help message', feature_category: :code_review do
+RSpec.describe 'Merge request > User sees draft help message', feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
diff --git a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
index 8b6c9dc18f6..b7784de12b9 100644
--- a/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
+++ b/spec/features/merge_request/user_selects_branches_for_new_mr_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User selects branches for new MR', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User selects branches for new MR', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
diff --git a/spec/features/merge_request/user_squashes_merge_request_spec.rb b/spec/features/merge_request/user_squashes_merge_request_spec.rb
index 43590aed3cc..63faf830f7e 100644
--- a/spec/features/merge_request/user_squashes_merge_request_spec.rb
+++ b/spec/features/merge_request/user_squashes_merge_request_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User squashes a merge request', :js, feature_category: :code_review do
+RSpec.describe 'User squashes a merge request', :js, feature_category: :code_review_workflow do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:source_branch) { 'csv' }
diff --git a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
index 5a5494a2fe9..efd88df0f97 100644
--- a/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
+++ b/spec/features/merge_request/user_suggests_changes_on_diff_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User comments on a diff', :js, feature_category: :code_review do
+RSpec.describe 'User comments on a diff', :js, feature_category: :code_review_workflow do
include MergeRequestDiffHelpers
include RepoHelpers
diff --git a/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb b/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb
index 993eb59cb74..15715a6d775 100644
--- a/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb
+++ b/spec/features/merge_request/user_toggles_whitespace_changes_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User toggles whitespace changes', :js, feature_category: :code_review do
+RSpec.describe 'Merge request > User toggles whitespace changes', :js, feature_category: :code_review_workflow do
let(:merge_request) { create(:merge_request) }
let(:project) { merge_request.project }
let(:user) { project.creator }
diff --git a/spec/features/merge_request/user_tries_to_access_private_project_info_through_new_mr_spec.rb b/spec/features/merge_request/user_tries_to_access_private_project_info_through_new_mr_spec.rb
index 5095457509a..5770f5ab94d 100644
--- a/spec/features/merge_request/user_tries_to_access_private_project_info_through_new_mr_spec.rb
+++ b/spec/features/merge_request/user_tries_to_access_private_project_info_through_new_mr_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Merge Request > User tries to access private project information through the new mr page',
-feature_category: :code_review do
+feature_category: :code_review_workflow do
let(:current_user) { create(:user) }
let(:private_project) do
create(:project, :public, :repository,
diff --git a/spec/features/merge_request/user_uses_quick_actions_spec.rb b/spec/features/merge_request/user_uses_quick_actions_spec.rb
index 99befbace74..1a88918da65 100644
--- a/spec/features/merge_request/user_uses_quick_actions_spec.rb
+++ b/spec/features/merge_request/user_uses_quick_actions_spec.rb
@@ -8,7 +8,7 @@ require 'spec_helper'
# Because this kind of spec takes more time to run there is no need to add new ones
# for each existing quick action unless they test something not tested by existing tests.
RSpec.describe 'Merge request > User uses quick actions', :js, :use_clean_rails_redis_caching,
-feature_category: :code_review do
+feature_category: :code_review_workflow do
include Spec::Support::Helpers::Features::NotesHelpers
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_request/user_views_auto_expanding_diff_spec.rb b/spec/features/merge_request/user_views_auto_expanding_diff_spec.rb
index 19a77a9192c..57072421986 100644
--- a/spec/features/merge_request/user_views_auto_expanding_diff_spec.rb
+++ b/spec/features/merge_request/user_views_auto_expanding_diff_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User views diffs file-by-file', :js, feature_category: :code_review do
+RSpec.describe 'User views diffs file-by-file', :js, feature_category: :code_review_workflow do
let(:merge_request) do
create(:merge_request, source_branch: 'squash-large-files', source_project: project, target_project: project)
end
diff --git a/spec/features/merge_request/user_views_diffs_commit_spec.rb b/spec/features/merge_request/user_views_diffs_commit_spec.rb
index 84cbfb35539..ba1b41982c9 100644
--- a/spec/features/merge_request/user_views_diffs_commit_spec.rb
+++ b/spec/features/merge_request/user_views_diffs_commit_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User views diff by commit', :js, feature_category: :code_review do
+RSpec.describe 'User views diff by commit', :js, feature_category: :code_review_workflow do
let(:merge_request) do
create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
end
diff --git a/spec/features/merge_request/user_views_diffs_file_by_file_spec.rb b/spec/features/merge_request/user_views_diffs_file_by_file_spec.rb
index 9db6f86e14d..4d59bd4ccb0 100644
--- a/spec/features/merge_request/user_views_diffs_file_by_file_spec.rb
+++ b/spec/features/merge_request/user_views_diffs_file_by_file_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User views diffs file-by-file', :js, feature_category: :code_review do
+RSpec.describe 'User views diffs file-by-file', :js, feature_category: :code_review_workflow do
let(:merge_request) do
create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
end
diff --git a/spec/features/merge_request/user_views_diffs_spec.rb b/spec/features/merge_request/user_views_diffs_spec.rb
index 7363f6dfb32..6b759625c61 100644
--- a/spec/features/merge_request/user_views_diffs_spec.rb
+++ b/spec/features/merge_request/user_views_diffs_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User views diffs', :js, feature_category: :code_review do
+RSpec.describe 'User views diffs', :js, feature_category: :code_review_workflow do
let(:merge_request) do
create(:merge_request_with_diffs, source_project: project, target_project: project, source_branch: 'merge-test')
end
diff --git a/spec/features/merge_request/user_views_merge_request_from_deleted_fork_spec.rb b/spec/features/merge_request/user_views_merge_request_from_deleted_fork_spec.rb
index 2a9275adfcf..8f448d06db1 100644
--- a/spec/features/merge_request/user_views_merge_request_from_deleted_fork_spec.rb
+++ b/spec/features/merge_request/user_views_merge_request_from_deleted_fork_spec.rb
@@ -6,7 +6,7 @@ require 'spec_helper'
# updated.
# This can occur when the fork a merge request is created from is in the process
# of being destroyed.
-RSpec.describe 'User views merged merge request from deleted fork', feature_category: :code_review do
+RSpec.describe 'User views merged merge request from deleted fork', feature_category: :code_review_workflow do
include ProjectForksHelper
let(:project) { create(:project, :repository) }
diff --git a/spec/features/merge_request/user_views_open_merge_request_spec.rb b/spec/features/merge_request/user_views_open_merge_request_spec.rb
index 8b9e973217d..e481e3f2dfb 100644
--- a/spec/features/merge_request/user_views_open_merge_request_spec.rb
+++ b/spec/features/merge_request/user_views_open_merge_request_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User views an open merge request', feature_category: :code_review do
+RSpec.describe 'User views an open merge request', feature_category: :code_review_workflow do
let(:merge_request) do
create(:merge_request, source_project: project, target_project: project, description: '# Description header')
end
diff --git a/spec/features/merge_requests/admin_views_hidden_merge_requests_spec.rb b/spec/features/merge_requests/admin_views_hidden_merge_requests_spec.rb
new file mode 100644
index 00000000000..e7727fbb9dc
--- /dev/null
+++ b/spec/features/merge_requests/admin_views_hidden_merge_requests_spec.rb
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'Admin views hidden merge requests', feature_category: :insider_threat do
+ context 'when signed in as admin and viewing a hidden merge request' do
+ let_it_be(:admin) { create(:admin) }
+ let_it_be(:author) { create(:user, :banned) }
+ let_it_be(:project) { create(:project) }
+ let!(:merge_request) { create(:merge_request, source_project: project, author: author) }
+
+ before do
+ sign_in(admin)
+ gitlab_enable_admin_mode_sign_in(admin)
+ visit(project_merge_requests_path(project))
+ end
+
+ it 'shows a hidden merge request icon' do
+ page.within("#merge_request_#{merge_request.id}") do
+ tooltip = format(_('This %{issuable} is hidden because its author has been banned'),
+ issuable: _('merge request'))
+ expect(page).to have_css("span[title='#{tooltip}']")
+ expect(page).to have_css('svg[data-testid="spam-icon"]')
+ end
+ end
+ end
+end
diff --git a/spec/features/merge_requests/filters_generic_behavior_spec.rb b/spec/features/merge_requests/filters_generic_behavior_spec.rb
index 0d6b5edcbab..197b9fa770d 100644
--- a/spec/features/merge_requests/filters_generic_behavior_spec.rb
+++ b/spec/features/merge_requests/filters_generic_behavior_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge Requests > Filters generic behavior', :js, feature_category: :code_review do
+RSpec.describe 'Merge Requests > Filters generic behavior', :js, feature_category: :code_review_workflow do
include FilteredSearchHelpers
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_requests/rss_spec.rb b/spec/features/merge_requests/rss_spec.rb
index 4c73ce3b684..9c9f46278f6 100644
--- a/spec/features/merge_requests/rss_spec.rb
+++ b/spec/features/merge_requests/rss_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project Merge Requests RSS', feature_category: :code_review do
+RSpec.describe 'Project Merge Requests RSS', feature_category: :code_review_workflow do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :repository, group: group, visibility_level: Gitlab::VisibilityLevel::PUBLIC) }
diff --git a/spec/features/merge_requests/user_exports_as_csv_spec.rb b/spec/features/merge_requests/user_exports_as_csv_spec.rb
index aedd7ef4d79..23ac1b264ad 100644
--- a/spec/features/merge_requests/user_exports_as_csv_spec.rb
+++ b/spec/features/merge_requests/user_exports_as_csv_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge Requests > Exports as CSV', :js, feature_category: :code_review do
+RSpec.describe 'Merge Requests > Exports as CSV', :js, feature_category: :code_review_workflow do
let!(:project) { create(:project, :public, :repository) }
let!(:user) { project.creator }
let!(:open_mr) { create(:merge_request, title: 'Bugfix1', source_project: project, target_project: project, source_branch: 'bugfix1') }
diff --git a/spec/features/merge_requests/user_filters_by_approvals_spec.rb b/spec/features/merge_requests/user_filters_by_approvals_spec.rb
index 56c8a65385c..f2748c2549f 100644
--- a/spec/features/merge_requests/user_filters_by_approvals_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_approvals_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge Requests > User filters', :js, feature_category: :code_review do
+RSpec.describe 'Merge Requests > User filters', :js, feature_category: :code_review_workflow do
include FilteredSearchHelpers
let_it_be(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_requests/user_filters_by_assignees_spec.rb b/spec/features/merge_requests/user_filters_by_assignees_spec.rb
index 818cf6f076f..22e38679c0f 100644
--- a/spec/features/merge_requests/user_filters_by_assignees_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_assignees_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge Requests > User filters by assignees', :js, feature_category: :code_review do
+RSpec.describe 'Merge Requests > User filters by assignees', :js, feature_category: :code_review_workflow do
include FilteredSearchHelpers
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_requests/user_filters_by_deployments_spec.rb b/spec/features/merge_requests/user_filters_by_deployments_spec.rb
index 5f7d2fa9f9a..06997806a74 100644
--- a/spec/features/merge_requests/user_filters_by_deployments_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_deployments_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge Requests > User filters by deployments', :js, feature_category: :code_review do
+RSpec.describe 'Merge Requests > User filters by deployments', :js, feature_category: :code_review_workflow do
include FilteredSearchHelpers
let!(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_requests/user_filters_by_draft_spec.rb b/spec/features/merge_requests/user_filters_by_draft_spec.rb
index d50d7edaefb..2b6a2062893 100644
--- a/spec/features/merge_requests/user_filters_by_draft_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_draft_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge Requests > User filters by draft', :js, feature_category: :code_review do
+RSpec.describe 'Merge Requests > User filters by draft', :js, feature_category: :code_review_workflow do
include FilteredSearchHelpers
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_requests/user_filters_by_labels_spec.rb b/spec/features/merge_requests/user_filters_by_labels_spec.rb
index 030eb1b6431..84d75eb221e 100644
--- a/spec/features/merge_requests/user_filters_by_labels_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_labels_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge Requests > User filters by labels', :js, feature_category: :code_review do
+RSpec.describe 'Merge Requests > User filters by labels', :js, feature_category: :code_review_workflow do
include FilteredSearchHelpers
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_requests/user_filters_by_milestones_spec.rb b/spec/features/merge_requests/user_filters_by_milestones_spec.rb
index abdb6c7787b..75bbc3a14a8 100644
--- a/spec/features/merge_requests/user_filters_by_milestones_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_milestones_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge Requests > User filters by milestones', :js, feature_category: :code_review do
+RSpec.describe 'Merge Requests > User filters by milestones', :js, feature_category: :code_review_workflow do
include FilteredSearchHelpers
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb b/spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb
index ae171f47ec3..9f10533bb72 100644
--- a/spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_multiple_criteria_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge requests > User filters by multiple criteria', :js, feature_category: :code_review do
+RSpec.describe 'Merge requests > User filters by multiple criteria', :js, feature_category: :code_review_workflow do
include FilteredSearchHelpers
let!(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_requests/user_filters_by_target_branch_spec.rb b/spec/features/merge_requests/user_filters_by_target_branch_spec.rb
index e0755695f5c..cc30c274b51 100644
--- a/spec/features/merge_requests/user_filters_by_target_branch_spec.rb
+++ b/spec/features/merge_requests/user_filters_by_target_branch_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge Requests > User filters by target branch', :js, feature_category: :code_review do
+RSpec.describe 'Merge Requests > User filters by target branch', :js, feature_category: :code_review_workflow do
include FilteredSearchHelpers
let!(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_requests/user_lists_merge_requests_spec.rb b/spec/features/merge_requests/user_lists_merge_requests_spec.rb
index d9c3bcda0d3..3171ae89fe6 100644
--- a/spec/features/merge_requests/user_lists_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_lists_merge_requests_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge requests > User lists merge requests', feature_category: :code_review do
+RSpec.describe 'Merge requests > User lists merge requests', feature_category: :code_review_workflow do
include MergeRequestHelpers
include SortingHelper
diff --git a/spec/features/merge_requests/user_mass_updates_spec.rb b/spec/features/merge_requests/user_mass_updates_spec.rb
index 133017d5b25..5a9054ece48 100644
--- a/spec/features/merge_requests/user_mass_updates_spec.rb
+++ b/spec/features/merge_requests/user_mass_updates_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge requests > User mass updates', :js, feature_category: :code_review do
+RSpec.describe 'Merge requests > User mass updates', :js, feature_category: :code_review_workflow do
let(:project) { create(:project, :repository) }
let(:user) { project.creator }
let(:user2) { create(:user) }
diff --git a/spec/features/merge_requests/user_sees_empty_state_spec.rb b/spec/features/merge_requests/user_sees_empty_state_spec.rb
index a50ea300249..f5803a47b2c 100644
--- a/spec/features/merge_requests/user_sees_empty_state_spec.rb
+++ b/spec/features/merge_requests/user_sees_empty_state_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge request > User sees empty state', feature_category: :code_review do
+RSpec.describe 'Merge request > User sees empty state', feature_category: :code_review_workflow do
include ProjectForksHelper
let(:project) { create(:project, :public, :repository) }
diff --git a/spec/features/merge_requests/user_sorts_merge_requests_spec.rb b/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
index d268cfc59f3..cf99f2cb94a 100644
--- a/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_sorts_merge_requests_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User sorts merge requests', :js, feature_category: :code_review do
+RSpec.describe 'User sorts merge requests', :js, feature_category: :code_review_workflow do
include CookieHelper
include Spec::Support::Helpers::Features::SortingHelpers
diff --git a/spec/features/merge_requests/user_views_all_merge_requests_spec.rb b/spec/features/merge_requests/user_views_all_merge_requests_spec.rb
index b55e4bd153f..c2eb43d7476 100644
--- a/spec/features/merge_requests/user_views_all_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_views_all_merge_requests_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User views all merge requests', feature_category: :code_review do
+RSpec.describe 'User views all merge requests', feature_category: :code_review_workflow do
let!(:closed_merge_request) { create(:closed_merge_request, source_project: project, target_project: project) }
let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let(:project) { create(:project, :public) }
diff --git a/spec/features/merge_requests/user_views_closed_merge_requests_spec.rb b/spec/features/merge_requests/user_views_closed_merge_requests_spec.rb
index 4c2598dcc9c..175e6bacba5 100644
--- a/spec/features/merge_requests/user_views_closed_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_views_closed_merge_requests_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User views closed merge requests', feature_category: :code_review do
+RSpec.describe 'User views closed merge requests', feature_category: :code_review_workflow do
let!(:closed_merge_request) { create(:closed_merge_request, source_project: project, target_project: project) }
let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let(:project) { create(:project, :public) }
diff --git a/spec/features/merge_requests/user_views_merged_merge_requests_spec.rb b/spec/features/merge_requests/user_views_merged_merge_requests_spec.rb
index 2526f1a855b..54b11c1ee1e 100644
--- a/spec/features/merge_requests/user_views_merged_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_views_merged_merge_requests_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User views merged merge requests', feature_category: :code_review do
+RSpec.describe 'User views merged merge requests', feature_category: :code_review_workflow do
let!(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
let!(:merged_merge_request) { create(:merged_merge_request, source_project: project, target_project: project) }
let(:project) { create(:project, :public) }
diff --git a/spec/features/merge_requests/user_views_open_merge_requests_spec.rb b/spec/features/merge_requests/user_views_open_merge_requests_spec.rb
index 3c53bc5e283..1a2024a5511 100644
--- a/spec/features/merge_requests/user_views_open_merge_requests_spec.rb
+++ b/spec/features/merge_requests/user_views_open_merge_requests_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'User views open merge requests', feature_category: :code_review do
+RSpec.describe 'User views open merge requests', feature_category: :code_review_workflow do
let_it_be(:user) { create(:user) }
shared_examples_for 'shows merge requests' do
diff --git a/spec/features/milestone_spec.rb b/spec/features/milestone_spec.rb
index 50cd6b9e801..61098a8f2cd 100644
--- a/spec/features/milestone_spec.rb
+++ b/spec/features/milestone_spec.rb
@@ -128,8 +128,8 @@ RSpec.describe 'Milestone', feature_category: :team_planning do
click_link 'Reopen Milestone'
- expect(page).not_to have_selector('.gl-bg-red-500')
- expect(page).to have_selector('.gl-bg-green-500')
+ expect(page).not_to have_selector('.badge-danger')
+ expect(page).to have_selector('.badge-success')
end
end
@@ -139,8 +139,8 @@ RSpec.describe 'Milestone', feature_category: :team_planning do
click_link 'Reopen Milestone'
- expect(page).not_to have_selector('.gl-bg-red-500')
- expect(page).to have_selector('.gl-bg-green-500')
+ expect(page).not_to have_selector('.badge-danger')
+ expect(page).to have_selector('.badge-success')
end
end
end
diff --git a/spec/features/nav/new_nav_toggle_spec.rb b/spec/features/nav/new_nav_toggle_spec.rb
index f040d801cfb..8e5cc7df053 100644
--- a/spec/features/nav/new_nav_toggle_spec.rb
+++ b/spec/features/nav/new_nav_toggle_spec.rb
@@ -48,14 +48,19 @@ RSpec.describe 'new navigation toggle', :js, feature_category: :navigation do
expect(user.reload.use_new_navigation).to eq true
end
+
+ it 'shows the old navigation' do
+ expect(page).to have_selector('.js-navbar')
+ expect(page).not_to have_selector('[data-testid="super-sidebar"]')
+ end
end
context 'when user has new nav enabled' do
let(:user_preference) { true }
it 'allows to disable new nav', :aggregate_failures do
- within '.js-nav-user-dropdown' do
- find('a[data-toggle="dropdown"]').click
+ within '[data-testid="super-sidebar"] [data-testid="user-dropdown"]' do
+ find('button').click
expect(page).to have_content('Navigation redesign')
toggle = page.find('.gl-toggle.is-checked')
@@ -66,6 +71,11 @@ RSpec.describe 'new navigation toggle', :js, feature_category: :navigation do
expect(user.reload.use_new_navigation).to eq false
end
+
+ it 'shows the new navigation' do
+ expect(page).not_to have_selector('.js-navbar')
+ expect(page).to have_selector('[data-testid="super-sidebar"]')
+ end
end
end
end
diff --git a/spec/features/oauth_registration_spec.rb b/spec/features/oauth_registration_spec.rb
index 48996164bd3..6e1445a9ed6 100644
--- a/spec/features/oauth_registration_spec.rb
+++ b/spec/features/oauth_registration_spec.rb
@@ -32,7 +32,6 @@ RSpec.describe 'OAuth Registration', :js, :allow_forgery_protection, feature_cat
with_them do
before do
stub_omniauth_provider(provider)
- stub_feature_flags(update_oauth_registration_flow: true)
end
context 'when block_auto_created_users is true' do
@@ -120,22 +119,6 @@ RSpec.describe 'OAuth Registration', :js, :allow_forgery_protection, feature_cat
end
end
- context 'when update_oauth_registration_flow is disabled' do
- before do
- stub_omniauth_provider(:github)
- stub_omniauth_setting(block_auto_created_users: false)
- stub_feature_flags(update_oauth_registration_flow: false)
-
- enforce_terms
- end
-
- it 'presents the terms page' do
- register_via(:github, uid, email)
-
- expect(page).to have_content('These are the terms')
- end
- end
-
def fill_in_welcome_form
select 'Software Developer', from: 'user_role'
click_button 'Get started!'
diff --git a/spec/features/profiles/chat_names_spec.rb b/spec/features/profiles/chat_names_spec.rb
index b3d65ab3a3c..14fdb8ba56f 100644
--- a/spec/features/profiles/chat_names_spec.rb
+++ b/spec/features/profiles/chat_names_spec.rb
@@ -15,7 +15,7 @@ RSpec.describe 'Profile > Chat', feature_category: :users do
{ team_id: 'T00', team_domain: 'my_chat_team', user_id: 'U01', user_name: 'my_chat_user' }
end
- let!(:authorize_url) { ChatNames::AuthorizeUserService.new(integration, params).execute }
+ let!(:authorize_url) { ChatNames::AuthorizeUserService.new(params).execute }
let(:authorize_path) { URI.parse(authorize_url).request_uri }
before do
diff --git a/spec/features/profiles/user_visits_profile_preferences_page_spec.rb b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
index 9eee1b85e5e..e3940973c46 100644
--- a/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
+++ b/spec/features/profiles/user_visits_profile_preferences_page_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'User visits the profile preferences page', :js, feature_category: :users do
- include Select2Helper
+ include ListboxHelpers
let(:user) { create(:user) }
@@ -30,7 +30,7 @@ RSpec.describe 'User visits the profile preferences page', :js, feature_category
describe 'User changes their default dashboard', :js do
it 'creates a flash message' do
- select2('stars', from: '#user_dashboard')
+ select_from_listbox 'Starred Projects', from: 'Your Projects', exact_item_text: true
click_button 'Save changes'
wait_for_requests
@@ -39,7 +39,7 @@ RSpec.describe 'User visits the profile preferences page', :js, feature_category
end
it 'updates their preference' do
- select2('stars', from: '#user_dashboard')
+ select_from_listbox 'Starred Projects', from: 'Your Projects', exact_item_text: true
click_button 'Save changes'
wait_for_requests
@@ -58,7 +58,7 @@ RSpec.describe 'User visits the profile preferences page', :js, feature_category
describe 'User changes their language', :js do
it 'creates a flash message', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/31404' do
- select2('en', from: '#user_preferred_language')
+ select_from_listbox 'English', from: 'English'
click_button 'Save changes'
wait_for_requests
@@ -68,7 +68,7 @@ RSpec.describe 'User visits the profile preferences page', :js, feature_category
it 'updates their preference' do
wait_for_requests
- select2('pt_BR', from: '#user_preferred_language')
+ select_from_listbox 'Portuguese', from: 'English'
click_button 'Save changes'
wait_for_requests
diff --git a/spec/features/project_variables_spec.rb b/spec/features/project_variables_spec.rb
index d1258937ce6..1a951980141 100644
--- a/spec/features/project_variables_spec.rb
+++ b/spec/features/project_variables_spec.rb
@@ -24,8 +24,9 @@ RSpec.describe 'Project variables', :js, feature_category: :pipeline_authoring d
page.within('#add-ci-variable') do
fill_in 'Key', with: 'akey'
find('#ci-variable-value').set('akey_value')
- find('[data-testid="environment-scope"]').click
- find('[data-testid="ci-environment-search"]').set('review/*')
+
+ click_button('All (default)')
+ fill_in 'Search', with: 'review/*'
find('[data-testid="create-wildcard-button"]').click
click_button('Add variable')
diff --git a/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb b/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
index a497be4cbc3..b723bd5690a 100644
--- a/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
+++ b/spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb
@@ -24,12 +24,6 @@ RSpec.describe 'User follows pipeline suggest nudge spec when feature is enabled
expect(file_name.value).to have_content('.gitlab-ci.yml')
end
- it 'chooses the .gitlab-ci.yml Template Type' do
- template_type = page.find(:css, '.template-type-selector .dropdown-toggle-text')
-
- expect(template_type.text).to have_content('.gitlab-ci.yml')
- end
-
it 'displays suggest_gitlab_ci_yml popover' do
page.find(:css, '.gitlab-ci-yml-selector').click
@@ -60,12 +54,6 @@ RSpec.describe 'User follows pipeline suggest nudge spec when feature is enabled
expect(file_name.value).not_to have_content('.gitlab-ci.yml')
end
- it 'does not choose the .gitlab-ci.yml Template Type' do
- template_type = page.find(:css, '.template-type-selector .dropdown-toggle-text')
-
- expect(template_type.text).to have_content('Select a template type')
- end
-
it 'does not display suggest_gitlab_ci_yml popover' do
popover_selector = '.b-popover.suggest-gitlab-ci-yml'
diff --git a/spec/features/projects/commit/cherry_pick_spec.rb b/spec/features/projects/commit/cherry_pick_spec.rb
index dc8b84283a1..4b9b692b652 100644
--- a/spec/features/projects/commit/cherry_pick_spec.rb
+++ b/spec/features/projects/commit/cherry_pick_spec.rb
@@ -78,9 +78,9 @@ RSpec.describe 'Cherry-pick Commits', :js, feature_category: :source_code_manage
end
page.within("#{modal_selector} .dropdown-menu") do
- find('[data-testid="dropdown-search-box"]').set('feature')
+ fill_in 'Search branches', with: 'feature'
wait_for_requests
- click_button 'feature'
+ find('.gl-dropdown-item-text-wrapper', exact_text: 'feature').click
end
submit_cherry_pick
diff --git a/spec/features/projects/diffs/diff_show_spec.rb b/spec/features/projects/diffs/diff_show_spec.rb
index 973c61de31d..eb700e10141 100644
--- a/spec/features/projects/diffs/diff_show_spec.rb
+++ b/spec/features/projects/diffs/diff_show_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Diff file viewer', :js, :with_clean_rails_cache, feature_category: :code_review do
+RSpec.describe 'Diff file viewer', :js, :with_clean_rails_cache, feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
def visit_commit(sha, anchor: nil)
diff --git a/spec/features/projects/environments/environment_spec.rb b/spec/features/projects/environments/environment_spec.rb
index 75913082803..91401d19fd1 100644
--- a/spec/features/projects/environments/environment_spec.rb
+++ b/spec/features/projects/environments/environment_spec.rb
@@ -52,6 +52,16 @@ RSpec.describe 'Environment', feature_category: :projects do
end
end
+ context 'without deployments' do
+ before do
+ visit_environment(environment)
+ end
+
+ it 'does not show deployments', :js do
+ expect(page).to have_content('You don\'t have any deployments right now.')
+ end
+ end
+
context 'with deployments' do
before do
visit_environment(environment)
diff --git a/spec/features/projects/files/template_type_dropdown_spec.rb b/spec/features/projects/files/template_type_dropdown_spec.rb
deleted file mode 100644
index 990b118d172..00000000000
--- a/spec/features/projects/files/template_type_dropdown_spec.rb
+++ /dev/null
@@ -1,143 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'Projects > Files > Template type dropdown selector', :js, feature_category: :projects do
- let(:project) { create(:project, :repository) }
- let(:user) { project.first_owner }
-
- before do
- sign_in user
- end
-
- context 'editing a non-matching file' do
- before do
- create_and_edit_file('.random-file.js')
- end
-
- it 'not displayed' do
- check_type_selector_display(false)
- end
-
- it 'selects every template type correctly' do
- fill_in 'file_path', with: '.gitignore'
- try_selecting_all_types
- end
-
- it 'updates template type toggle value when template is chosen' do
- fill_in 'file_path', with: '.gitignore'
- select_template('gitignore', 'Actionscript')
- check_type_selector_toggle_text('.gitignore')
- end
- end
-
- context 'editing a matching file' do
- before do
- visit project_edit_blob_path(project, File.join(project.default_branch, 'LICENSE'))
- end
-
- it 'displayed' do
- check_type_selector_display(true)
- end
-
- it 'selects every template type correctly' do
- try_selecting_all_types
- end
-
- context 'user previews changes' do
- before do
- click_link 'Preview changes'
- end
-
- it 'type selector is hidden and shown correctly' do
- check_type_selector_display(false)
- click_link 'Write'
- check_type_selector_display(true)
- end
- end
- end
-
- context 'creating a matching file' do
- before do
- visit project_new_blob_path(project, 'master', file_name: '.gitignore')
- end
-
- it 'is displayed' do
- check_type_selector_display(true)
- end
-
- it 'toggle is set to the correct value' do
- select_template('gitignore', 'Actionscript')
- check_type_selector_toggle_text('.gitignore')
- end
-
- it 'sets the toggle text when selecting the template type' do
- select_template_type('.gitignore')
- check_type_selector_toggle_text('.gitignore')
- end
-
- it 'selects every template type correctly' do
- try_selecting_all_types
- end
- end
-
- context 'creating a file' do
- before do
- visit project_new_blob_path(project, project.default_branch)
- end
-
- it 'type selector is shown' do
- check_type_selector_display(true)
- end
-
- it 'toggle is set to the proper value' do
- check_type_selector_toggle_text('Select a template type')
- end
-
- it 'selects every template type correctly' do
- try_selecting_all_types
- end
- end
-end
-
-def check_type_selector_display(is_visible)
- count = is_visible ? 1 : 0
- expect(page).to have_css('.js-template-type-selector', count: count)
-end
-
-def try_selecting_all_types
- try_selecting_template_type('LICENSE', 'Apply a template')
- try_selecting_template_type('Dockerfile', 'Apply a template')
- try_selecting_template_type('.gitlab-ci.yml', 'Apply a template')
- try_selecting_template_type('.gitignore', 'Apply a template')
-end
-
-def try_selecting_template_type(template_type, selector_label)
- select_template_type(template_type)
- check_template_selector_display(selector_label)
-end
-
-def select_template_type(template_type)
- find('.js-template-type-selector').click
- find('.dropdown-content li', text: template_type).click
-end
-
-def select_template(type, template)
- find(".js-#{type}-selector-wrap").click
- find('.dropdown-content li', text: template).click
-end
-
-def check_template_selector_display(content)
- expect(page).to have_content(content)
-end
-
-def check_type_selector_toggle_text(template_type)
- dropdown_toggle_button = find('.template-type-selector .dropdown-toggle-text')
- expect(dropdown_toggle_button).to have_content(template_type)
-end
-
-def create_and_edit_file(file_name)
- visit project_new_blob_path(project, 'master', file_name: file_name)
- click_button "Commit changes"
- visit project_edit_blob_path(project, File.join(project.default_branch, file_name))
-end
diff --git a/spec/features/projects/files/undo_template_spec.rb b/spec/features/projects/files/undo_template_spec.rb
index afc9a5fd232..0dfed209ce9 100644
--- a/spec/features/projects/files/undo_template_spec.rb
+++ b/spec/features/projects/files/undo_template_spec.rb
@@ -13,19 +13,6 @@ RSpec.describe 'Projects > Files > Template Undo Button', :js, feature_category:
context 'editing a matching file and applying a template' do
before do
visit project_edit_blob_path(project, File.join(project.default_branch, "LICENSE"))
- select_file_template_type('LICENSE')
- select_file_template('.js-license-selector', 'Apache License 2.0')
- end
-
- it 'reverts template application' do
- try_template_undo('http://www.apache.org/licenses/', 'Apply a template')
- end
- end
-
- context 'creating a non-matching file' do
- before do
- visit project_new_blob_path(project, 'master')
- select_file_template_type('LICENSE')
select_file_template('.js-license-selector', 'Apache License 2.0')
end
@@ -53,7 +40,6 @@ end
def check_content_reverted(template_content)
find('.b-toaster a', text: 'Undo').click
expect(page).not_to have_content(template_content)
- expect(page).to have_css('.template-type-selector .dropdown-toggle-text')
end
def select_file_template(template_selector_selector, template_name)
@@ -61,8 +47,3 @@ def select_file_template(template_selector_selector, template_name)
find('.dropdown-content li', text: template_name).click
wait_for_requests
end
-
-def select_file_template_type(template_type)
- find('.js-template-type-selector').click
- find('.dropdown-content li', text: template_type).click
-end
diff --git a/spec/features/projects/fork_spec.rb b/spec/features/projects/fork_spec.rb
index 3867f7fd086..8b484141a95 100644
--- a/spec/features/projects/fork_spec.rb
+++ b/spec/features/projects/fork_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe 'Project fork', feature_category: :projects do
+ include ListboxHelpers
include ProjectForksHelper
let(:user) { create(:user) }
@@ -137,10 +138,9 @@ RSpec.describe 'Project fork', feature_category: :projects do
let(:user) { create(:group_member, :maintainer, user: create(:user), group: group).user }
def submit_form(group_obj = group)
- find('[data-testid="select_namespace_dropdown"]').click
- find('[data-testid="select_namespace_dropdown_search_field"]').fill_in(with: group_obj.name)
- click_button group_obj.name
-
+ click_button(s_('ForkProject|Select a namespace'))
+ send_keys group_obj.name
+ select_listbox_item(group_obj.name)
click_button 'Fork project'
end
@@ -151,6 +151,13 @@ RSpec.describe 'Project fork', feature_category: :projects do
expect(page).to have_content 'Forked from'
end
+ it 'redirects to the source project when cancel is clicked' do
+ visit new_project_fork_path(project)
+ click_on 'Cancel'
+
+ expect(page).to have_current_path(project_path(project))
+ end
+
it 'shows the new forked project on the forks page' do
visit new_project_fork_path(project)
submit_form
diff --git a/spec/features/projects/import_export/export_file_spec.rb b/spec/features/projects/import_export/export_file_spec.rb
index 8986ce91ae3..0230c9e835b 100644
--- a/spec/features/projects/import_export/export_file_spec.rb
+++ b/spec/features/projects/import_export/export_file_spec.rb
@@ -20,7 +20,9 @@ RSpec.describe 'Import/Export - project export integration test', :js, feature_c
}
end
- let(:safe_hashes) { { yaml_variables: %w[key value public] } }
+ let(:safe_hashes) do
+ { yaml_variables: %w[key value public] }
+ end
let(:project) { setup_project }
diff --git a/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb b/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb
index 858d6751afa..55aa6db23c7 100644
--- a/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb
+++ b/spec/features/projects/issues/design_management/user_uploads_designs_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe 'User uploads new design', :js, feature_category: :design_managem
context "when the feature is available" do
let(:feature_enabled) { true }
- it 'uploads designs' do
+ it 'uploads designs', quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/358845' do
upload_design(logo_fixture, count: 1)
expect(page).to have_selector('.js-design-list-item', count: 1)
diff --git a/spec/features/projects/jobs_spec.rb b/spec/features/projects/jobs_spec.rb
index 557a20ff2d6..4734a607ef1 100644
--- a/spec/features/projects/jobs_spec.rb
+++ b/spec/features/projects/jobs_spec.rb
@@ -739,7 +739,12 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :proj
it 'shows manual action empty state', :js do
expect(page).to have_content(job.detailed_status(user).illustration[:title])
expect(page).to have_content('This job requires a manual action')
- expect(page).to have_content('This job requires manual intervention to start. Before starting this job, you can add variables below for last-minute configuration changes.')
+ expect(page).to have_content(
+ _(
+ 'This job does not start automatically and must be started manually. ' \
+ 'You can add CI/CD variables below for last-minute configuration changes before starting the job.'
+ )
+ )
expect(page).to have_button('Trigger this manual action')
end
@@ -772,7 +777,12 @@ RSpec.describe 'Jobs', :clean_gitlab_redis_shared_state, feature_category: :proj
wait_for_requests
expect(page).to have_content('This job requires a manual action')
- expect(page).to have_content('This job requires manual intervention to start. Before starting this job, you can add variables below for last-minute configuration changes.')
+ expect(page).to have_content(
+ _(
+ 'This job does not start automatically and must be started manually. ' \
+ 'You can add CI/CD variables below for last-minute configuration changes before starting the job.'
+ )
+ )
expect(page).to have_button('Trigger this manual action')
end
end
diff --git a/spec/features/projects/labels/sort_labels_spec.rb b/spec/features/projects/labels/sort_labels_spec.rb
index 378a575348e..74ce2f40df8 100644
--- a/spec/features/projects/labels/sort_labels_spec.rb
+++ b/spec/features/projects/labels/sort_labels_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Sort labels', :js, feature_category: :team_planning do
+ include ListboxHelpers
+
let(:user) { create(:user) }
let(:project) { create(:project) }
let!(:label1) { create(:label, title: 'Foo', description: 'Lorem ipsum', project: project) }
@@ -28,16 +30,16 @@ RSpec.describe 'Sort labels', :js, feature_category: :team_planning do
it 'sorts by date' do
click_button 'Name'
- sort_options = find('ul[role="listbox"]').all('li').collect(&:text)
-
- expect(sort_options[0]).to eq('Name')
- expect(sort_options[1]).to eq('Name, descending')
- expect(sort_options[2]).to eq('Last created')
- expect(sort_options[3]).to eq('Oldest created')
- expect(sort_options[4]).to eq('Updated date')
- expect(sort_options[5]).to eq('Oldest updated')
+ expect_listbox_items([
+ 'Name',
+ 'Name, descending',
+ 'Last created',
+ 'Oldest created',
+ 'Updated date',
+ 'Oldest updated'
+ ])
- find('li', text: 'Name, descending').click
+ select_listbox_item('Name, descending')
# assert default sorting
within '.other-labels' do
diff --git a/spec/features/projects/members/manage_members_spec.rb b/spec/features/projects/members/manage_members_spec.rb
index 3ffa402dc2c..615ef1b03dd 100644
--- a/spec/features/projects/members/manage_members_spec.rb
+++ b/spec/features/projects/members/manage_members_spec.rb
@@ -139,17 +139,15 @@ RSpec.describe 'Projects > Members > Manage members', :js, feature_category: :on
it 'can only remove non-Owner members' do
page.within find_member_row(project_owner) do
- expect(page).not_to have_button('Remove member')
+ expect(page).not_to have_selector user_action_dropdown
end
- # Open modal
- page.within find_member_row(project_developer) do
- click_button 'Remove member'
- end
+ show_actions_for_username(project_developer)
+ click_button _('Remove member')
within_modal do
expect(page).to have_unchecked_field 'Also unassign this user from related issues and merge requests'
- click_button('Remove member')
+ click_button _('Remove member')
end
wait_for_requests
@@ -163,18 +161,12 @@ RSpec.describe 'Projects > Members > Manage members', :js, feature_category: :on
let(:current_user) { group_owner }
it 'can remove any direct member' do
- page.within find_member_row(project_owner) do
- expect(page).to have_button('Remove member')
- end
-
- # Open modal
- page.within find_member_row(project_owner) do
- click_button 'Remove member'
- end
+ show_actions_for_username(project_owner)
+ click_button _('Remove member')
within_modal do
expect(page).to have_unchecked_field 'Also unassign this user from related issues and merge requests'
- click_button('Remove member')
+ click_button _('Remove member')
end
wait_for_requests
diff --git a/spec/features/projects/members/sorting_spec.rb b/spec/features/projects/members/sorting_spec.rb
index 5c72d9efeb3..6df1e974f42 100644
--- a/spec/features/projects/members/sorting_spec.rb
+++ b/spec/features/projects/members/sorting_spec.rb
@@ -48,7 +48,7 @@ RSpec.describe 'Projects > Members > Sorting', :js, feature_category: :subgroups
expect(first_row.text).to have_content(maintainer.name)
expect(second_row.text).to have_content(developer.name)
- expect_sort_by('Created on', :asc)
+ expect_sort_by('User created', :asc)
end
it 'sorts by user created on descending' do
@@ -57,7 +57,7 @@ RSpec.describe 'Projects > Members > Sorting', :js, feature_category: :subgroups
expect(first_row.text).to have_content(developer.name)
expect(second_row.text).to have_content(maintainer.name)
- expect_sort_by('Created on', :desc)
+ expect_sort_by('User created', :desc)
end
it 'sorts by last activity ascending' do
diff --git a/spec/features/projects/milestones/milestones_sorting_spec.rb b/spec/features/projects/milestones/milestones_sorting_spec.rb
index 8a8e7d07435..9bf04164a09 100644
--- a/spec/features/projects/milestones/milestones_sorting_spec.rb
+++ b/spec/features/projects/milestones/milestones_sorting_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Milestones sorting', :js, feature_category: :team_planning do
+ include ListboxHelpers
+
let(:user) { create(:user) }
let(:project) { create(:project, name: 'test', namespace: user.namespace) }
let(:milestones_for_sort_by) do
@@ -40,14 +42,13 @@ RSpec.describe 'Milestones sorting', :js, feature_category: :team_planning do
# assert milestones listed for given sort order
selected_sort_order = 'Due soon'
milestones_for_sort_by.each do |sort_by, expected_milestones|
- within '[data-testid=milestone_sort_by_dropdown]' do
- click_button selected_sort_order
- milestones = find('ul[role="listbox"]').all('li').map(&:text)
- expect(milestones).to eq(ordered_milestones)
+ click_button selected_sort_order
- find('li', text: sort_by).click
- expect(page).to have_button(sort_by)
- end
+ expect_listbox_items(ordered_milestones)
+
+ select_listbox_item(sort_by)
+
+ expect(page).to have_button(sort_by)
within '.milestones' do
expect(page.all('ul.content-list > li strong > a').map(&:text)).to eq(expected_milestones)
diff --git a/spec/features/projects/navbar_spec.rb b/spec/features/projects/navbar_spec.rb
index 4d85b5cfb2e..6090d132e3a 100644
--- a/spec/features/projects/navbar_spec.rb
+++ b/spec/features/projects/navbar_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Project navbar', feature_category: :projects do
+RSpec.describe 'Project navbar', :with_license, feature_category: :projects do
include NavbarStructureHelper
include WaitForRequests
@@ -15,6 +15,8 @@ RSpec.describe 'Project navbar', feature_category: :projects do
before do
sign_in(user)
+ stub_feature_flags(show_pages_in_deployments_menu: false)
+
stub_config(registry: { enabled: false })
stub_feature_flags(harbor_registry_integration: false)
insert_package_nav(_('Deployments'))
diff --git a/spec/features/projects/new_project_spec.rb b/spec/features/projects/new_project_spec.rb
index 769ad5bf61a..c6a6ee68185 100644
--- a/spec/features/projects/new_project_spec.rb
+++ b/spec/features/projects/new_project_spec.rb
@@ -552,7 +552,7 @@ RSpec.describe 'New project', :js, feature_category: :projects do
it_behaves_like 'has instructions to enable OAuth'
end
- context 'as an admin' do
+ context 'as an admin', :do_not_mock_admin_mode_setting do
let(:user) { create(:admin) }
let(:oauth_config_instructions) { 'To enable importing projects from Bitbucket, as administrator you need to configure OAuth integration' }
@@ -571,7 +571,7 @@ RSpec.describe 'New project', :js, feature_category: :projects do
it_behaves_like 'has instructions to enable OAuth'
end
- context 'as an admin' do
+ context 'as an admin', :do_not_mock_admin_mode_setting do
let(:user) { create(:admin) }
let(:oauth_config_instructions) { 'To enable importing projects from GitLab.com, as administrator you need to configure OAuth integration' }
diff --git a/spec/features/projects/pages/user_edits_settings_spec.rb b/spec/features/projects/pages/user_edits_settings_spec.rb
index 7ceefdecbae..8c713b6f73a 100644
--- a/spec/features/projects/pages/user_edits_settings_spec.rb
+++ b/spec/features/projects/pages/user_edits_settings_spec.rb
@@ -10,6 +10,8 @@ RSpec.describe 'Pages edits pages settings', :js, feature_category: :pages do
before do
allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
+ stub_feature_flags(show_pages_in_deployments_menu: false)
+
project.add_maintainer(user)
sign_in(user)
diff --git a/spec/features/projects/pipelines/legacy_pipelines_spec.rb b/spec/features/projects/pipelines/legacy_pipelines_spec.rb
new file mode 100644
index 00000000000..e69de29bb2d
--- /dev/null
+++ b/spec/features/projects/pipelines/legacy_pipelines_spec.rb
diff --git a/spec/features/projects/pipelines/pipeline_spec.rb b/spec/features/projects/pipelines/pipeline_spec.rb
index d6067e22952..d5739386a30 100644
--- a/spec/features/projects/pipelines/pipeline_spec.rb
+++ b/spec/features/projects/pipelines/pipeline_spec.rb
@@ -18,6 +18,8 @@ RSpec.describe 'Pipeline', :js, feature_category: :projects do
end
shared_context 'pipeline builds' do
+ let!(:external_stage) { create(:ci_stage, name: 'external', pipeline: pipeline) }
+
let!(:build_passed) do
create(:ci_build, :success,
pipeline: pipeline, stage: 'build', stage_idx: 0, name: 'build')
@@ -52,7 +54,7 @@ RSpec.describe 'Pipeline', :js, feature_category: :projects do
create(:generic_commit_status, status: 'success',
pipeline: pipeline,
name: 'jenkins',
- stage: 'external',
+ ci_stage: external_stage,
ref: 'master',
target_url: 'http://gitlab.com/status')
end
@@ -98,42 +100,16 @@ RSpec.describe 'Pipeline', :js, feature_category: :projects do
end
end
- context 'with pipeline_name feature flag enabled' do
- before do
- stub_feature_flags(pipeline_name: true)
- end
-
- it 'displays pipeline name instead of commit title' do
- visit_pipeline
-
- within 'h3' do
- expect(page).to have_content(pipeline.name)
- end
-
- within '.well-segment[data-testid="commit-row"]' do
- expect(page).to have_content(project.commit.title)
- expect(page).to have_content(project.commit.short_id)
- end
- end
- end
+ it 'displays pipeline name instead of commit title' do
+ visit_pipeline
- context 'with pipeline_name feature flag disabled' do
- before do
- stub_feature_flags(pipeline_name: false)
+ within 'h3' do
+ expect(page).to have_content(pipeline.name)
end
- it 'displays commit title' do
- visit_pipeline
-
- within 'h3' do
- expect(page).not_to have_content(pipeline.name)
- expect(page).to have_content(project.commit.title)
- end
-
- within '.well-segment[data-testid="commit-row"]' do
- expect(page).not_to have_content(project.commit.title)
- expect(page).to have_content(project.commit.short_id)
- end
+ within '.well-segment[data-testid="commit-row"]' do
+ expect(page).to have_content(project.commit.title)
+ expect(page).to have_content(project.commit.short_id)
end
end
diff --git a/spec/features/projects/pipelines/pipelines_spec.rb b/spec/features/projects/pipelines/pipelines_spec.rb
index 3bdabd672c7..6a44f421249 100644
--- a/spec/features/projects/pipelines/pipelines_spec.rb
+++ b/spec/features/projects/pipelines/pipelines_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe 'Pipelines', :js, feature_category: :projects do
+ include ListboxHelpers
include ProjectForksHelper
include Spec::Support::Helpers::ModalHelpers
@@ -594,7 +595,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
end
it 'changes the Pipeline ID column for Pipeline IID' do
- page.find('[data-testid="pipeline-key-dropdown"]').click
+ page.find('[data-testid="pipeline-key-collapsible-box"]').click
within '.gl-dropdown-contents' do
dropdown_options = page.find_all '.gl-dropdown-item'
@@ -618,6 +619,8 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
user: user)
end
+ let(:external_stage) { create(:ci_stage, name: 'external', pipeline: pipeline) }
+
before do
create_build('build', 0, 'build', :success)
create_build('test', 1, 'rspec 0:2', :pending)
@@ -627,7 +630,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
create_build('test', 1, 'audit', :created)
create_build('deploy', 2, 'production', :created)
- create(:generic_commit_status, pipeline: pipeline, stage: 'external', name: 'jenkins', stage_idx: 3, ref: 'master')
+ create(:generic_commit_status, pipeline: pipeline, ci_stage: external_stage, name: 'jenkins', ref: 'master')
visit project_pipeline_path(project, pipeline)
wait_for_requests
@@ -672,7 +675,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
click_button project.default_branch
wait_for_requests
- find('p', text: 'master').click
+ find('.gl-dropdown-item', text: 'master').click
wait_for_requests
end
@@ -776,8 +779,7 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
describe 'new pipeline page' do
it 'has field to add a new pipeline' do
- expect(page).to have_selector('[data-testid="ref-select"]')
- expect(find('[data-testid="ref-select"]')).to have_content project.default_branch
+ expect(page).to have_button project.default_branch
expect(page).to have_content('Run for')
end
end
@@ -785,14 +787,9 @@ RSpec.describe 'Pipelines', :js, feature_category: :projects do
describe 'find pipelines' do
it 'shows filtered pipelines', :js do
click_button project.default_branch
+ send_keys('fix')
- page.within '[data-testid="ref-select"]' do
- find('[data-testid="search-refs"]').native.send_keys('fix')
-
- page.within '.gl-dropdown-contents' do
- expect(page).to have_content('fix')
- end
- end
+ expect_listbox_item('fix')
end
end
end
diff --git a/spec/features/projects/settings/monitor_settings_spec.rb b/spec/features/projects/settings/monitor_settings_spec.rb
index 2cdcf86757e..4b553b57331 100644
--- a/spec/features/projects/settings/monitor_settings_spec.rb
+++ b/spec/features/projects/settings/monitor_settings_spec.rb
@@ -113,7 +113,7 @@ RSpec.describe 'Projects > Settings > For a forked project', :js, feature_catego
within('div#project-dropdown') do
click_button('Select project')
- click_button('Sentry | internal')
+ find('li', text: 'Sentry | internal').click
end
click_button('Save changes')
diff --git a/spec/features/projects/settings/pipelines_settings_spec.rb b/spec/features/projects/settings/pipelines_settings_spec.rb
index 37973c9b8d6..51858ddf8c5 100644
--- a/spec/features/projects/settings/pipelines_settings_spec.rb
+++ b/spec/features/projects/settings/pipelines_settings_spec.rb
@@ -149,29 +149,5 @@ RSpec.describe "Projects > Settings > Pipelines settings", feature_category: :pr
end
end
end
-
- describe 'runners registration token' do
- let!(:token) { project.runners_token }
-
- before do
- visit project_settings_ci_cd_path(project)
- end
-
- it 'has a registration token' do
- expect(page.find('#registration_token')).to have_content(token)
- end
-
- describe 'reload registration token' do
- let(:page_token) { find('#registration_token').text }
-
- before do
- click_link 'Reset registration token'
- end
-
- it 'changes registration token' do
- expect(page_token).not_to eq token
- end
- end
- end
end
end
diff --git a/spec/features/projects/settings/user_manages_project_members_spec.rb b/spec/features/projects/settings/user_manages_project_members_spec.rb
index ee832da48d9..fac4d5a99a5 100644
--- a/spec/features/projects/settings/user_manages_project_members_spec.rb
+++ b/spec/features/projects/settings/user_manages_project_members_spec.rb
@@ -22,13 +22,12 @@ RSpec.describe 'Projects > Settings > User manages project members', feature_cat
it 'cancels a team member', :js do
visit(project_project_members_path(project))
- page.within find_member_row(user_dmitriy) do
- click_button 'Remove member'
- end
+ show_actions_for_username(user_dmitriy)
+ click_button _('Remove member')
within_modal do
expect(page).to have_unchecked_field 'Also unassign this user from related issues and merge requests'
- click_button('Remove member')
+ click_button _('Remove member')
end
visit(project_project_members_path(project))
diff --git a/spec/features/projects/settings/user_searches_in_settings_spec.rb b/spec/features/projects/settings/user_searches_in_settings_spec.rb
index 8a11507d064..923a6a10671 100644
--- a/spec/features/projects/settings/user_searches_in_settings_spec.rb
+++ b/spec/features/projects/settings/user_searches_in_settings_spec.rb
@@ -68,6 +68,7 @@ RSpec.describe 'User searches project settings', :js, feature_category: :project
context 'in Pages page' do
before do
+ stub_feature_flags(show_pages_in_deployments_menu: false)
allow(Gitlab.config.pages).to receive(:enabled).and_return(true)
visit project_pages_path(project)
diff --git a/spec/features/projects/terraform_spec.rb b/spec/features/projects/terraform_spec.rb
index bbc7f675c55..5e2f65165c2 100644
--- a/spec/features/projects/terraform_spec.rb
+++ b/spec/features/projects/terraform_spec.rb
@@ -56,9 +56,9 @@ RSpec.describe 'Terraform', :js, feature_category: :projects do
end
context 'when clicking on the delete button' do
- let(:additional_state) { create(:terraform_state, project: project) }
+ let!(:additional_state) { create(:terraform_state, project: project) }
- it 'removes the state', :aggregate_failures, quarantine: 'https://gitlab.com/gitlab-org/gitlab/-/issues/333640' do
+ it 'removes the state', :aggregate_failures do
visit project_terraform_index_path(project)
expect(page).to have_content(additional_state.name)
@@ -69,7 +69,12 @@ RSpec.describe 'Terraform', :js, feature_category: :projects do
click_button 'Remove'
expect(page).to have_content("#{additional_state.name} successfully removed")
- expect { additional_state.reload }.to raise_error ActiveRecord::RecordNotFound
+
+ find("[data-testid='remove-icon']").hover
+ expect(page).to have_content("Deletion in progress")
+
+ additional_state.reload
+ expect(additional_state.deleted_at).not_to be_nil
end
end
diff --git a/spec/features/projects/tree/create_directory_spec.rb b/spec/features/projects/tree/create_directory_spec.rb
index 3a0160c42fb..58f572bc021 100644
--- a/spec/features/projects/tree/create_directory_spec.rb
+++ b/spec/features/projects/tree/create_directory_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Multi-file editor new directory', :js, feature_category: :web_ide do
+ include WebIdeSpecHelpers
+
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
@@ -16,9 +18,7 @@ RSpec.describe 'Multi-file editor new directory', :js, feature_category: :web_id
wait_for_requests
- click_link('Web IDE')
-
- wait_for_requests
+ ide_visit_from_link
end
after do
@@ -26,6 +26,8 @@ RSpec.describe 'Multi-file editor new directory', :js, feature_category: :web_id
end
it 'creates directory in current directory' do
+ wait_for_all_requests
+
all('.ide-tree-actions button').last.click
page.within('.modal') do
diff --git a/spec/features/projects/tree/create_file_spec.rb b/spec/features/projects/tree/create_file_spec.rb
index 61240150658..674aef8e6f4 100644
--- a/spec/features/projects/tree/create_file_spec.rb
+++ b/spec/features/projects/tree/create_file_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Multi-file editor new file', :js, feature_category: :web_ide do
+ include WebIdeSpecHelpers
+
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
@@ -16,9 +18,7 @@ RSpec.describe 'Multi-file editor new file', :js, feature_category: :web_ide do
wait_for_requests
- click_link('Web IDE')
-
- wait_for_requests
+ ide_visit_from_link
end
after do
@@ -26,6 +26,7 @@ RSpec.describe 'Multi-file editor new file', :js, feature_category: :web_ide do
end
it 'creates file in current directory' do
+ wait_for_requests
first('.ide-tree-actions button').click
page.within('.modal') do
diff --git a/spec/features/projects/tree/tree_show_spec.rb b/spec/features/projects/tree/tree_show_spec.rb
index 21932cae58b..835a3cda65e 100644
--- a/spec/features/projects/tree/tree_show_spec.rb
+++ b/spec/features/projects/tree/tree_show_spec.rb
@@ -3,6 +3,7 @@
require 'spec_helper'
RSpec.describe 'Projects tree', :js, feature_category: :web_ide do
+ include WebIdeSpecHelpers
include RepoHelpers
let(:user) { create(:user) }
@@ -123,10 +124,9 @@ RSpec.describe 'Projects tree', :js, feature_category: :web_ide do
it 'opens folder in IDE' do
visit project_tree_path(project, File.join('master', 'bar'))
+ ide_visit_from_link
- click_link 'Web IDE'
-
- wait_for_requests
+ wait_for_all_requests
find('.ide-file-list')
wait_for_requests
expect(page).to have_selector('.is-open', text: 'bar')
diff --git a/spec/features/projects/tree/upload_file_spec.rb b/spec/features/projects/tree/upload_file_spec.rb
index 1e4abc789c2..42fa88a0d3e 100644
--- a/spec/features/projects/tree/upload_file_spec.rb
+++ b/spec/features/projects/tree/upload_file_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe 'Multi-file editor upload file', :js, feature_category: :web_ide do
+ include WebIdeSpecHelpers
+
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:txt_file) { File.join(Rails.root, 'spec', 'fixtures', 'doc_sample.txt') }
@@ -18,9 +20,7 @@ RSpec.describe 'Multi-file editor upload file', :js, feature_category: :web_ide
wait_for_requests
- click_link('Web IDE')
-
- wait_for_requests
+ ide_visit_from_link
end
after do
@@ -28,6 +28,7 @@ RSpec.describe 'Multi-file editor upload file', :js, feature_category: :web_ide
end
it 'uploads text file' do
+ wait_for_all_requests
# make the field visible so capybara can use it
execute_script('document.querySelector("#file-upload").classList.remove("hidden")')
attach_file('file-upload', txt_file)
diff --git a/spec/features/projects_spec.rb b/spec/features/projects_spec.rb
index ec0b3f9d81b..84702b3a6bb 100644
--- a/spec/features/projects_spec.rb
+++ b/spec/features/projects_spec.rb
@@ -213,7 +213,7 @@ RSpec.describe 'Project', feature_category: :projects do
end
end
- describe 'showing information about source of a project fork' do
+ describe 'showing information about source of a project fork', :js do
let(:user) { create(:user) }
let(:base_project) { create(:project, :public, :repository) }
let(:forked_project) { fork_project(base_project, user, repository: true) }
@@ -224,6 +224,7 @@ RSpec.describe 'Project', feature_category: :projects do
it 'shows a link to the source project when it is available', :sidekiq_might_not_need_inline do
visit project_path(forked_project)
+ wait_for_requests
expect(page).to have_content('Forked from')
expect(page).to have_link(base_project.full_name)
@@ -233,6 +234,7 @@ RSpec.describe 'Project', feature_category: :projects do
forked_project
visit project_path(base_project)
+ wait_for_requests
expect(page).not_to have_content('In fork network of')
expect(page).not_to have_content('Forked from')
@@ -243,7 +245,7 @@ RSpec.describe 'Project', feature_category: :projects do
Projects::DestroyService.new(base_project, base_project.first_owner).execute
visit project_path(forked_project)
-
+ wait_for_requests
expect(page).to have_content('Forked from an inaccessible project')
end
@@ -255,7 +257,7 @@ RSpec.describe 'Project', feature_category: :projects do
Projects::DestroyService.new(forked_project, user).execute
visit project_path(fork_of_fork)
-
+ wait_for_requests
expect(page).to have_content("Forked from")
expect(page).to have_link(base_project.full_name)
end
diff --git a/spec/features/protected_branches_spec.rb b/spec/features/protected_branches_spec.rb
index c549d99a51f..04096b3e4f9 100644
--- a/spec/features/protected_branches_spec.rb
+++ b/spec/features/protected_branches_spec.rb
@@ -117,7 +117,7 @@ RSpec.describe 'Protected Branches', :js, feature_category: :source_code_managem
set_protected_branch_name('some-branch')
click_on "Protect"
- within(".protected-branches-list") { expect(page).to have_content('Branch was deleted') }
+ within(".protected-branches-list") { expect(page).to have_content('Branch does not exist') }
end
end
diff --git a/spec/features/protected_tags_spec.rb b/spec/features/protected_tags_spec.rb
index 1aadc7ce90a..c2058a5c345 100644
--- a/spec/features/protected_tags_spec.rb
+++ b/spec/features/protected_tags_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Protected Tags', :js, feature_category: :source_code_management do
+RSpec.describe 'Protected Tags', :js, :with_license, feature_category: :source_code_management do
include ProtectedTagHelpers
let(:project) { create(:project, :repository) }
diff --git a/spec/features/runners_spec.rb b/spec/features/runners_spec.rb
index 40ba0fa9ebb..e7c2452af93 100644
--- a/spec/features/runners_spec.rb
+++ b/spec/features/runners_spec.rb
@@ -9,370 +9,402 @@ RSpec.describe 'Runners', feature_category: :runner_fleet do
sign_in(user)
end
- context 'when user opens runners page' do
- let(:project) { create(:project) }
-
+ context 'when project_runners_vue_ui is disabled' do
before do
- project.add_maintainer(user)
+ stub_feature_flags(project_runners_vue_ui: false)
end
- it 'user can see a link with instructions on how to install GitLab Runner' do
- visit project_runners_path(project)
+ context 'when user opens runners page' do
+ let(:project) { create(:project) }
- expect(page).to have_link('Install GitLab Runner and ensure it\'s running.', href: "https://docs.gitlab.com/runner/install/")
- end
- end
+ before do
+ project.add_maintainer(user)
+ end
- context 'when a project has enabled shared_runners' do
- let_it_be(:project) { create(:project) }
+ it 'user can see a link with instructions on how to install GitLab Runner' do
+ visit project_runners_path(project)
- before do
- project.add_maintainer(user)
- end
+ expect(page).to have_link('Install GitLab Runner and ensure it\'s running.', href: "https://docs.gitlab.com/runner/install/")
+ end
- context 'when a project_type runner is activated on the project' do
- let_it_be(:project_runner) { create(:ci_runner, :project, projects: [project]) }
+ describe 'runners registration token' do
+ let!(:token) { project.runners_token }
- it 'user sees the specific runner' do
- visit project_runners_path(project)
+ context 'when project_runners_vue_ui is disabled' do
+ before do
+ visit project_runners_path(project)
+ end
- within '.activated-specific-runners' do
- expect(page).to have_content(project_runner.display_name)
- end
+ it 'has a registration token' do
+ expect(page.find('#registration_token')).to have_content(token)
+ end
- click_on project_runner.short_sha
+ describe 'reload registration token' do
+ let(:page_token) { find('#registration_token').text }
- expect(page).to have_content(project_runner.platform)
+ before do
+ click_link 'Reset registration token'
+ end
+
+ it 'changes registration token' do
+ expect(page_token).not_to eq token
+ end
+ end
+ end
end
+ end
- it 'user can pause and resume the specific runner' do
- visit project_runners_path(project)
+ context 'when a project has enabled shared_runners' do
+ let_it_be(:project) { create(:project) }
- within '.activated-specific-runners' do
- expect(page).to have_link('Pause')
- end
+ before do
+ project.add_maintainer(user)
+ end
- click_on 'Pause'
+ context 'when a project_type runner is activated on the project' do
+ let_it_be(:project_runner) { create(:ci_runner, :project, projects: [project]) }
- within '.activated-specific-runners' do
- expect(page).to have_link('Resume')
- end
+ it 'user sees the specific runner' do
+ visit project_runners_path(project)
- click_on 'Resume'
+ within '.activated-specific-runners' do
+ expect(page).to have_content(project_runner.display_name)
+ end
+
+ click_on project_runner.short_sha
- within '.activated-specific-runners' do
- expect(page).to have_link('Pause')
+ expect(page).to have_content(project_runner.platform)
end
- end
- it 'user removes an activated specific runner if this is last project for that runners' do
- visit project_runners_path(project)
+ it 'user can pause and resume the specific runner' do
+ visit project_runners_path(project)
- within '.activated-specific-runners' do
- click_on 'Remove runner'
- end
+ within '.activated-specific-runners' do
+ expect(page).to have_link('Pause')
+ end
- expect(page).not_to have_content(project_runner.display_name)
- end
+ click_on 'Pause'
- it 'user edits the runner to be protected' do
- visit project_runners_path(project)
+ within '.activated-specific-runners' do
+ expect(page).to have_link('Resume')
+ end
- within '.activated-specific-runners' do
- first('[data-testid="edit-runner-link"]').click
- end
+ click_on 'Resume'
- expect(page.find_field('runner[access_level]')).not_to be_checked
+ within '.activated-specific-runners' do
+ expect(page).to have_link('Pause')
+ end
+ end
- check 'runner_access_level'
- click_button 'Save changes'
+ it 'user removes an activated specific runner if this is last project for that runners' do
+ visit project_runners_path(project)
- expect(page).to have_content 'Protected Yes'
- end
+ within '.activated-specific-runners' do
+ click_on 'Remove runner'
+ end
- context 'when a runner has a tag' do
- before do
- project_runner.update!(tag_list: ['tag'])
+ expect(page).not_to have_content(project_runner.display_name)
end
- it 'user edits runner not to run untagged jobs' do
+ it 'user edits the runner to be protected' do
visit project_runners_path(project)
within '.activated-specific-runners' do
first('[data-testid="edit-runner-link"]').click
end
- expect(page.find_field('runner[run_untagged]')).to be_checked
+ expect(page.find_field('runner[access_level]')).not_to be_checked
- uncheck 'runner_run_untagged'
+ check 'runner_access_level'
click_button 'Save changes'
- expect(page).to have_content 'Can run untagged jobs No'
+ expect(page).to have_content 'Protected Yes'
end
- end
-
- context 'when a shared runner is activated on the project' do
- let!(:shared_runner) { create(:ci_runner, :instance) }
- it 'user sees CI/CD setting page' do
- visit project_runners_path(project)
-
- within '[data-testid="available-shared-runners"]' do
- expect(page).to have_content(shared_runner.display_name)
+ context 'when a runner has a tag' do
+ before do
+ project_runner.update!(tag_list: ['tag'])
end
- end
- context 'when multiple shared runners are configured' do
- let_it_be(:shared_runner_2) { create(:ci_runner, :instance) }
-
- it 'shows the runner count' do
+ it 'user edits runner not to run untagged jobs' do
visit project_runners_path(project)
- within '[data-testid="available-shared-runners"]' do
- expect(page).to have_content format(_('Available shared runners: %{count}'), { count: 2 })
+ within '.activated-specific-runners' do
+ first('[data-testid="edit-runner-link"]').click
end
+
+ expect(page.find_field('runner[run_untagged]')).to be_checked
+
+ uncheck 'runner_run_untagged'
+ click_button 'Save changes'
+
+ expect(page).to have_content 'Can run untagged jobs No'
end
+ end
- it 'adds pagination to the shared runner list' do
- stub_const('Projects::Settings::CiCdController::NUMBER_OF_RUNNERS_PER_PAGE', 1)
+ context 'when a shared runner is activated on the project' do
+ let!(:shared_runner) { create(:ci_runner, :instance) }
+ it 'user sees the CI/CD settings page' do
visit project_runners_path(project)
within '[data-testid="available-shared-runners"]' do
- expect(find('.pagination')).not_to be_nil
+ expect(page).to have_content(shared_runner.display_name)
end
end
- end
- end
- context 'when multiple project runners are configured' do
- let!(:project_runner_2) { create(:ci_runner, :project, projects: [project]) }
+ context 'when multiple shared runners are configured' do
+ let_it_be(:shared_runner_2) { create(:ci_runner, :instance) }
- it 'adds pagination to the runner list' do
- stub_const('Projects::Settings::CiCdController::NUMBER_OF_RUNNERS_PER_PAGE', 1)
+ it 'shows the runner count' do
+ visit project_runners_path(project)
- visit project_runners_path(project)
+ within '[data-testid="available-shared-runners"]' do
+ expect(page).to have_content format(_('Available shared runners: %{count}'), { count: 2 })
+ end
+ end
- expect(find('.pagination')).not_to be_nil
- end
- end
- end
+ it 'adds pagination to the shared runner list' do
+ stub_const('Projects::Settings::CiCdController::NUMBER_OF_RUNNERS_PER_PAGE', 1)
- context 'when a specific runner exists in another project' do
- let(:another_project) { create(:project) }
- let!(:project_runner) { create(:ci_runner, :project, projects: [another_project]) }
+ visit project_runners_path(project)
- before do
- another_project.add_maintainer(user)
- end
+ within '[data-testid="available-shared-runners"]' do
+ expect(find('.pagination')).not_to be_nil
+ end
+ end
+ end
+ end
- it 'user enables and disables a specific runner' do
- visit project_runners_path(project)
+ context 'when multiple project runners are configured' do
+ let!(:project_runner_2) { create(:ci_runner, :project, projects: [project]) }
- within '.available-specific-runners' do
- click_on 'Enable for this project'
- end
+ it 'adds pagination to the runner list' do
+ stub_const('Projects::Settings::CiCdController::NUMBER_OF_RUNNERS_PER_PAGE', 1)
- expect(page.find('.activated-specific-runners')).to have_content(project_runner.display_name)
+ visit project_runners_path(project)
- within '.activated-specific-runners' do
- click_on 'Disable for this project'
+ expect(find('.pagination')).not_to be_nil
+ end
end
-
- expect(page.find('.available-specific-runners')).to have_content(project_runner.display_name)
end
- end
- context 'shared runner text' do
- context 'when application settings have shared_runners_text' do
- let(:shared_runners_text) { 'custom **shared** runners description' }
- let(:shared_runners_html) { 'custom shared runners description' }
+ context 'when a specific runner exists in another project' do
+ let(:another_project) { create(:project) }
+ let!(:project_runner) { create(:ci_runner, :project, projects: [another_project]) }
before do
- stub_application_setting(shared_runners_text: shared_runners_text)
+ another_project.add_maintainer(user)
end
- it 'user sees shared runners description' do
+ it 'user enables and disables a specific runner' do
visit project_runners_path(project)
- page.within("[data-testid='shared-runners-description']") do
- expect(page).not_to have_content('The same shared runner executes code from multiple projects')
- expect(page).to have_content(shared_runners_html)
+ within '.available-specific-runners' do
+ click_on 'Enable for this project'
+ end
+
+ expect(page.find('.activated-specific-runners')).to have_content(project_runner.display_name)
+
+ within '.activated-specific-runners' do
+ click_on 'Disable for this project'
end
+
+ expect(page.find('.available-specific-runners')).to have_content(project_runner.display_name)
end
end
- context 'when application settings have an unsafe link in shared_runners_text' do
- let(:shared_runners_text) { '<a href="javascript:alert(\'xss\')">link</a>' }
+ context 'shared runner text' do
+ context 'when application settings have shared_runners_text' do
+ let(:shared_runners_text) { 'custom **shared** runners description' }
+ let(:shared_runners_html) { 'custom shared runners description' }
- before do
- stub_application_setting(shared_runners_text: shared_runners_text)
- end
+ before do
+ stub_application_setting(shared_runners_text: shared_runners_text)
+ end
- it 'user sees no link' do
- visit project_runners_path(project)
+ it 'user sees shared runners description' do
+ visit project_runners_path(project)
- page.within("[data-testid='shared-runners-description']") do
- expect(page).to have_content('link')
- expect(page).not_to have_link('link')
+ page.within("[data-testid='shared-runners-description']") do
+ expect(page).not_to have_content('The same shared runner executes code from multiple projects')
+ expect(page).to have_content(shared_runners_html)
+ end
end
end
- end
- context 'when application settings have an unsafe image in shared_runners_text' do
- let(:shared_runners_text) { '<img src="404.png" onerror="alert(\'xss\')"/>' }
+ context 'when application settings have an unsafe link in shared_runners_text' do
+ let(:shared_runners_text) { '<a href="javascript:alert(\'xss\')">link</a>' }
- before do
- stub_application_setting(shared_runners_text: shared_runners_text)
- end
+ before do
+ stub_application_setting(shared_runners_text: shared_runners_text)
+ end
- it 'user sees image safely' do
- visit project_runners_path(project)
+ it 'user sees no link' do
+ visit project_runners_path(project)
- page.within("[data-testid='shared-runners-description']") do
- expect(page).to have_css('img')
- expect(page).not_to have_css('img[onerror]')
+ page.within("[data-testid='shared-runners-description']") do
+ expect(page).to have_content('link')
+ expect(page).not_to have_link('link')
+ end
end
end
- end
- end
- end
- context 'enable shared runners in project settings', :js do
- before do
- project.add_maintainer(user)
+ context 'when application settings have an unsafe image in shared_runners_text' do
+ let(:shared_runners_text) { '<img src="404.png" onerror="alert(\'xss\')"/>' }
- visit project_runners_path(project)
- end
+ before do
+ stub_application_setting(shared_runners_text: shared_runners_text)
+ end
- context 'when a project has enabled shared_runners' do
- let(:project) { create(:project, shared_runners_enabled: true) }
+ it 'user sees image safely' do
+ visit project_runners_path(project)
- it 'shared runners toggle is on' do
- expect(page).to have_selector('[data-testid="toggle-shared-runners"]')
- expect(page).to have_selector('[data-testid="toggle-shared-runners"] .is-checked')
+ page.within("[data-testid='shared-runners-description']") do
+ expect(page).to have_css('img')
+ expect(page).not_to have_css('img[onerror]')
+ end
+ end
+ end
end
end
- context 'when a project has disabled shared_runners' do
- let(:project) { create(:project, shared_runners_enabled: false) }
+ context 'enable shared runners in project settings', :js do
+ before do
+ project.add_maintainer(user)
- it 'shared runners toggle is off' do
- expect(page).not_to have_selector('[data-testid="toggle-shared-runners"] .is-checked')
+ visit project_runners_path(project)
end
- end
- end
-
- context 'group runners in project settings' do
- before do
- project.add_maintainer(user)
- end
- let_it_be(:group) { create :group }
- let_it_be(:project) { create :project, group: group }
+ context 'when a project has enabled shared_runners' do
+ let(:project) { create(:project, shared_runners_enabled: true) }
- context 'as project and group maintainer' do
- before do
- group.add_maintainer(user)
+ it 'shared runners toggle is on' do
+ expect(page).to have_selector('[data-testid="toggle-shared-runners"]')
+ expect(page).to have_selector('[data-testid="toggle-shared-runners"] .is-checked')
+ end
end
- context 'project with a group but no group runner' do
- it 'group runners are not available' do
- visit project_runners_path(project)
+ context 'when a project has disabled shared_runners' do
+ let(:project) { create(:project, shared_runners_enabled: false) }
- expect(page).not_to have_content 'To register them, go to the group\'s Runners page.'
- expect(page).to have_content 'Ask your group owner to set up a group runner'
+ it 'shared runners toggle is off' do
+ expect(page).not_to have_selector('[data-testid="toggle-shared-runners"] .is-checked')
end
end
end
- context 'as project maintainer and group owner' do
+ context 'group runners in project settings' do
before do
- group.add_owner(user)
+ project.add_maintainer(user)
end
- context 'project with a group but no group runner' do
- it 'group runners are available' do
- visit project_runners_path(project)
-
- expect(page).to have_content 'This group does not have any group runners yet.'
+ let_it_be(:group) { create :group }
+ let_it_be(:project) { create :project, group: group }
- expect(page).to have_content 'To register them, go to the group\'s Runners page.'
- expect(page).not_to have_content 'Ask your group owner to set up a group runner'
+ context 'as project and group maintainer' do
+ before do
+ group.add_maintainer(user)
end
- end
- end
-
- context 'as project maintainer' do
- context 'project without a group' do
- let(:project) { create :project }
- it 'group runners are not available' do
- visit project_runners_path(project)
+ context 'project with a group but no group runner' do
+ it 'group runners are not available' do
+ visit project_runners_path(project)
- expect(page).to have_content 'This project does not belong to a group and cannot make use of group runners.'
+ expect(page).not_to have_content 'To register them, go to the group\'s Runners page.'
+ expect(page).to have_content 'Ask your group owner to set up a group runner'
+ end
end
end
- context 'with group project' do
- let_it_be(:group) { create(:group) }
- let_it_be(:project) { create(:project, group: group) }
+ context 'as project maintainer and group owner' do
+ before do
+ group.add_owner(user)
+ end
context 'project with a group but no group runner' do
- it 'group runners are not available' do
+ it 'group runners are available' do
visit project_runners_path(project)
expect(page).to have_content 'This group does not have any group runners yet.'
- expect(page).not_to have_content 'To register them, go to the group\'s Runners page.'
- expect(page).to have_content 'Ask your group owner to set up a group runner.'
+ expect(page).to have_content 'To register them, go to the group\'s Runners page.'
+ expect(page).not_to have_content 'Ask your group owner to set up a group runner'
end
end
+ end
- context 'project with a group and a group runner' do
- let_it_be(:group_runner) do
- create(:ci_runner, :group, groups: [group], description: 'group-runner')
- end
+ context 'as project maintainer' do
+ context 'project without a group' do
+ let(:project) { create :project }
- it 'group runners are available' do
+ it 'group runners are not available' do
visit project_runners_path(project)
- expect(page).to have_content 'Available group runners: 1'
- expect(page).to have_content 'group-runner'
+ expect(page).to have_content 'This project does not belong to a group and cannot make use of group runners.'
end
+ end
- it 'group runners may be disabled for a project' do
- visit project_runners_path(project)
-
- click_on 'Disable group runners'
+ context 'with group project' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
- expect(page).to have_content 'Enable group runners'
- expect(project.reload.group_runners_enabled).to be false
+ context 'project with a group but no group runner' do
+ it 'group runners are not available' do
+ visit project_runners_path(project)
- click_on 'Enable group runners'
+ expect(page).to have_content 'This group does not have any group runners yet.'
- expect(page).to have_content 'Disable group runners'
- expect(project.reload.group_runners_enabled).to be true
+ expect(page).not_to have_content 'To register them, go to the group\'s Runners page.'
+ expect(page).to have_content 'Ask your group owner to set up a group runner.'
+ end
end
- context 'when multiple group runners are configured' do
- let_it_be(:group_runner_2) { create(:ci_runner, :group, groups: [group]) }
+ context 'project with a group and a group runner' do
+ let_it_be(:group_runner) do
+ create(:ci_runner, :group, groups: [group], description: 'group-runner')
+ end
- it 'shows the runner count' do
+ it 'group runners are available' do
visit project_runners_path(project)
- within '[data-testid="group-runners"]' do
- expect(page).to have_content format(_('Available group runners: %{runners}'), { runners: 2 })
- end
+ expect(page).to have_content 'Available group runners: 1'
+ expect(page).to have_content 'group-runner'
end
- it 'adds pagination to the group runner list' do
- stub_const('Projects::Settings::CiCdController::NUMBER_OF_RUNNERS_PER_PAGE', 1)
-
+ it 'group runners may be disabled for a project' do
visit project_runners_path(project)
- within '[data-testid="group-runners"]' do
- expect(find('.pagination')).not_to be_nil
+ click_on 'Disable group runners'
+
+ expect(page).to have_content 'Enable group runners'
+ expect(project.reload.group_runners_enabled).to be false
+
+ click_on 'Enable group runners'
+
+ expect(page).to have_content 'Disable group runners'
+ expect(project.reload.group_runners_enabled).to be true
+ end
+
+ context 'when multiple group runners are configured' do
+ let_it_be(:group_runner_2) { create(:ci_runner, :group, groups: [group]) }
+
+ it 'shows the runner count' do
+ visit project_runners_path(project)
+
+ within '[data-testid="group-runners"]' do
+ expect(page).to have_content format(_('Available group runners: %{runners}'), { runners: 2 })
+ end
+ end
+
+ it 'adds pagination to the group runner list' do
+ stub_const('Projects::Settings::CiCdController::NUMBER_OF_RUNNERS_PER_PAGE', 1)
+
+ visit project_runners_path(project)
+
+ within '[data-testid="group-runners"]' do
+ expect(find('.pagination')).not_to be_nil
+ end
end
end
end
diff --git a/spec/features/search/user_searches_for_code_spec.rb b/spec/features/search/user_searches_for_code_spec.rb
index 14d67bac85f..dd7095107f4 100644
--- a/spec/features/search/user_searches_for_code_spec.rb
+++ b/spec/features/search/user_searches_for_code_spec.rb
@@ -17,20 +17,24 @@ RSpec.describe 'User searches for code', :js, :disable_rate_limiter, feature_cat
sign_in(user)
end
- it 'finds a file' do
- visit(project_path(project))
+ context 'when on a project page' do
+ before do
+ visit(project_path(project))
+ end
- submit_search('application.js')
- select_search_scope('Code')
+ it 'finds a file' do
+ submit_search('application.js')
+ select_search_scope('Code')
- expect(page).to have_selector('.results', text: 'application.js')
- expect(page).to have_selector('.file-content .code')
- expect(page).to have_selector("span.line[lang='javascript']")
- expect(page).to have_link('application.js', href: %r{master/files/js/application.js})
- expect(page).to have_button('Copy file path')
+ expect(page).to have_selector('.results', text: 'application.js')
+ expect(page).to have_selector('.file-content .code')
+ expect(page).to have_selector("span.line[lang='javascript']")
+ expect(page).to have_link('application.js', href: %r{master/files/js/application.js})
+ expect(page).to have_button('Copy file path')
+ end
end
- context 'when on a project page' do
+ context 'when on a project search page' do
before do
visit(search_path)
find('[data-testid="project-filter"]').click
@@ -47,28 +51,31 @@ RSpec.describe 'User searches for code', :js, :disable_rate_limiter, feature_cat
let(:additional_params) { { project_id: project.id } }
end
- it 'finds code and links to blob' do
- expected_result = 'Update capybara, rspec-rails, poltergeist to recent versions'
-
- fill_in('dashboard_search', with: 'rspec')
- find('.gl-search-box-by-click-search-button').click
+ context 'when searching code' do
+ let(:expected_result) { 'Update capybara, rspec-rails, poltergeist to recent versions' }
- expect(page).to have_selector('.results', text: expected_result)
+ before do
+ fill_in('dashboard_search', with: 'rspec')
+ find('.gl-search-box-by-click-search-button').click
+ end
- find("#blob-L3").click
- expect(current_url).to match(%r{blob/master/.gitignore#L3})
- end
+ it 'finds code and links to blob' do
+ expect(page).to have_selector('.results', text: expected_result)
- it 'finds code and links to blame' do
- expected_result = 'Update capybara, rspec-rails, poltergeist to recent versions'
+ find("#blob-L3").click
+ expect(current_url).to match(%r{blob/master/.gitignore#L3})
+ end
- fill_in('dashboard_search', with: 'rspec')
- find('.gl-search-box-by-click-search-button').click
+ it 'finds code and links to blame' do
+ expect(page).to have_selector('.results', text: expected_result)
- expect(page).to have_selector('.results', text: expected_result)
+ find("#blame-L3").click
+ expect(current_url).to match(%r{blame/master/.gitignore#L3})
+ end
- find("#blame-L3").click
- expect(current_url).to match(%r{blame/master/.gitignore#L3})
+ it_behaves_like 'code highlight' do
+ subject { page }
+ end
end
it 'search multiple words with refs switching' do
diff --git a/spec/features/signed_commits_spec.rb b/spec/features/signed_commits_spec.rb
index 34127787e47..bc82afc70a3 100644
--- a/spec/features/signed_commits_spec.rb
+++ b/spec/features/signed_commits_spec.rb
@@ -94,8 +94,6 @@ RSpec.describe 'GPG signed commits', feature_category: :source_code_management d
within '.popover' do
expect(page).to have_content 'This commit was signed with a verified signature, but the committer email is not associated with the GPG Key.'
- expect(page).to have_content 'Bette Cartwright'
- expect(page).to have_content '@bette.cartwright'
expect(page).to have_content "GPG Key ID: #{GpgHelpers::User2.primary_keyid}"
end
end
@@ -110,8 +108,6 @@ RSpec.describe 'GPG signed commits', feature_category: :source_code_management d
within '.popover' do
expect(page).to have_content "This commit was signed with a different user's verified signature."
- expect(page).to have_content 'Bette Cartwright'
- expect(page).to have_content '@bette.cartwright'
expect(page).to have_content "GPG Key ID: #{GpgHelpers::User2.primary_keyid}"
end
end
@@ -138,9 +134,7 @@ RSpec.describe 'GPG signed commits', feature_category: :source_code_management d
page.find('.gpg-status-box', text: 'Verified').click
within '.popover' do
- expect(page).to have_content 'This commit was signed with a verified signature and the committer email is verified to belong to the same user.'
- expect(page).to have_content 'Nannie Bernhard'
- expect(page).to have_content '@nannie.bernhard'
+ expect(page).to have_content 'This commit was signed with a verified signature and the committer email was verified to belong to the same user.'
expect(page).to have_content "GPG Key ID: #{GpgHelpers::User1.primary_keyid}"
end
end
@@ -162,9 +156,7 @@ RSpec.describe 'GPG signed commits', feature_category: :source_code_management d
page.find('.gpg-status-box', text: 'Verified').click
within '.popover' do
- expect(page).to have_content 'This commit was signed with a verified signature and the committer email is verified to belong to the same user.'
- expect(page).to have_content 'Nannie Bernhard'
- expect(page).to have_content 'nannie.bernhard@example.com'
+ expect(page).to have_content 'This commit was signed with a verified signature and the committer email was verified to belong to the same user.'
expect(page).to have_content "GPG Key ID: #{GpgHelpers::User1.primary_keyid}"
end
end
diff --git a/spec/features/snippets/show_spec.rb b/spec/features/snippets/show_spec.rb
index a6e0bc32d42..dc2fcdd7305 100644
--- a/spec/features/snippets/show_spec.rb
+++ b/spec/features/snippets/show_spec.rb
@@ -24,4 +24,25 @@ RSpec.describe 'Snippet', :js, feature_category: :source_code_management do
subject { visit snippet_path(snippet) }
end
+
+ it_behaves_like 'a dashboard page with sidebar', :dashboard_snippets_path, :snippets
+
+ context 'when unauthenticated' do
+ it 'does not have the sidebar' do
+ visit snippet_path(snippet)
+
+ expect(page).to have_title _('Snippets')
+ expect(page).not_to have_css('aside.nav-sidebar')
+ end
+ end
+
+ context 'when authenticated as a different user' do
+ let_it_be(:different_user) { create(:user) }
+
+ before do
+ sign_in(different_user)
+ end
+
+ it_behaves_like 'a dashboard page with sidebar', :dashboard_snippets_path, :snippets
+ end
end
diff --git a/spec/features/snippets/user_creates_snippet_spec.rb b/spec/features/snippets/user_creates_snippet_spec.rb
index 064250c5673..03f569fe4b0 100644
--- a/spec/features/snippets/user_creates_snippet_spec.rb
+++ b/spec/features/snippets/user_creates_snippet_spec.rb
@@ -21,6 +21,8 @@ RSpec.describe 'User creates snippet', :js, feature_category: :source_code_manag
visit new_snippet_path
end
+ it_behaves_like 'a dashboard page with sidebar', :new_snippet_path, :snippets
+
def fill_form
snippet_fill_in_form(title: title, content: file_content, description: md_description)
end
diff --git a/spec/features/triggers_spec.rb b/spec/features/triggers_spec.rb
index 3616fdb2e8e..23a13994fa4 100644
--- a/spec/features/triggers_spec.rb
+++ b/spec/features/triggers_spec.rb
@@ -23,7 +23,7 @@ RSpec.describe 'Triggers', :js, feature_category: :continuous_integration do
wait_for_requests
end
- shared_examples 'triggers page' do
+ describe 'triggers page' do
describe 'create trigger workflow' do
it 'prevents adding new trigger with no description' do
fill_in 'trigger_description', with: ''
@@ -139,16 +139,4 @@ RSpec.describe 'Triggers', :js, feature_category: :continuous_integration do
end
end
end
-
- context 'when ci_pipeline_triggers_settings_vue_ui is enabled' do
- it_behaves_like 'triggers page'
- end
-
- context 'when ci_pipeline_triggers_settings_vue_ui is disabled' do
- before do
- stub_feature_flags(ci_pipeline_triggers_settings_vue_ui: false)
- end
-
- it_behaves_like 'triggers page'
- end
end
diff --git a/spec/features/user_sees_revert_modal_spec.rb b/spec/features/user_sees_revert_modal_spec.rb
index ea5fd537c5b..ae3158e4270 100644
--- a/spec/features/user_sees_revert_modal_spec.rb
+++ b/spec/features/user_sees_revert_modal_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe 'Merge request > User sees revert modal', :js, :sidekiq_might_not_need_inline,
-feature_category: :code_review do
+feature_category: :code_review_workflow do
let(:project) { create(:project, :public, :repository) }
let(:user) { project.creator }
let(:merge_request) { create(:merge_request, source_project: project) }
diff --git a/spec/features/user_sorts_things_spec.rb b/spec/features/user_sorts_things_spec.rb
index 708caf79090..b45de88832c 100644
--- a/spec/features/user_sorts_things_spec.rb
+++ b/spec/features/user_sorts_things_spec.rb
@@ -34,7 +34,7 @@ RSpec.describe "User sorts things", :js do
expect(page).to have_button(sort_option)
end
- it "merge requests -> dashboard merge requests", feature_category: :code_review do
+ it "merge requests -> dashboard merge requests", feature_category: :code_review_workflow do
sort_option = s_('SortOptions|Updated date')
visit(project_merge_requests_path(project))
diff --git a/spec/features/users/login_spec.rb b/spec/features/users/login_spec.rb
index 105e9f97989..5e683befeec 100644
--- a/spec/features/users/login_spec.rb
+++ b/spec/features/users/login_spec.rb
@@ -926,7 +926,8 @@ RSpec.describe 'Login', :clean_gitlab_redis_sessions, feature_category: :system_
stub_omniauth_saml_config(enabled: true, auto_link_saml_user: true, allow_single_sign_on: ['saml'], providers: [mock_saml_config])
end
- it 'asks the user to accept the terms before setting an email' do
+ it 'asks the user to accept the terms before setting an email',
+ quarantine: { issue: 'https://gitlab.com/gitlab-org/gitlab/-/issues/388049', type: :flaky } do
expect(authentication_metrics)
.to increment(:user_authenticated_counter)
diff --git a/spec/finders/access_requests_finder_spec.rb b/spec/finders/access_requests_finder_spec.rb
index f4fda1f3dd2..b82495d55fd 100644
--- a/spec/finders/access_requests_finder_spec.rb
+++ b/spec/finders/access_requests_finder_spec.rb
@@ -40,7 +40,7 @@ RSpec.describe AccessRequestsFinder do
end
end
- describe '#execute' do
+ shared_examples '#execute' do
context 'when current user cannot see project access requests' do
it_behaves_like 'a finder returning no results', :execute do
let(:source) { project }
@@ -67,7 +67,7 @@ RSpec.describe AccessRequestsFinder do
end
end
- describe '#execute!' do
+ shared_examples '#execute!' do
context 'when current user cannot see access requests' do
it_behaves_like 'a finder raising Gitlab::Access::AccessDeniedError', :execute! do
let(:source) { project }
@@ -93,4 +93,16 @@ RSpec.describe AccessRequestsFinder do
end
end
end
+
+ it_behaves_like '#execute'
+ it_behaves_like '#execute!'
+
+ context 'when project_members_index_by_project_namespace feature flag is disabled' do
+ before do
+ stub_feature_flags(project_members_index_by_project_namespace: false)
+ end
+
+ it_behaves_like '#execute'
+ it_behaves_like '#execute!'
+ end
end
diff --git a/spec/finders/branches_finder_spec.rb b/spec/finders/branches_finder_spec.rb
index 18f8d1adecc..9f185c8b8fb 100644
--- a/spec/finders/branches_finder_spec.rb
+++ b/spec/finders/branches_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BranchesFinder do
+RSpec.describe BranchesFinder, feature_category: :source_code_management do
let(:user) { create(:user) }
let(:project) { create(:project, :repository) }
let(:repository) { project.repository }
@@ -72,6 +72,19 @@ RSpec.describe BranchesFinder do
end
end
+ context 'by string' do
+ let(:params) { { search: 'add' } }
+
+ it 'returns all branches containing the name' do
+ result = subject
+
+ result.each do |branch|
+ expect(branch.name).to include('add')
+ end
+ expect(result.count).to eq(5)
+ end
+ end
+
context 'by provided names' do
let(:params) { { names: %w[fix csv lfs does-not-exist] } }
@@ -127,6 +140,34 @@ RSpec.describe BranchesFinder do
end
end
+ context 'by invalid regex' do
+ let(:params) { { regex: '[' } }
+
+ it { expect { subject }.to raise_error(RegexpError) }
+ end
+
+ context 'by `|` regex' do
+ let(:params) { { regex: 'audio|add-ipython-files' } }
+
+ it 'filters branches' do
+ branches = subject
+ expect(branches.first.name).to eq('add-ipython-files')
+ expect(branches.second.name).to eq('audio')
+ expect(branches.count).to eq(2)
+ end
+ end
+
+ context 'by exclude name' do
+ let(:params) { { regex: '^[^a]' } }
+
+ it 'filters branches' do
+ result = subject
+ result.each do |branch|
+ expect(branch.name).not_to start_with('a')
+ end
+ end
+ end
+
context 'by name with multiple wildcards' do
let(:params) { { search: 'f*a*e' } }
diff --git a/spec/finders/ci/pipelines_finder_spec.rb b/spec/finders/ci/pipelines_finder_spec.rb
index a2e8fe8df5a..9ce3becf013 100644
--- a/spec/finders/ci/pipelines_finder_spec.rb
+++ b/spec/finders/ci/pipelines_finder_spec.rb
@@ -260,16 +260,6 @@ RSpec.describe Ci::PipelinesFinder do
end
end
- context 'when pipeline_name feature flag is off' do
- before do
- stub_feature_flags(pipeline_name: false)
- end
-
- it 'ignores name parameter' do
- is_expected.to contain_exactly(pipeline, pipeline_other)
- end
- end
-
context 'when pipeline_name_search feature flag is off' do
before do
stub_feature_flags(pipeline_name_search: false)
diff --git a/spec/finders/ci/runners_finder_spec.rb b/spec/finders/ci/runners_finder_spec.rb
index a8ef99eeaec..1aba77f4d6e 100644
--- a/spec/finders/ci/runners_finder_spec.rb
+++ b/spec/finders/ci/runners_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::RunnersFinder do
+RSpec.describe Ci::RunnersFinder, feature_category: :runner_fleet do
context 'admin' do
let_it_be(:admin) { create(:user, :admin) }
@@ -630,16 +630,6 @@ RSpec.describe Ci::RunnersFinder do
expect(subject).to be_empty
end
end
-
- context 'when on_demand_scans_runner_tags feature flag is disabled' do
- before do
- stub_feature_flags(on_demand_scans_runner_tags: false)
- end
-
- it 'returns no runners' do
- expect(subject).to be_empty
- end
- end
end
end
end
diff --git a/spec/finders/members_finder_spec.rb b/spec/finders/members_finder_spec.rb
index aa7d32e51ac..c48a0271471 100644
--- a/spec/finders/members_finder_spec.rb
+++ b/spec/finders/members_finder_spec.rb
@@ -2,205 +2,217 @@
require 'spec_helper'
-RSpec.describe MembersFinder, '#execute' do
- let_it_be(:group) { create(:group) }
- let_it_be(:nested_group) { create(:group, parent: group) }
- let_it_be(:project, reload: true) { create(:project, namespace: nested_group) }
- let_it_be(:user1) { create(:user) }
- let_it_be(:user2) { create(:user) }
- let_it_be(:user3) { create(:user) }
- let_it_be(:user4) { create(:user) }
- let_it_be(:blocked_user) { create(:user, :blocked) }
-
- it 'returns members for project and parent groups' do
- nested_group.request_access(user1)
- member1 = group.add_maintainer(user2)
- member2 = nested_group.add_maintainer(user3)
- member3 = project.add_maintainer(user4)
- blocked_member = project.add_maintainer(blocked_user)
-
- result = described_class.new(project, user2).execute
-
- expect(result).to contain_exactly(member1, member2, member3, blocked_member)
- end
+RSpec.describe MembersFinder, feature_category: :subgroups do
+ shared_examples '#execute' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:nested_group) { create(:group, parent: group) }
+ let_it_be(:project, reload: true) { create(:project, namespace: nested_group) }
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:user3) { create(:user) }
+ let_it_be(:user4) { create(:user) }
+ let_it_be(:blocked_user) { create(:user, :blocked) }
+
+ it 'returns members for project and parent groups' do
+ nested_group.request_access(user1)
+ member1 = group.add_maintainer(user2)
+ member2 = nested_group.add_maintainer(user3)
+ member3 = project.add_maintainer(user4)
+ blocked_member = project.add_maintainer(blocked_user)
+
+ result = described_class.new(project, user2).execute
+
+ expect(result).to contain_exactly(member1, member2, member3, blocked_member)
+ end
- it 'returns owners and maintainers' do
- member1 = group.add_owner(user1)
- group.add_developer(user2)
- member3 = project.add_maintainer(user3)
- project.add_developer(user4)
+ it 'returns owners and maintainers' do
+ member1 = group.add_owner(user1)
+ group.add_developer(user2)
+ member3 = project.add_maintainer(user3)
+ project.add_developer(user4)
- result = described_class.new(project, user2, params: { owners_and_maintainers: true }).execute
+ result = described_class.new(project, user2, params: { owners_and_maintainers: true }).execute
- expect(result).to contain_exactly(member1, member3)
- end
+ expect(result).to contain_exactly(member1, member3)
+ end
- it 'returns active users and excludes invited users' do
- member1 = project.add_maintainer(user2)
- create(:project_member, :invited, project: project, invite_email: create(:user).email)
- project.add_maintainer(blocked_user)
+ it 'returns active users and excludes invited users' do
+ member1 = project.add_maintainer(user2)
+ create(:project_member, :invited, project: project, invite_email: create(:user).email)
+ project.add_maintainer(blocked_user)
- result = described_class.new(project, user2, params: { active_without_invites_and_requests: true }).execute
+ result = described_class.new(project, user2, params: { active_without_invites_and_requests: true }).execute
- expect(result).to contain_exactly(member1)
- end
+ expect(result).to contain_exactly(member1)
+ end
- it 'does not return members of parent group with minimal access' do
- nested_group.request_access(user1)
- member1 = group.add_maintainer(user2)
- member2 = nested_group.add_maintainer(user3)
- member3 = project.add_maintainer(user4)
- create(:group_member, :minimal_access, user: create(:user), source: group)
+ it 'does not return members of parent group with minimal access' do
+ nested_group.request_access(user1)
+ member1 = group.add_maintainer(user2)
+ member2 = nested_group.add_maintainer(user3)
+ member3 = project.add_maintainer(user4)
+ create(:group_member, :minimal_access, user: create(:user), source: group)
- result = described_class.new(project, user2).execute
+ result = described_class.new(project, user2).execute
- expect(result).to contain_exactly(member1, member2, member3)
- end
+ expect(result).to contain_exactly(member1, member2, member3)
+ end
- it 'includes only non-invite members if user do not have amdin permissions on project' do
- create(:project_member, :invited, project: project, invite_email: create(:user).email)
- member1 = project.add_maintainer(user1)
- member2 = project.add_developer(user2)
+ it 'includes only non-invite members if the user does not have admin permissions on the project' do
+ create(:project_member, :invited, project: project, invite_email: create(:user).email)
+ member1 = project.add_maintainer(user1)
+ member2 = project.add_developer(user2)
- result = described_class.new(project, user2).execute(include_relations: [:direct])
+ result = described_class.new(project, user2).execute(include_relations: [:direct])
- expect(result).to contain_exactly(member1, member2)
- end
+ expect(result).to contain_exactly(member1, member2)
+ end
- it 'includes invited members if user have admin permissions on project' do
- member_invite = create(:project_member, :invited, project: project, invite_email: create(:user).email)
- member1 = project.add_maintainer(user1)
- member2 = project.add_maintainer(user2)
+ it 'includes invited members if the user has admin permissions on the project' do
+ member_invite = create(:project_member, :invited, project: project, invite_email: create(:user).email)
+ member1 = project.add_maintainer(user1)
+ member2 = project.add_maintainer(user2)
- result = described_class.new(project, user2).execute(include_relations: [:direct])
+ result = described_class.new(project, user2).execute(include_relations: [:direct])
- expect(result).to contain_exactly(member1, member2, member_invite)
- end
+ expect(result).to contain_exactly(member1, member2, member_invite)
+ end
- it 'includes nested group members if asked', :nested_groups do
- nested_group.request_access(user1)
- member1 = group.add_maintainer(user2)
- member2 = nested_group.add_maintainer(user3)
- member3 = project.add_maintainer(user4)
+ it 'includes nested group members if asked', :nested_groups do
+ nested_group.request_access(user1)
+ member1 = group.add_maintainer(user2)
+ member2 = nested_group.add_maintainer(user3)
+ member3 = project.add_maintainer(user4)
- result = described_class.new(project, user2).execute(include_relations: [:direct, :descendants])
+ result = described_class.new(project, user2).execute(include_relations: [:direct, :descendants])
- expect(result).to contain_exactly(member1, member2, member3)
- end
+ expect(result).to contain_exactly(member1, member2, member3)
+ end
- it 'returns only members of project if asked' do
- nested_group.request_access(user1)
- group.add_maintainer(user2)
- nested_group.add_maintainer(user3)
- member4 = project.add_maintainer(user4)
+ it 'returns only members of project if asked' do
+ nested_group.request_access(user1)
+ group.add_maintainer(user2)
+ nested_group.add_maintainer(user3)
+ member4 = project.add_maintainer(user4)
- result = described_class.new(project, user2).execute(include_relations: [:direct])
+ result = described_class.new(project, user2).execute(include_relations: [:direct])
- expect(result).to contain_exactly(member4)
- end
+ expect(result).to contain_exactly(member4)
+ end
- it 'returns only inherited members of project if asked' do
- nested_group.request_access(user1)
- member2 = group.add_maintainer(user2)
- member3 = nested_group.add_maintainer(user3)
- project.add_maintainer(user4)
+ it 'returns only inherited members of project if asked' do
+ nested_group.request_access(user1)
+ member2 = group.add_maintainer(user2)
+ member3 = nested_group.add_maintainer(user3)
+ project.add_maintainer(user4)
- result = described_class.new(project, user2).execute(include_relations: [:inherited])
+ result = described_class.new(project, user2).execute(include_relations: [:inherited])
- expect(result).to contain_exactly(member2, member3)
- end
+ expect(result).to contain_exactly(member2, member3)
+ end
- it 'returns only inherited members of a personal project' do
- project = create(:project, namespace: user1.namespace)
- member = project.members.first
+ it 'returns only inherited members of a personal project' do
+ project = create(:project, namespace: user1.namespace)
+ member = project.members.first
- result = described_class.new(project, user1).execute(include_relations: [:inherited])
+ result = described_class.new(project, user1).execute(include_relations: [:inherited])
- expect(result).to contain_exactly(member)
- end
+ expect(result).to contain_exactly(member)
+ end
- it 'returns the members.access_level when the user is invited', :nested_groups do
- member_invite = create(:project_member, :invited, project: project, invite_email: create(:user).email)
- member1 = group.add_maintainer(user2)
+ it 'returns the members.access_level when the user is invited', :nested_groups do
+ member_invite = create(:project_member, :invited, project: project, invite_email: create(:user).email)
+ member1 = group.add_maintainer(user2)
- result = described_class.new(project, user2).execute(include_relations: [:direct, :descendants])
+ result = described_class.new(project, user2).execute(include_relations: [:direct, :descendants])
- expect(result).to contain_exactly(member1, member_invite)
- expect(result.last.access_level).to eq(member_invite.access_level)
- end
+ expect(result).to contain_exactly(member1, member_invite)
+ expect(result.last.access_level).to eq(member_invite.access_level)
+ end
- it 'returns the highest access_level for the user', :nested_groups do
- member1 = project.add_guest(user1)
- group.add_developer(user1)
- nested_group.add_reporter(user1)
+ it 'returns the highest access_level for the user', :nested_groups do
+ member1 = project.add_guest(user1)
+ group.add_developer(user1)
+ nested_group.add_reporter(user1)
- result = described_class.new(project, user1).execute(include_relations: [:direct, :descendants])
+ result = described_class.new(project, user1).execute(include_relations: [:direct, :descendants])
- expect(result).to contain_exactly(member1)
- expect(result.first.access_level).to eq(Gitlab::Access::DEVELOPER)
- end
+ expect(result).to contain_exactly(member1)
+ expect(result.first.access_level).to eq(Gitlab::Access::DEVELOPER)
+ end
- it 'returns searched members if requested' do
- project.add_maintainer(user2)
- project.add_maintainer(user3)
- member3 = project.add_maintainer(user4)
+ it 'returns searched members if requested' do
+ project.add_maintainer(user2)
+ project.add_maintainer(user3)
+ member3 = project.add_maintainer(user4)
- result = described_class.new(project, user2, params: { search: user4.name }).execute
+ result = described_class.new(project, user2, params: { search: user4.name }).execute
- expect(result).to contain_exactly(member3)
- end
+ expect(result).to contain_exactly(member3)
+ end
- it 'returns members sorted by id_desc' do
- member1 = project.add_maintainer(user2)
- member2 = project.add_maintainer(user3)
- member3 = project.add_maintainer(user4)
+ it 'returns members sorted by id_desc' do
+ member1 = project.add_maintainer(user2)
+ member2 = project.add_maintainer(user3)
+ member3 = project.add_maintainer(user4)
- result = described_class.new(project, user2, params: { sort: 'id_desc' }).execute
+ result = described_class.new(project, user2, params: { sort: 'id_desc' }).execute
- expect(result).to eq([member3, member2, member1])
- end
+ expect(result).to eq([member3, member2, member1])
+ end
- context 'when :invited_groups is passed' do
- shared_examples 'with invited_groups param' do
- subject { described_class.new(project, user2).execute(include_relations: [:inherited, :direct, :invited_groups]) }
+ context 'when :invited_groups is passed' do
+ shared_examples 'with invited_groups param' do
+ subject { described_class.new(project, user2).execute(include_relations: [:inherited, :direct, :invited_groups]) }
- let_it_be(:linked_group) { create(:group, :public) }
- let_it_be(:nested_linked_group) { create(:group, parent: linked_group) }
- let_it_be(:linked_group_member) { linked_group.add_guest(user1) }
- let_it_be(:nested_linked_group_member) { nested_linked_group.add_guest(user2) }
+ let_it_be(:linked_group) { create(:group, :public) }
+ let_it_be(:nested_linked_group) { create(:group, parent: linked_group) }
+ let_it_be(:linked_group_member) { linked_group.add_guest(user1) }
+ let_it_be(:nested_linked_group_member) { nested_linked_group.add_guest(user2) }
- it 'includes all the invited_groups members including members inherited from ancestor groups' do
- create(:project_group_link, project: project, group: nested_linked_group)
+ it 'includes all the invited_groups members including members inherited from ancestor groups' do
+ create(:project_group_link, project: project, group: nested_linked_group)
- expect(subject).to contain_exactly(linked_group_member, nested_linked_group_member)
- end
+ expect(subject).to contain_exactly(linked_group_member, nested_linked_group_member)
+ end
- it 'includes all the invited_groups members' do
- create(:project_group_link, project: project, group: linked_group)
+ it 'includes all the invited_groups members' do
+ create(:project_group_link, project: project, group: linked_group)
- expect(subject).to contain_exactly(linked_group_member)
- end
+ expect(subject).to contain_exactly(linked_group_member)
+ end
- it 'excludes group_members not visible to the user' do
- create(:project_group_link, project: project, group: linked_group)
- private_linked_group = create(:group, :private)
- private_linked_group.add_developer(user3)
- create(:project_group_link, project: project, group: private_linked_group)
+ it 'excludes group_members not visible to the user' do
+ create(:project_group_link, project: project, group: linked_group)
+ private_linked_group = create(:group, :private)
+ private_linked_group.add_developer(user3)
+ create(:project_group_link, project: project, group: private_linked_group)
- expect(subject).to contain_exactly(linked_group_member)
- end
+ expect(subject).to contain_exactly(linked_group_member)
+ end
- context 'when the user is a member of invited group and ancestor groups' do
- it 'returns the highest access_level for the user limited by project_group_link.group_access', :nested_groups do
- create(:project_group_link, project: project, group: nested_linked_group, group_access: Gitlab::Access::REPORTER)
- nested_linked_group.add_developer(user1)
+ context 'when the user is a member of invited group and ancestor groups' do
+ it 'returns the highest access_level for the user limited by project_group_link.group_access', :nested_groups do
+ create(:project_group_link, project: project, group: nested_linked_group, group_access: Gitlab::Access::REPORTER)
+ nested_linked_group.add_developer(user1)
- expect(subject.map(&:user)).to contain_exactly(user1, user2)
- expect(subject.max_by(&:access_level).access_level).to eq(Gitlab::Access::REPORTER)
+ expect(subject.map(&:user)).to contain_exactly(user1, user2)
+ expect(subject.max_by(&:access_level).access_level).to eq(Gitlab::Access::REPORTER)
+ end
end
end
+
+ it_behaves_like 'with invited_groups param'
+ end
+ end
+
+ it_behaves_like '#execute'
+
+ context 'when project_members_index_by_project_namespace feature flag is disabled' do
+ before do
+ stub_feature_flags(project_members_index_by_project_namespace: false)
end
- it_behaves_like 'with invited_groups param'
+ it_behaves_like '#execute'
end
end
diff --git a/spec/finders/merge_requests_finder_spec.rb b/spec/finders/merge_requests_finder_spec.rb
index 349ffd09324..e58ec0cd59e 100644
--- a/spec/finders/merge_requests_finder_spec.rb
+++ b/spec/finders/merge_requests_finder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MergeRequestsFinder do
+RSpec.describe MergeRequestsFinder, feature_category: :code_review_workflow do
context "multiple projects with merge requests" do
include_context 'MergeRequestsFinder multiple projects with merge requests context'
@@ -993,4 +993,29 @@ RSpec.describe MergeRequestsFinder do
end
end
end
+
+ context 'when the author of a merge request is banned', feature_category: :insider_threat do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:banned_user) { create(:user, :banned) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:banned_merge_request) { create(:merge_request, author: banned_user, source_project: project) }
+
+ subject { described_class.new(user).execute }
+
+ it { is_expected.not_to include(banned_merge_request) }
+
+ context 'when the user is an admin', :enable_admin_mode do
+ let_it_be(:user) { create(:user, :admin) }
+
+ it { is_expected.to include(banned_merge_request) }
+ end
+
+ context 'when the `hide_merge_requests_from_banned_users` feature flag is disabled' do
+ before do
+ stub_feature_flags(hide_merge_requests_from_banned_users: false)
+ end
+
+ it { is_expected.to include(banned_merge_request) }
+ end
+ end
end
diff --git a/spec/fixtures/api/schemas/public_api/v4/integration.json b/spec/fixtures/api/schemas/public_api/v4/integration.json
index d1538db7de4..18e61636fa2 100644
--- a/spec/fixtures/api/schemas/public_api/v4/integration.json
+++ b/spec/fixtures/api/schemas/public_api/v4/integration.json
@@ -30,6 +30,9 @@
"issues_events": {
"type": "boolean"
},
+ "incident_events": {
+ "type": "boolean"
+ },
"confidential_issues_events": {
"type": "boolean"
},
diff --git a/spec/fixtures/api/schemas/remote_mirror.json b/spec/fixtures/api/schemas/remote_mirror.json
index 87bde189db5..a4e886a8c7c 100644
--- a/spec/fixtures/api/schemas/remote_mirror.json
+++ b/spec/fixtures/api/schemas/remote_mirror.json
@@ -12,16 +12,57 @@
"only_protected_branches"
],
"properties": {
- "id": { "type": "integer" },
- "enabled": { "type": "boolean" },
- "url": { "type": "string" },
- "update_status": { "type": "string" },
- "last_update_at": { "type": ["string", "null"] },
- "last_update_started_at": { "type": ["string", "null"] },
- "last_successful_update_at": { "type": ["string", "null"] },
- "last_error": { "type": ["string", "null"] },
- "only_protected_branches": { "type": "boolean" },
- "keep_divergent_refs": { "type": ["boolean", "null"] }
+ "id": {
+ "type": "integer"
+ },
+ "enabled": {
+ "type": "boolean"
+ },
+ "url": {
+ "type": "string"
+ },
+ "update_status": {
+ "type": "string"
+ },
+ "last_update_at": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "last_update_started_at": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "last_successful_update_at": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "last_error": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "only_protected_branches": {
+ "type": "boolean"
+ },
+ "mirror_branch_regex": {
+ "type": [
+ "string",
+ "null"
+ ]
+ },
+ "keep_divergent_refs": {
+ "type": [
+ "boolean",
+ "null"
+ ]
+ }
},
"additionalProperties": false
}
diff --git a/spec/fixtures/config/mail_room_enabled_ms_graph.yml b/spec/fixtures/config/mail_room_enabled_ms_graph.yml
index 791760e1dfd..71fc7e73e35 100644
--- a/spec/fixtures/config/mail_room_enabled_ms_graph.yml
+++ b/spec/fixtures/config/mail_room_enabled_ms_graph.yml
@@ -4,7 +4,7 @@ test:
address: "gitlab-incoming+%{key}@gmail.com"
user: "gitlab-incoming@gmail.com"
mailbox: "inbox"
- expunge_deleted: true
+ delete_after_delivery: false
inbox_method: "microsoft_graph"
inbox_options:
tenant_id: "12345"
@@ -17,7 +17,7 @@ test:
address: "gitlab-incoming+%{key}@gmail.com"
user: "gitlab-incoming@gmail.com"
mailbox: "inbox"
- expunge_deleted: true
+ delete_after_delivery: false
inbox_method: "microsoft_graph"
inbox_options:
tenant_id: "12345"
diff --git a/spec/fixtures/lib/gitlab/import_export/complex/project.json b/spec/fixtures/lib/gitlab/import_export/complex/project.json
index 8e7cb487444..88439965cf3 100644
--- a/spec/fixtures/lib/gitlab/import_export/complex/project.json
+++ b/spec/fixtures/lib/gitlab/import_export/complex/project.json
@@ -5,6 +5,8 @@
"visibility_level": 10,
"archived": false,
"ci_config_path": "config/path",
+ "allow_merge_on_skipped_pipeline": true,
+ "squash_option": 3,
"labels": [
{
"id": 2,
@@ -370,13 +372,13 @@
],
"resource_label_events": [
{
- "id":244,
- "action":"remove",
- "issue_id":40,
- "merge_request_id":null,
- "label_id":2,
- "user_id":1,
- "created_at":"2018-08-28T08:24:00.494Z",
+ "id": 244,
+ "action": "remove",
+ "issue_id": 40,
+ "merge_request_id": null,
+ "label_id": 2,
+ "user_id": 1,
+ "created_at": "2018-08-28T08:24:00.494Z",
"label": {
"id": 2,
"title": "test2",
@@ -2350,7 +2352,7 @@
"name": "thumbsup",
"user_id": 1,
"awardable_type": "Snippet",
- "awardable_id": 1,
+ "awardable_id": 1,
"created_at": "2019-11-05T15:37:21.287Z",
"updated_at": "2019-11-05T15:37:21.287Z"
},
@@ -2359,7 +2361,7 @@
"name": "coffee",
"user_id": 1,
"awardable_type": "Snippet",
- "awardable_id": 1,
+ "awardable_id": 1,
"created_at": "2019-11-05T15:37:24.645Z",
"updated_at": "2019-11-05T15:37:24.645Z"
}
@@ -2446,7 +2448,7 @@
"links": [
{
"id": 1,
- "release_id" : 1,
+ "release_id": 1,
"url": "http://localhost/namespace6/project6/-/jobs/140463678/artifacts/download",
"name": "release-1.1.dmg",
"created_at": "2019-12-26T10:17:14.621Z",
@@ -2906,13 +2908,13 @@
],
"resource_label_events": [
{
- "id":243,
- "action":"add",
- "issue_id":null,
- "merge_request_id":27,
- "label_id":null,
- "user_id":1,
- "created_at":"2018-08-28T08:24:00.494Z"
+ "id": 243,
+ "action": "add",
+ "issue_id": null,
+ "merge_request_id": 27,
+ "label_id": null,
+ "user_id": 1,
+ "created_at": "2018-08-28T08:24:00.494Z"
}
],
"merge_request_diff": {
@@ -7472,8 +7474,7 @@
"started_at": null,
"finished_at": null,
"duration": null,
- "stages": [
- ]
+ "stages": []
},
{
"id": 20,
@@ -7491,11 +7492,9 @@
"started_at": null,
"finished_at": null,
"duration": null,
- "stages": [
- ],
+ "stages": [],
"source": "external_pull_request_event",
- "external_pull_request":
- {
+ "external_pull_request": {
"id": 3,
"pull_request_iid": 4,
"source_branch": "feature",
@@ -7505,8 +7504,8 @@
"source_sha": "ce84140e8b878ce6e7c4d298c7202ff38170e3ac",
"target_sha": "a09386439ca39abe575675ffd4b89ae824fec22f",
"status": "open",
- "created_at": "2016-03-22T15:20:35.763Z",
- "updated_at": "2016-03-22T15:20:35.763Z"
+ "created_at": "2016-03-22T15:20:35.763Z",
+ "updated_at": "2016-03-22T15:20:35.763Z"
}
}
],
@@ -7563,7 +7562,7 @@
"updated_at": "2016-08-30T07:32:52.490Z"
}
],
- "allow_force_push":false
+ "allow_force_push": false
}
],
"protected_environments": [
@@ -7670,17 +7669,17 @@
},
"external_pull_requests": [
{
- "id": 3,
- "pull_request_iid": 4,
- "source_branch": "feature",
- "target_branch": "master",
- "source_repository": "the-repository",
- "target_repository": "the-repository",
- "source_sha": "ce84140e8b878ce6e7c4d298c7202ff38170e3ac",
- "target_sha": "a09386439ca39abe575675ffd4b89ae824fec22f",
- "status": "open",
- "created_at": "2019-12-24T14:04:50.053Z",
- "updated_at": "2019-12-24T14:05:18.138Z"
+ "id": 3,
+ "pull_request_iid": 4,
+ "source_branch": "feature",
+ "target_branch": "master",
+ "source_repository": "the-repository",
+ "target_repository": "the-repository",
+ "source_sha": "ce84140e8b878ce6e7c4d298c7202ff38170e3ac",
+ "target_sha": "a09386439ca39abe575675ffd4b89ae824fec22f",
+ "status": "open",
+ "created_at": "2019-12-24T14:04:50.053Z",
+ "updated_at": "2019-12-24T14:05:18.138Z"
}
],
"boards": [
@@ -7848,5 +7847,4 @@
"commit_committer_check": true,
"regexp_uses_re2": true
}
-
-}
+}
\ No newline at end of file
diff --git a/spec/fixtures/markdown/markdown_golden_master_examples.yml b/spec/fixtures/markdown/markdown_golden_master_examples.yml
deleted file mode 100644
index 9e7de030a29..00000000000
--- a/spec/fixtures/markdown/markdown_golden_master_examples.yml
+++ /dev/null
@@ -1,909 +0,0 @@
-# Related Specs:
-#
-# This data file drives the specs in the following specs:
-#
-# CE Backend: spec/requests/api/markdown_golden_master_spec.rb
-# CE Frontend: spec/frontend/content_editor/markdown_processing_spec.js
-#
-# For EE, these files are used:
-# EE Data: ee/spec/fixtures/markdown/markdown_golden_master_examples.yml
-# EE Backend: ee/spec/requests/api/markdown_golden_master_spec.rb
-# EE Frontend: ee/spec/frontend/content_editor/ee_markdown_processing_spec.js
-#
-#
-# Requirements:
-#
-# 1. Frontend: We should have test coverage that the Content Editor can properly serialize HTML
-# to Markdown for all GFM source elements which it currently supports.
-# 2. Frontend: We should have test coverage that the Content Editor can properly render the expected
-# HTML for all GFM source elements which it currently supports (not currently implemented in the
-# frontend - this will likely be a standalone module outside of the Content Editor).
-# 3. Backend: We should ensure that for all GFM elements, the backend always renders the expected
-# HTML, for **all** supported GFM source elements.
-#
-# If any of this ever changes unexpectedly, tests will start failing, forcing the same change
-# to be made on the backend and frontend.
-#
-#
-# Overview:
-#
-# These specs ensure that the bidirectional Markdown <-> HTML conversion logic is implemented
-# identically on the backend and frontend, for all supported GitLab-Flavored Markdown examples, by
-# running hardcoded examples through the logic and ensuring the results match.
-#
-# This is an example of the "Golden Master Testing" approach, which is also referred to as
-# "Approval Testing" or "Characterization Testing".
-#
-# The term "Golden Master" originally comes from the recording industry, and refers to process
-# of "mastering", or making a final mix from which all other copies will be produced.
-#
-# See:
-# - https://en.wikipedia.org/wiki/Characterization_test
-# - https://en.wikipedia.org/wiki/Gold_master_(disambiguation)
-#
-#
-# What we are doing is actually a type of Golden Master testing, with modifications:
-#
-# 1. The original markdown examples used to drive the tests are taken from this YAML, and can be
-# considered a form of "fixture" in this case.
-# 2. The HTML in the YAML is the "Golden Master", but we are going to use it to assert
-# against **TWO** different implementations of markdown rendering:
-# 1. The frontend, implemented as Jest specs.
-# 1. This will assert both HTML -> markdown serialization (what it currently does), as well as...
-# 2. Markdown -> HTML rendering (not currently implemented in the frontend - this will likely
-# be a standalone module outside of the Content Editor)
-# 1. The backend, implemented as requests specs
-# 1. This will assert markdown -> HTML conversion by the backend.
-#
-# Also see the MR for more explanation on the details of this approach:
-# https://gitlab.com/gitlab-org/gitlab/-/merge_requests/68671
-#
-#
-# Usage:
-#
-# * Please keep this file alphabetized.
-# * To run focused example(s), set the `FOCUSED_MARKDOWN_EXAMPLES` environment variable to a
-# comma-separated list of example names. This works for the frontend and backend specs.
-# * Required attributes for every example:
-# 1. `name`: Specifies the Name of the example, which will be printed when specs are run.
-# 2. `markdown`: Specifies the Markdown for the example, which will be compared with the
-# Markdown the code generates from the corresponding specified HTML.
-# 3. `html`: Specifies the HTML for the example, which will be compared with the
-# HTML the code generated from the corresponding specified Markdown.
-# * `api_context` (optional): This is used when a single markdown can be
-# rendered differently depending on the API endpoint or area of the app from which it is called or
-# used. The valid values for `api_context` are: `project`, `group`, `project_wiki`,
-# and (for EE only) `group_wiki`. The `name` attribute must also have a `_for_[API_CONTEXT]` suffix
-# which matches the `api_context`, in order to ensure that each example has a unique `name`
-# identifier. For example, `attachment_image_for_project`.
-# * `pending`: To skip an example that is broken or not yet fully implemented, add
-# a `pending: <reason with issue/MR URL>` attribute to the example. See
-#   the `an_example_of_pending` entry for an example.
-# * `pending` with key: You can also mark an example pending on only the frontend or backend. See
-#   the `an_example_of_pending_with_keys` entry for an example.
-# * `substitutions`: For examples which may have variable content in different environments,
-# such as portions of the URI, or database record IDs, you can specify
-# `substitutions`, which is an array of regex/replacement pairs. The HTML
-# value will be normalized with each of these pairs using Ruby `gsub`
-#                    before comparing (see the sketch just below this list).
-#                    The substitution values can also be (and are) reused in multiple examples
-#                    via YAML anchors.
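-#
-#   As a rough sketch, each substitution pair is conceptually applied to the HTML with `gsub`
-#   before the comparison is made, along these lines (illustrative Ruby only; `substitutions`
-#   and `html` are hypothetical local variables, and the exact helper that consumes this file
-#   may differ):
-#
-#     # Normalize the HTML with every regex/replacement pair before comparing.
-#     substitutions.each_value do |pairs|
-#       pairs.each do |pair|
-#         html = html.gsub(Regexp.new(pair['regex']), pair['replacement'])
-#       end
-#     end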
-#
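-#   As mentioned in `Usage` above, `FOCUSED_MARKDOWN_EXAMPLES` is read from the environment, so
-#   it can be set when invoking the specs. For example, to run only the `bold` and `color_chips`
-#   examples (illustrative invocations only; use the spec paths and runner you normally use):
-#
-#     FOCUSED_MARKDOWN_EXAMPLES=bold,color_chips bundle exec rspec <path to the backend spec>
-#     FOCUSED_MARKDOWN_EXAMPLES=bold,color_chips yarn jest <path to the frontend spec>
-#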
-#
-# Notes:
-#
-# * The html values should exactly match what the backend markdown API endpoints return for the
-# given markdown example. The HTML is intentionally not indented, formatted, or split across lines.
-# This is a bit less readable, but it makes the spec logic simpler and less error prone for edge
-# cases.
-#
-#
-# Debugging Failures and Writing New Entries:
-#
-# * You need to compare what is different between the expected and actual values.
-# * In RSpec, the printed diff includes the full text of the HTML. This may be long, so you
-#   may want to toggle line wrapping, or copy the diff to separate file(s) for easier comparison.
-# * If the difference is just in an attribute value, use the `substitutions` support to normalize
-#   the HTML before comparing. These specs only validate the HTML structure; the individual
-#   markdown elements' unit tests can provide coverage that the exact attribute values are correct.
-# * If you are making a new entry, you can create the entry according to the `Usage` section above,
-#   but leave the `html` value blank, as sketched below. This will cause the spec to fail, and you
-#   can fill in the `html` value based on the spec failure that is printed out.
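-#
-#   For example, a brand-new entry might initially look like the following (the name here is
-#   purely hypothetical), and the `html` value is then filled in from the reported failure:
-#
-#     - name: my_new_element
-#       markdown: |-
-#         some new markdown
-#       html: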
-
----
-#- name: an_example_of_pending
-# pending: 'This is an example of the pending attribute: http://example.com'
-# markdown: ;)
-# html: |-
-# <blink data-sourcepos="1:1-1:2"/></blink>
-#
-
-#- name: an_example_of_pending_with_keys
-# pending:
-# frontend: 'This is an example of the frontend-only pending attribute: http://example.com'
-# backend: 'This is an example of the backend-only pending attribute: http://example.com'
-# markdown: ;)
-# html: |-
-# <blink data-sourcepos="1:1-1:2"/></blink>
-
-- name: attachment_image_for_group
- api_context: group
- substitutions:
- # Note: having the top level `substitutions` data structure be a hash of arrays
- # allows us to compose multiple substitutions via YAML anchors (YAML anchors
- # pointing to arrays can't be combined)
- uri_substitution: &uri_substitution
- # NOTE: We don't care about verifying specific attribute values here, that should be the
- # responsibility of unit tests. These tests are about the structure of the HTML.
- - regex: '(href|data-src)(=")(.*?)(test-file\.(png|zip)")'
- replacement: '\1\2URI_PREFIX\4'
- markdown: |-
- ![test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png)
- html: |-
- <p data-sourcepos="1:1-1:69" dir="auto"><a class="no-attachment-icon gfm" href="/groups/group58/-/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png" target="_blank" rel="noopener noreferrer" data-canonical-src="/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png" data-link="true"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="test-file" decoding="async" class="lazy gfm" data-src="/groups/group58/-/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png" data-canonical-src="/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png"></a></p>
-
-- name: attachment_image_for_project
- api_context: project
- substitutions:
- uri_substitution: *uri_substitution
- markdown: |-
- ![test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png)
- html: |-
- <p data-sourcepos="1:1-1:69" dir="auto"><a class="no-attachment-icon gfm" href="/group58/project22/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png" target="_blank" rel="noopener noreferrer" data-canonical-src="/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png" data-link="true"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="test-file" decoding="async" class="lazy gfm" data-src="/group58/project22/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png" data-canonical-src="/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.png"></a></p>
-
-- name: attachment_image_for_project_wiki
- api_context: project_wiki
- substitutions:
- uri_substitution: *uri_substitution
- markdown: |-
- ![test-file](test-file.png)
- html: |-
- <p data-sourcepos="1:1-1:27" dir="auto"><a class="no-attachment-icon" href="/group1/project1/-/wikis/test-file.png" target="_blank" rel="noopener noreferrer" data-canonical-src="test-file.png"><img alt="test-file" decoding="async" class="lazy" data-src="/group1/project1/-/wikis/test-file.png" data-canonical-src="test-file.png"></a></p>
-
-- name: attachment_link_for_group
- api_context: group
- substitutions:
- uri_substitution: *uri_substitution
- markdown: |-
- [test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip)
- html: |-
- <p data-sourcepos="1:1-1:68" dir="auto"><a href="/groups/group58/-/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip" data-canonical-src="/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip" data-link="true" class="gfm">test-file</a></p>
-
-- name: attachment_link_for_project
- api_context: project
- substitutions:
- uri_substitution: *uri_substitution
- markdown: |-
- [test-file](/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip)
- html: |-
- <p data-sourcepos="1:1-1:68" dir="auto"><a href="/group58/project22/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip" data-canonical-src="/uploads/aa45a38ec2cfe97433281b10bbff042c/test-file.zip" data-link="true" class="gfm">test-file</a></p>
-
-- name: attachment_link_for_project_wiki
- api_context: project_wiki
- substitutions:
- uri_substitution: *uri_substitution
-  # TODO: The current frontend example doesn't include the path; need to look into why it does after refactoring to the new golden master approach
- pending:
-    frontend: 'The current frontend example doesn''t include the path; need to look into why it does after refactoring to the new golden master approach'
- markdown: |-
- [test-file](test-file.zip)
- html: |-
- <p data-sourcepos="1:1-1:26" dir="auto"><a href="/group1/project1/-/wikis/test-file.zip" data-canonical-src="test-file.zip">test-file</a></p>
-
-- name: audio
- markdown: |-
- ![Sample Audio](https://gitlab.com/gitlab.mp3)
- html: |-
- <p data-sourcepos="1:1-1:46" dir="auto"><span class="media-container audio-container"><audio src="https://gitlab.com/gitlab.mp3" controls="true" data-setup="{}" data-title="Sample Audio"></audio><a href="https://gitlab.com/gitlab.mp3" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Audio'">Sample Audio</a></span></p>
-
-- name: audio_and_video_in_lists
- markdown: |-
- * ![Sample Audio](https://gitlab.com/1.mp3)
- * ![Sample Video](https://gitlab.com/2.mp4)
-
- 1. ![Sample Video](https://gitlab.com/1.mp4)
- 2. ![Sample Audio](https://gitlab.com/2.mp3)
-
- * [x] ![Sample Audio](https://gitlab.com/1.mp3)
- * [x] ![Sample Audio](https://gitlab.com/2.mp3)
- * [x] ![Sample Video](https://gitlab.com/3.mp4)
- html: |-
- <ul data-sourcepos="1:1-3:0" dir="auto">
- <li data-sourcepos="1:1-1:43"><span class="media-container audio-container"><audio src="https://gitlab.com/1.mp3" controls="true" data-setup="{}" data-title="Sample Audio"></audio><a href="https://gitlab.com/1.mp3" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Audio'">Sample Audio</a></span></li>
- <li data-sourcepos="2:1-3:0"><span class="media-container video-container"><video src="https://gitlab.com/2.mp4" controls="true" data-setup="{}" data-title="Sample Video" width="400" preload="metadata"></video><a href="https://gitlab.com/2.mp4" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Video'">Sample Video</a></span></li>
- </ul>
- <ol data-sourcepos="4:1-6:0" dir="auto">
- <li data-sourcepos="4:1-4:44"><span class="media-container video-container"><video src="https://gitlab.com/1.mp4" controls="true" data-setup="{}" data-title="Sample Video" width="400" preload="metadata"></video><a href="https://gitlab.com/1.mp4" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Video'">Sample Video</a></span></li>
- <li data-sourcepos="5:1-6:0"><span class="media-container audio-container"><audio src="https://gitlab.com/2.mp3" controls="true" data-setup="{}" data-title="Sample Audio"></audio><a href="https://gitlab.com/2.mp3" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Audio'">Sample Audio</a></span></li>
- </ol>
- <ul data-sourcepos="7:1-9:47" class="task-list" dir="auto">
- <li data-sourcepos="7:1-7:47" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> <span class="media-container audio-container"><audio src="https://gitlab.com/1.mp3" controls="true" data-setup="{}" data-title="Sample Audio"></audio><a href="https://gitlab.com/1.mp3" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Audio'">Sample Audio</a></span>
- </li>
- <li data-sourcepos="8:1-8:47" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> <span class="media-container audio-container"><audio src="https://gitlab.com/2.mp3" controls="true" data-setup="{}" data-title="Sample Audio"></audio><a href="https://gitlab.com/2.mp3" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Audio'">Sample Audio</a></span>
- </li>
- <li data-sourcepos="9:1-9:47" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> <span class="media-container video-container"><video src="https://gitlab.com/3.mp4" controls="true" data-setup="{}" data-title="Sample Video" width="400" preload="metadata"></video><a href="https://gitlab.com/3.mp4" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Video'">Sample Video</a></span>
- </li>
- </ul>
-
-- name: blockquote
- markdown: |-
- > This is a blockquote
- >
- > This is another one
- html: |-
- <blockquote data-sourcepos="1:1-3:21" dir="auto">
- <p data-sourcepos="1:3-1:22">This is a blockquote</p>
- <p data-sourcepos="3:3-3:21">This is another one</p>
- </blockquote>
-
-- name: bold
- markdown: |-
- **bold**
- html: |-
- <p data-sourcepos="1:1-1:8" dir="auto"><strong>bold</strong></p>
-
-- name: bullet_list_style_1
- markdown: |-
- * list item 1
- * list item 2
- * embedded list item 3
- html: |-
- <ul data-sourcepos="1:1-3:24" dir="auto">
- <li data-sourcepos="1:1-1:13">list item 1</li>
- <li data-sourcepos="2:1-3:24">list item 2
- <ul data-sourcepos="3:3-3:24">
- <li data-sourcepos="3:3-3:24">embedded list item 3</li>
- </ul>
- </li>
- </ul>
-
-- name: bullet_list_style_2
- markdown: |-
- - list item 1
- - list item 2
- * embedded list item 3
- html: |-
- <ul data-sourcepos="1:1-3:24" dir="auto">
- <li data-sourcepos="1:1-1:13">list item 1</li>
- <li data-sourcepos="2:1-3:24">list item 2
- <ul data-sourcepos="3:3-3:24">
- <li data-sourcepos="3:3-3:24">embedded list item 3</li>
- </ul>
- </li>
- </ul>
-
-- name: bullet_list_style_3
- markdown: |-
- + list item 1
- + list item 2
- - embedded list item 3
- html: |-
- <ul data-sourcepos="1:1-3:24" dir="auto">
- <li data-sourcepos="1:1-1:13">list item 1</li>
- <li data-sourcepos="2:1-3:24">list item 2
- <ul data-sourcepos="3:3-3:24">
- <li data-sourcepos="3:3-3:24">embedded list item 3</li>
- </ul>
- </li>
- </ul>
-
-- name: code_block_javascript
- markdown: |-
- ```javascript
- console.log('hello world')
- ```
- html: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-3:3" lang="javascript" class="code highlight js-syntax-highlight language-javascript" v-pre="true"><code><span id="LC1" class="line" lang="javascript"> <span class="nx">console</span><span class="p">.</span><span class="nx">log</span><span class="p">(</span><span class="dl">'</span><span class="s1">hello world</span><span class="dl">'</span><span class="p">)</span></span></code></pre>
- <copy-code></copy-code>
- </div>
-
-- name: code_block_plaintext
- markdown: |-
- ```
- plaintext
- ```
- html: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-3:3" lang="plaintext" class="code highlight js-syntax-highlight language-plaintext" data-canonical-lang="" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> plaintext</span></code></pre>
- <copy-code></copy-code>
- </div>
-
-- name: code_block_unknown
- markdown: |-
- ```foobar
- custom_language = >> this <<
- ```
- html: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-3:3" lang="plaintext" class="code highlight js-syntax-highlight language-plaintext" data-canonical-lang="foobar" v-pre="true"><code><span id="LC1" class="line" lang="plaintext"> custom_language = &gt;&gt; this &lt;&lt;</span></code></pre>
- <copy-code></copy-code>
- </div>
-
-- name: color_chips
- markdown: |-
- - `#F00`
- - `#F00A`
- - `#FF0000`
- - `#FF0000AA`
- - `RGB(0,255,0)`
- - `RGB(0%,100%,0%)`
- - `RGBA(0,255,0,0.3)`
- - `HSL(540,70%,50%)`
- - `HSLA(540,70%,50%,0.3)`
- html: |-
- <ul data-sourcepos="1:1-9:25" dir="auto">
- <li data-sourcepos="1:1-1:8"><code>#F00<span class="gfm-color_chip"><span style="background-color: #F00;"></span></span></code></li>
- <li data-sourcepos="2:1-2:9"><code>#F00A<span class="gfm-color_chip"><span style="background-color: #F00A;"></span></span></code></li>
- <li data-sourcepos="3:1-3:11"><code>#FF0000<span class="gfm-color_chip"><span style="background-color: #FF0000;"></span></span></code></li>
- <li data-sourcepos="4:1-4:13"><code>#FF0000AA<span class="gfm-color_chip"><span style="background-color: #FF0000AA;"></span></span></code></li>
- <li data-sourcepos="5:1-5:16"><code>RGB(0,255,0)<span class="gfm-color_chip"><span style="background-color: RGB(0,255,0);"></span></span></code></li>
- <li data-sourcepos="6:1-6:19"><code>RGB(0%,100%,0%)<span class="gfm-color_chip"><span style="background-color: RGB(0%,100%,0%);"></span></span></code></li>
- <li data-sourcepos="7:1-7:21"><code>RGBA(0,255,0,0.3)<span class="gfm-color_chip"><span style="background-color: RGBA(0,255,0,0.3);"></span></span></code></li>
- <li data-sourcepos="8:1-8:20"><code>HSL(540,70%,50%)<span class="gfm-color_chip"><span style="background-color: HSL(540,70%,50%);"></span></span></code></li>
- <li data-sourcepos="9:1-9:25"><code>HSLA(540,70%,50%,0.3)<span class="gfm-color_chip"><span style="background-color: HSLA(540,70%,50%,0.3);"></span></span></code></li>
- </ul>
-
-- name: comment
- markdown: |-
- <!-- this is a
- multiline markdown
- comment -->
- html: |-
- <!-- this is a
- multiline markdown
- comment -->
-
-- name: description_list
- markdown: |-
- <dl>
- <dt>Frog</dt>
- <dd>Wet green thing</dd>
- <dt>Rabbit</dt>
- <dd>Warm fluffy thing</dd>
- <dt>Punt</dt>
- <dd>Kick a ball</dd>
- <dd>Take a bet</dd>
- <dt>Color</dt>
- <dt>Colour</dt>
- <dd>
-
- Any hue except _white_ or **black**
-
- </dd>
- </dl>
- html: |-
- <dl>
- <dt>Frog</dt>
- <dd>Wet green thing</dd>
- <dt>Rabbit</dt>
- <dd>Warm fluffy thing</dd>
- <dt>Punt</dt>
- <dd>Kick a ball</dd>
- <dd>Take a bet</dd>
- <dt>Color</dt>
- <dt>Colour</dt>
- <dd>
- <p data-sourcepos="13:1-13:35">Any hue except <em>white</em> or <strong>black</strong></p>
- </dd>
- </dl>
-
-- name: details
- markdown: |-
- <details>
- <summary>This is the visible summary of the collapsible section</summary>
-
- 1. collapsed markdown
- 2. more collapsed markdown
-
- </details>
- html: |-
- <details>
- <summary>This is the visible summary of the collapsible section</summary>
- <ol data-sourcepos="4:1-6:0">
- <li data-sourcepos="4:1-4:21">collapsed markdown</li>
- <li data-sourcepos="5:1-6:0">more collapsed markdown</li>
- </ol>
- </details>
-
-- name: diagram_kroki_nomnoml
- markdown: |-
- ```nomnoml
- #stroke: #a86128
- [<frame>Decorator pattern|
- [<abstract>Component||+ operation()]
- [Client] depends --> [Component]
- [Decorator|- next: Component]
- [Decorator] decorates -- [ConcreteComponent]
- [Component] <:- [Decorator]
- [Component] <:- [ConcreteComponent]
- ]
- ```
- html: |-
- <a class="no-attachment-icon" href="http://localhost:8000/nomnoml/svg/eNp1jbsOwjAMRfd-haUuIJQBBlRFVZb2L1CGkBqpgtpR6oEhH0_CW6hsts-9xwD1LJHPqKF2zX67ayqAQ3uKbkLTo-fohCMEJ4KRUoYFu2MuOS-m4ykwIUlKG-CAOT0yrdb2EewuY2YWBgxIwwxKmXx8dZ6h95ekgPAqGv4miuk-YnEVFfmIgr-Fzw6tVt-CZb7osdUNUAReJA==" target="_blank" rel="noopener noreferrer" data-diagram="nomnoml" data-diagram-src="data:text/plain;base64,ICAjc3Ryb2tlOiAjYTg2MTI4CiAgWzxmcmFtZT5EZWNvcmF0b3IgcGF0dGVybnwKICAgIFs8YWJzdHJhY3Q+Q29tcG9uZW50fHwrIG9wZXJhdGlvbigpXQogICAgW0NsaWVudF0gZGVwZW5kcyAtLT4gW0NvbXBvbmVudF0KICAgIFtEZWNvcmF0b3J8LSBuZXh0OiBDb21wb25lbnRdCiAgICBbRGVjb3JhdG9yXSBkZWNvcmF0ZXMgLS0gW0NvbmNyZXRlQ29tcG9uZW50XQogICAgW0NvbXBvbmVudF0gPDotIFtEZWNvcmF0b3JdCiAgICBbQ29tcG9uZW50XSA8Oi0gW0NvbmNyZXRlQ29tcG9uZW50XQogIF0K"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" class="js-render-kroki lazy" decoding="async" data-src="http://localhost:8000/nomnoml/svg/eNp1jbsOwjAMRfd-haUuIJQBBlRFVZb2L1CGkBqpgtpR6oEhH0_CW6hsts-9xwD1LJHPqKF2zX67ayqAQ3uKbkLTo-fohCMEJ4KRUoYFu2MuOS-m4ykwIUlKG-CAOT0yrdb2EewuY2YWBgxIwwxKmXx8dZ6h95ekgPAqGv4miuk-YnEVFfmIgr-Fzw6tVt-CZb7osdUNUAReJA=="></a>
-
-- name: diagram_plantuml
- markdown: |-
- ```plantuml
- Alice -> Bob: Authentication Request
- Bob --> Alice: Authentication Response
-
- Alice -> Bob: Another authentication Request
- Alice <-- Bob: Another authentication Response
- ```
- html: |-
- <a class="no-attachment-icon" href="http://localhost:8080/png/U9nJK73CoKnELT2rKt3AJx9IS2mjoKZDAybCJYp9pCzJ24ejB4qjBk5I0Cagw09LWPLZKLTSa9zNdCe5L8bcO5u-K6MHGY8kWo7ARNHr2QY7MW00AeWxTG00" target="_blank" rel="noopener noreferrer" data-diagram="plantuml" data-diagram-src="data:text/plain;base64,ICBBbGljZSAtPiBCb2I6IEF1dGhlbnRpY2F0aW9uIFJlcXVlc3QKICBCb2IgLS0+IEFsaWNlOiBBdXRoZW50aWNhdGlvbiBSZXNwb25zZQoKICBBbGljZSAtPiBCb2I6IEFub3RoZXIgYXV0aGVudGljYXRpb24gUmVxdWVzdAogIEFsaWNlIDwtLSBCb2I6IEFub3RoZXIgYXV0aGVudGljYXRpb24gUmVzcG9uc2UK"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" decoding="async" class="lazy" data-src="http://localhost:8080/png/U9nJK73CoKnELT2rKt3AJx9IS2mjoKZDAybCJYp9pCzJ24ejB4qjBk5I0Cagw09LWPLZKLTSa9zNdCe5L8bcO5u-K6MHGY8kWo7ARNHr2QY7MW00AeWxTG00"></a>
-
-- name: diagram_plantuml_unicode
- markdown: |-
- ```plantuml
- A -> B : Text with norwegian characters: æøå
- ```
- html: |-
- <a class="no-attachment-icon" href="http://localhost:8080/png/U9npLD2rKt1Ii588IQqeKIZFBCbGoCilAazDpqpCKqZEI2nAJ2v9BIgsKZYyxF2Emqkv07hO4WG0" target="_blank" rel="noopener noreferrer" data-diagram="plantuml" data-diagram-src="data:text/plain;base64,QSAtPiBCIDogVGV4dCB3aXRoIG5vcndlZ2lhbiBjaGFyYWN0ZXJzOiDDpsO4w6UK"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" decoding="async" class="lazy" data-src="http://localhost:8080/png/U9npLD2rKt1Ii588IQqeKIZFBCbGoCilAazDpqpCKqZEI2nAJ2v9BIgsKZYyxF2Emqkv07hO4WG0"></a>
-
-- name: div
- markdown: |-
- <div>plain text</div>
- <div>
-
- just a plain ol' div, not much to _expect_!
-
- </div>
- html: |-
- <div>plain text</div>
- <div>
- <p data-sourcepos="4:1-4:43">just a plain ol' div, not much to <em>expect</em>!</p>
- </div>
-
-- name: emoji
- markdown: |-
- :sparkles: :heart: :100:
- html: |-
- <p data-sourcepos="1:1-1:24" dir="auto"><gl-emoji title="sparkles" data-name="sparkles" data-unicode-version="6.0">✨</gl-emoji> <gl-emoji title="heavy black heart" data-name="heart" data-unicode-version="1.1">❤</gl-emoji> <gl-emoji title="hundred points symbol" data-name="100" data-unicode-version="6.0">💯</gl-emoji></p>
-
-- name: emphasis
- markdown: _emphasized text_
- html: <p data-sourcepos="1:1-1:17" dir="auto"><em>emphasized text</em></p>
-
-- name: figure
- markdown: |-
- <figure>
-
- ![Elephant at sunset](elephant-sunset.jpg)
-
- <figcaption>An elephant at sunset</figcaption>
- </figure>
- <figure>
-
- ![A crocodile wearing crocs](croc-crocs.jpg)
-
- <figcaption>
-
- A crocodile wearing _crocs_!
-
- </figcaption>
- </figure>
- html: |-
- <figure>
- <p data-sourcepos="3:1-3:42"><a class="no-attachment-icon" href="elephant-sunset.jpg" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="Elephant at sunset" decoding="async" class="lazy" data-src="elephant-sunset.jpg"></a></p>
- <figcaption>An elephant at sunset</figcaption>
- </figure>
- <figure>
- <p data-sourcepos="9:1-9:44"><a class="no-attachment-icon" href="croc-crocs.jpg" target="_blank" rel="noopener noreferrer"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="A crocodile wearing crocs" decoding="async" class="lazy" data-src="croc-crocs.jpg"></a></p>
- <figcaption>
- <p data-sourcepos="13:1-13:28">A crocodile wearing <em>crocs</em>!</p>
- </figcaption>
- </figure>
-
-- name: footnotes
- substitutions:
- # NOTE: We don't care about verifying specific attribute values here, that should be the
- # responsibility of unit tests. These tests are about the structure of the HTML.
- fn_href_substitution:
- - regex: '(href)(=")(.+?)(")'
- replacement: '\1\2REF\4'
- footnote_id_substitution:
- - regex: '(id)(=")(.+?)(")'
- replacement: '\1\2ID\4'
-
- pending:
- backend: https://gitlab.com/gitlab-org/gitlab/-/issues/346591
- markdown: |-
- A footnote reference tag looks like this: [^1]
-
- This reference tag is a mix of letters and numbers. [^footnote]
-
- [^1]: This is the text inside a footnote.
-
- [^footnote]: This is another footnote.
- html: |-
- <p data-sourcepos="1:1-1:46" dir="auto">A footnote reference tag looks like this: <sup class="footnote-ref"><a href="#fn-1-2717" id="fnref-1-2717" data-footnote-ref="">1</a></sup></p>
- <p data-sourcepos="3:1-3:56" dir="auto">This reference tag is a mix of letters and numbers. <sup class="footnote-ref"><a href="#fn-footnote-2717" id="fnref-footnote-2717" data-footnote-ref="">2</a></sup></p>
- <section class="footnotes" data-footnotes><ol>
- <li id="fn-1-2717">
- <p data-sourcepos="5:7-5:41">This is the text inside a footnote. <a href="#fnref-1-2717" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
- </li>
- <li id="fn-footnote-2717">
- <p data-sourcepos="6:7-6:31">This is another footnote. <a href="#fnref-footnote-2717" aria-label="Back to content" class="footnote-backref" data-footnote-backref=""><gl-emoji title="leftwards arrow with hook" data-name="leftwards_arrow_with_hook" data-unicode-version="1.1">↩</gl-emoji></a></p>
- </li>
- </ol></section>
-
-- name: frontmatter_json
- markdown: |-
- ;;;
- {
- "title": "Page title"
- }
- ;;;
- html: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-5:3" lang="json" class="code highlight js-syntax-highlight language-json" data-lang-params="frontmatter" v-pre="true"><code><span id="LC1" class="line" lang="json"><span class="p">{</span></span>
- <span id="LC2" class="line" lang="json"><span class="w"> </span><span class="nl">"title"</span><span class="p">:</span><span class="w"> </span><span class="s2">"Page title"</span></span>
- <span id="LC3" class="line" lang="json"><span class="p">}</span></span></code></pre>
- <copy-code></copy-code>
- </div>
-
-- name: frontmatter_toml
- markdown: |-
- +++
- title = "Page title"
- +++
- html: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-3:3" lang="toml" class="code highlight js-syntax-highlight language-toml" data-lang-params="frontmatter" v-pre="true"><code><span id="LC1" class="line" lang="toml"><span class="py">title</span> <span class="p">=</span> <span class="s">"Page title"</span></span></code></pre>
- <copy-code></copy-code>
- </div>
-
-- name: frontmatter_yaml
- markdown: |-
- ---
- title: Page title
- ---
- html: |-
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="1:1-3:3" lang="yaml" class="code highlight js-syntax-highlight language-yaml" data-lang-params="frontmatter" v-pre="true"><code><span id="LC1" class="line" lang="yaml"><span class="na">title</span><span class="pi">:</span> <span class="s">Page title</span></span></code></pre>
- <copy-code></copy-code>
- </div>
-
-- name: hard_break
- markdown: |-
- This is a line after a\
- hard break
- html: |-
- <p data-sourcepos="1:1-2:10" dir="auto">This is a line after a<br>
- hard break</p>
-
-- name: headings
- markdown: |-
- # Heading 1
-
- ## Heading 2
-
- ### Heading 3
-
- #### Heading 4
-
- ##### Heading 5
-
- ###### Heading 6
- html: |-
- <h1 data-sourcepos="1:1-1:11" dir="auto">
- <a id="user-content-heading-1" class="anchor" href="#heading-1" aria-hidden="true"></a>Heading 1</h1>
- <h2 data-sourcepos="3:1-3:12" dir="auto">
- <a id="user-content-heading-2" class="anchor" href="#heading-2" aria-hidden="true"></a>Heading 2</h2>
- <h3 data-sourcepos="5:1-5:13" dir="auto">
- <a id="user-content-heading-3" class="anchor" href="#heading-3" aria-hidden="true"></a>Heading 3</h3>
- <h4 data-sourcepos="7:1-7:14" dir="auto">
- <a id="user-content-heading-4" class="anchor" href="#heading-4" aria-hidden="true"></a>Heading 4</h4>
- <h5 data-sourcepos="9:1-9:15" dir="auto">
- <a id="user-content-heading-5" class="anchor" href="#heading-5" aria-hidden="true"></a>Heading 5</h5>
- <h6 data-sourcepos="11:1-11:16" dir="auto">
- <a id="user-content-heading-6" class="anchor" href="#heading-6" aria-hidden="true"></a>Heading 6</h6>
-
-- name: horizontal_rule
- markdown: |-
- ---
- html: |-
- <hr data-sourcepos="1:1-1:3">
-
-- name: html_marks
- markdown: |-
- * Content editor is ~~great~~<ins>amazing</ins>.
- * If the changes <abbr title="Looks good to merge">LGTM</abbr>, please <abbr title="Merge when pipeline succeeds">MWPS</abbr>.
- * The English song <q>Oh I do like to be beside the seaside</q> looks like this in Hebrew: <span dir="rtl">אה, אני אוהב להיות ליד חוף הים</span>. In the computer's memory, this is stored as <bdo dir="ltr">אה, אני אוהב להיות ליד חוף הים</bdo>.
- * <cite>The Scream</cite> by Edvard Munch. Painted in 1893.
- * <dfn>HTML</dfn> is the standard markup language for creating web pages.
- * Do not forget to buy <mark>milk</mark> today.
- * This is a paragraph and <small>smaller text goes here</small>.
- * The concert starts at <time datetime="20:00">20:00</time> and you'll be able to enjoy the band for at least <time datetime="PT2H30M">2h 30m</time>.
- * Press <kbd>Ctrl</kbd> + <kbd>C</kbd> to copy text (Windows).
- * WWF's goal is to: <q>Build a future where people live in harmony with nature.</q> We hope they succeed.
- * The error occurred was: <samp>Keyboard not found. Press F1 to continue.</samp>
- * The area of a triangle is: 1/2 x <var>b</var> x <var>h</var>, where <var>b</var> is the base, and <var>h</var> is the vertical height.
- * <ruby>漢<rt>ㄏㄢˋ</rt></ruby>
- * C<sub>7</sub>H<sub>16</sub> + O<sub>2</sub> → CO<sub>2</sub> + H<sub>2</sub>O
- * The **Pythagorean theorem** is often expressed as <var>a<sup>2</sup></var> + <var>b<sup>2</sup></var> = <var>c<sup>2</sup></var>
- html: |-
- <ul data-sourcepos="1:1-15:130" dir="auto">
- <li data-sourcepos="1:1-1:48">Content editor is <del>great</del><ins>amazing</ins>.</li>
- <li data-sourcepos="2:1-2:126">If the changes <abbr title="Looks good to merge">LGTM</abbr>, please <abbr title="Merge when pipeline succeeds">MWPS</abbr>.</li>
- <li data-sourcepos="3:1-3:288">The English song <q>Oh I do like to be beside the seaside</q> looks like this in Hebrew: <span dir="rtl">אה, אני אוהב להיות ליד חוף הים</span>. In the computer's memory, this is stored as <bdo dir="ltr">אה, אני אוהב להיות ליד חוף הים</bdo>.</li>
- <li data-sourcepos="4:1-4:59">
- <cite>The Scream</cite> by Edvard Munch. Painted in 1893.</li>
- <li data-sourcepos="5:1-5:73">
- <dfn>HTML</dfn> is the standard markup language for creating web pages.</li>
- <li data-sourcepos="6:1-6:47">Do not forget to buy <mark>milk</mark> today.</li>
- <li data-sourcepos="7:1-7:64">This is a paragraph and <small>smaller text goes here</small>.</li>
- <li data-sourcepos="8:1-8:149">The concert starts at <time datetime="20:00">20:00</time> and you'll be able to enjoy the band for at least <time datetime="PT2H30M">2h 30m</time>.</li>
- <li data-sourcepos="9:1-9:62">Press <kbd>Ctrl</kbd> + <kbd>C</kbd> to copy text (Windows).</li>
- <li data-sourcepos="10:1-10:105">WWF's goal is to: <q>Build a future where people live in harmony with nature.</q> We hope they succeed.</li>
- <li data-sourcepos="11:1-11:80">The error occurred was: <samp>Keyboard not found. Press F1 to continue.</samp>
- </li>
- <li data-sourcepos="12:1-12:136">The area of a triangle is: 1/2 x <var>b</var> x <var>h</var>, where <var>b</var> is the base, and <var>h</var> is the vertical height.</li>
- <li data-sourcepos="13:1-13:35"><ruby>漢<rt>ㄏㄢˋ</rt></ruby></li>
- <li data-sourcepos="14:1-14:81">C<sub>7</sub>H<sub>16</sub> + O<sub>2</sub> → CO<sub>2</sub> + H<sub>2</sub>O</li>
- <li data-sourcepos="15:1-15:130">The <strong>Pythagorean theorem</strong> is often expressed as <var>a<sup>2</sup></var> + <var>b<sup>2</sup></var> = <var>c<sup>2</sup></var>
- </li>
- </ul>
-
-- name: image
- markdown: |-
- ![alt text](https://gitlab.com/logo.png)
- html: |-
- <p data-sourcepos="1:1-1:40" dir="auto"><a class="no-attachment-icon" href="https://gitlab.com/logo.png" target="_blank" rel="nofollow noreferrer noopener"><img src="data:image/gif;base64,R0lGODlhAQABAAAAACH5BAEKAAEALAAAAAABAAEAAAICTAEAOw==" alt="alt text" decoding="async" class="lazy" data-src="https://gitlab.com/logo.png"></a></p>
-
-- name: inline_code
- markdown: |-
- `code`
- html: |-
- <p data-sourcepos="1:1-1:6" dir="auto"><code>code</code></p>
-
-- name: inline_diff
- markdown: |-
- * {-deleted-}
- * {+added+}
- html: |-
- <ul data-sourcepos="1:1-2:11" dir="auto">
- <li data-sourcepos="1:1-1:13"><span class="idiff left right deletion">deleted</span></li>
- <li data-sourcepos="2:1-2:11"><span class="idiff left right addition">added</span></li>
- </ul>
-
-- name: label
- pending:
- # TODO: There is an error with the frontend HTML to markdown spec adding a double escape (\\) to the label tilde.
- frontend: 'There is an error with the frontend HTML to markdown spec adding a double escape (\\) to the label tilde.'
- markdown: |-
- ~bug
- html: |-
- <p data-sourcepos="1:1-1:4" dir="auto">~bug</p>
-
-- name: link
- markdown: |-
- [GitLab](https://gitlab.com)
- html: |-
- <p data-sourcepos="1:1-1:28" dir="auto"><a href="https://gitlab.com" rel="nofollow noreferrer noopener" target="_blank">GitLab</a></p>
-
-- name: math
- markdown: |-
- This math is inline $`a^2+b^2=c^2`$.
-
- This is on a separate line:
-
- ```math
- a^2+b^2=c^2
- ```
- html: |-
- <p data-sourcepos="1:1-1:36" dir="auto">This math is inline <code class="code math js-render-math" data-math-style="inline">a^2+b^2=c^2</code>.</p>
- <p data-sourcepos="3:1-3:27" dir="auto">This is on a separate line:</p>
- <div class="gl-relative markdown-code-block js-markdown-code">
- <pre data-sourcepos="5:1-7:3" lang="math" data-math-style="display" class="js-render-math code highlight js-syntax-highlight language-math" v-pre="true"><code><span id="LC1" class="line" lang="math">a^2+b^2=c^2</span></code></pre>
- <copy-code></copy-code>
- </div>
-
-- name: ordered_list
- markdown: |-
- 1. list item 1
- 2. list item 2
- 3. list item 3
- html: |-
- <ol data-sourcepos="1:1-3:14" dir="auto">
- <li data-sourcepos="1:1-1:14">list item 1</li>
- <li data-sourcepos="2:1-2:14">list item 2</li>
- <li data-sourcepos="3:1-3:14">list item 3</li>
- </ol>
-
-- name: ordered_list_with_start_order
- markdown: |-
- 134. list item 1
- 135. list item 2
- 136. list item 3
- html: |-
- <ol start="134" data-sourcepos="1:1-3:16" dir="auto">
- <li data-sourcepos="1:1-1:16">list item 1</li>
- <li data-sourcepos="2:1-2:16">list item 2</li>
- <li data-sourcepos="3:1-3:16">list item 3</li>
- </ol>
-
-- name: ordered_task_list
- markdown: |-
- 1. [x] hello
- 2. [x] world
- 3. [ ] example
- 1. [ ] of nested
- 1. [x] task list
- 2. [ ] items
- html: |-
- <ol data-sourcepos="1:1-6:18" class="task-list" dir="auto">
- <li data-sourcepos="1:1-1:12" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> hello</li>
- <li data-sourcepos="2:1-2:12" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> world</li>
- <li data-sourcepos="3:1-6:18" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> example
- <ol data-sourcepos="4:4-6:18" class="task-list">
- <li data-sourcepos="4:4-6:18" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> of nested
- <ol data-sourcepos="5:7-6:18" class="task-list">
- <li data-sourcepos="5:7-5:22" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> task list</li>
- <li data-sourcepos="6:7-6:18" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> items</li>
- </ol>
- </li>
- </ol>
- </li>
- </ol>
-
-- name: ordered_task_list_with_order
- markdown: |-
- 4893. [x] hello
- 4894. [x] world
- 4895. [ ] example
- html: |-
- <ol start="4893" data-sourcepos="1:1-3:17" class="task-list" dir="auto">
- <li data-sourcepos="1:1-1:15" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> hello</li>
- <li data-sourcepos="2:1-2:15" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> world</li>
- <li data-sourcepos="3:1-3:17" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> example</li>
- </ol>
-
-- name: reference_for_project_wiki
- api_context: project_wiki
- substitutions:
- # NOTE: We don't care about verifying specific attribute values here, that should be the
- # responsibility of unit tests. These tests are about the structure of the HTML.
- uri_substitution: *uri_substitution
- data_attribute_id_substitution:
- - regex: '(data-user|data-project|data-issue|data-iid|data-merge-request|data-milestone|data-label)(=")(\d+?)(")'
- replacement: '\1\2ID\4'
- text_attribute_substitution:
- - regex: '(title)(=")([^"]*)(")'
- replacement: '\1\2TEXT\4'
- path_attribute_id_substitution:
- - regex: '(group|project)(\d+)'
- replacement: '\1ID'
- markdown: |-
- Hi @gfm_user - thank you for reporting this ~"UX bug" (#1) we hope to fix it in %1.1 as part of !1
- html: |-
- <p data-sourcepos="1:1-1:98" dir="auto">Hi <a href="/gfm_user" data-reference-type="user" data-user="1" data-container="body" data-placement="top" class="gfm gfm-project_member js-user-link" title="John Doe1">@gfm_user</a> - thank you for reporting this <span class="gl-label gl-label-sm"><a href="/groupID/projectID/-/issues?label_name=UX+bug" data-reference-type="label" data-original='~"UX bug"' data-link="false" data-link-reference="false" data-project="ID" data-label="2" data-container="body" data-placement="top" title="TEXT" class="gfm gfm-label has-tooltip gl-link gl-label-link"><span class="gl-label-text gl-label-text-light" data-container="body" data-html="true" style="background-color: #990000">UX bug</span></a></span> (<a href="/group1/project1/-/issues/1" data-reference-type="issue" data-original="#1" data-link="false" data-link-reference="false" data-project="11" data-issue="11" data-project-path="group1/project1" data-iid="1" data-issue-type="issue" data-container="body" data-placement="top" title="My title 1" class="gfm gfm-issue">#1</a>) we hope to fix it in <a href="/group1/project1/-/milestones/1" data-reference-type="milestone" data-original="%1.1" data-link="false" data-link-reference="false" data-project="11" data-milestone="11" data-container="body" data-placement="top" title="" class="gfm gfm-milestone has-tooltip">%1.1</a> as part of <a href="/group1/project1/-/merge_requests/1" data-reference-type="merge_request" data-original="!1" data-link="false" data-link-reference="false" data-project="11" data-merge-request="11" data-project-path="group1/project1" data-iid="1" data-container="body" data-placement="top" title="My title 2" class="gfm gfm-merge_request">!1</a></p>
-- name: strike
- markdown: |-
- ~~del~~
- html: |-
- <p data-sourcepos="1:1-1:7" dir="auto"><del>del</del></p>
-
-- name: table
- markdown: |-
- | header | header |
- |--------|--------|
- | `code` | cell with **bold** |
- | ~~strike~~ | cell with _italic_ |
-
- # content after table
- html: |-
- <table data-sourcepos="1:1-4:35" dir="auto">
- <thead>
- <tr data-sourcepos="1:1-1:19">
- <th data-sourcepos="1:2-1:9">header</th>
- <th data-sourcepos="1:11-1:18">header</th>
- </tr>
- </thead>
- <tbody>
- <tr data-sourcepos="3:1-3:31">
- <td data-sourcepos="3:2-3:9"><code>code</code></td>
- <td data-sourcepos="3:11-3:30">cell with <strong>bold</strong>
- </td>
- </tr>
- <tr data-sourcepos="4:1-4:35">
- <td data-sourcepos="4:2-4:13"><del>strike</del></td>
- <td data-sourcepos="4:15-4:34">cell with <em>italic</em>
- </td>
- </tr>
- </tbody>
- </table>
- <h1 data-sourcepos="6:1-6:21" dir="auto">
- <a id="user-content-content-after-table" class="anchor" href="#content-after-table" aria-hidden="true"></a>content after table</h1>
-
-- name: table_of_contents
- markdown: |-
- [[_TOC_]]
-
- # Lorem
-
- Well, that's just like... your opinion.. man.
-
- ## Ipsum
-
- ### Dolar
-
- # Sit amit
-
- ### I don't know
- html: |-
- <ul class="section-nav">
- <li>
- <a href="#lorem">Lorem</a><ul><li>
- <a href="#ipsum">Ipsum</a><ul><li><a href="#dolar">Dolar</a></li></ul>
- </li></ul>
- </li>
- <li>
- <a href="#sit-amit">Sit amit</a><ul><li><a href="#i-dont-know">I don't know</a></li></ul>
- </li>
- </ul>
- <h1 data-sourcepos="3:1-3:7" dir="auto">
- <a id="user-content-lorem" class="anchor" href="#lorem" aria-hidden="true"></a>Lorem</h1>
- <p data-sourcepos="5:1-5:45" dir="auto">Well, that's just like... your opinion.. man.</p>
- <h2 data-sourcepos="7:1-7:8" dir="auto">
- <a id="user-content-ipsum" class="anchor" href="#ipsum" aria-hidden="true"></a>Ipsum</h2>
- <h3 data-sourcepos="9:1-9:9" dir="auto">
- <a id="user-content-dolar" class="anchor" href="#dolar" aria-hidden="true"></a>Dolar</h3>
- <h1 data-sourcepos="11:1-11:10" dir="auto">
- <a id="user-content-sit-amit" class="anchor" href="#sit-amit" aria-hidden="true"></a>Sit amit</h1>
- <h3 data-sourcepos="13:1-13:16" dir="auto">
- <a id="user-content-i-dont-know" class="anchor" href="#i-dont-know" aria-hidden="true"></a>I don't know</h3>
-
-- name: task_list
- markdown: |-
- * [x] hello
- * [x] world
- * [ ] example
- * [ ] of nested
- * [x] task list
- * [ ] items
- html: |-
- <ul data-sourcepos="1:1-6:15" class="task-list" dir="auto">
- <li data-sourcepos="1:1-1:11" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> hello</li>
- <li data-sourcepos="2:1-2:11" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> world</li>
- <li data-sourcepos="3:1-6:15" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> example
- <ul data-sourcepos="4:3-6:15" class="task-list">
- <li data-sourcepos="4:3-6:15" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> of nested
- <ul data-sourcepos="5:5-6:15" class="task-list">
- <li data-sourcepos="5:5-5:19" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" checked disabled> task list</li>
- <li data-sourcepos="6:5-6:15" class="task-list-item">
- <task-button></task-button><input type="checkbox" class="task-list-item-checkbox" disabled> items</li>
- </ul>
- </li>
- </ul>
- </li>
- </ul>
-
-- name: video
- markdown: |-
- ![Sample Video](https://gitlab.com/gitlab.mp4)
- html: |-
- <p data-sourcepos="1:1-1:46" dir="auto"><span class="media-container video-container"><video src="https://gitlab.com/gitlab.mp4" controls="true" data-setup="{}" data-title="Sample Video" width="400" preload="metadata"></video><a href="https://gitlab.com/gitlab.mp4" target="_blank" rel="nofollow noreferrer noopener" title="Download 'Sample Video'">Sample Video</a></span></p>
-
-- name: word_break
- markdown: Fernstraßen<wbr>bau<wbr>privat<wbr>finanzierungs<wbr>gesetz
- html: <p data-sourcepos="1:1-1:60" dir="auto">Fernstraßen<wbr>bau<wbr>privat<wbr>finanzierungs<wbr>gesetz</wbr></wbr></wbr></wbr></p>
diff --git a/spec/fixtures/tasks/gitlab/security/expected_banned_keys.yml b/spec/fixtures/tasks/gitlab/security/expected_banned_keys.yml
new file mode 100644
index 00000000000..2939b651f82
--- /dev/null
+++ b/spec/fixtures/tasks/gitlab/security/expected_banned_keys.yml
@@ -0,0 +1,12 @@
+---
+dsa:
+- SHA256:/JLp6z6uGE3BPcs70RQob6QOdEWQ6nDC0xY7ejPOCc0
+- SHA256:whDP3xjKBEettbDuecxtGsfWBST+78gb6McdB9P7jCU
+- SHA256:MEc4HfsOlMqJ3/9QMTmrKn5Xj/yfnMITMW8EwfUfTww
+- SHA256:aPoYT2nPIfhqv6BIlbCCpbDjirBxaDFOtPfZ2K20uWw
+- SHA256:VtjqZ5fiaeoZ3mXOYi49Lk9aO31iT4pahKFP9JPiQPc
+rsa:
+- SHA256:Z+q4XhSwWY7q0BIDVPR1v/S306FjGBsid7tLq/8kIxM
+- SHA256:uy5wXyEgbRCGsk23+J6f85om7G55Cu3UIPwC7oMZhNQ
+- SHA256:9prMbqhS4QteoFQ1ZRJDqSBLWoHXPyKB0iWR05Ghro4
+- SHA256:1M4RzhMyWuFS/86uPY/ce2prh/dVTHW7iD2RhpquOZA
diff --git a/spec/fixtures/tasks/gitlab/security/ssh-badkeys/LICENSE b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/LICENSE
new file mode 100644
index 00000000000..059a3757d67
--- /dev/null
+++ b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/LICENSE
@@ -0,0 +1,22 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 Rapid7
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/spec/fixtures/tasks/gitlab/security/ssh-badkeys/README.md b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/README.md
new file mode 100644
index 00000000000..4c0b33e1117
--- /dev/null
+++ b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/README.md
@@ -0,0 +1,12 @@
+# SSH Bad Keys
+
+This is a collection of static SSH keys (host and authentication) that have made their way into software and hardware products. This was inspired by the [Little Black Box](https://code.google.com/p/littleblackbox/) project, but focused primarily on SSH (as opposed to TLS) keys.
+
+Keys are split into two categories: authorized keys and host keys. The authorized keys can be used to gain access to a device with this public key. The host keys can be used to conduct a MITM attack against the device, but do not provide direct access.
+
+This collection depends on submissions from researchers to stay relevant. If you are aware of a static key (host or authorized), please open an [Issue](https://github.com/rapid7/ssh-badkeys/issues) or submit a Pull Request. The [Issues](https://github.com/rapid7/ssh-badkeys/issues) list also contains a wishlist of known bad keys that we would like to include.
+
+For additional key types and a broader scope, take a look at the [Kompromat](https://github.com/BenBE/kompromat) project.
+
+
+
diff --git a/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/array-networks-vapv-vxag.pub b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/array-networks-vapv-vxag.pub
new file mode 100644
index 00000000000..467e5fb566f
--- /dev/null
+++ b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/array-networks-vapv-vxag.pub
@@ -0,0 +1 @@
+ssh-dss AAAAB3NzaC1kc3MAAACBAJTDsX+8olPZeyr58g9XE0L8PKT5030NZBPlE7np4hBqx36HoWarWq1Csn8M57dWN9StKbs03k2ggY6sYJK5AW2EWar70um3pYjKQHiZq7mITmitsozFN/K7wu2e2iKRgquUwH5SuYoOJ29n7uhaILXiKZP4/H/dDudqPRSY6tJPAAAAFQDtuWH90mDbU2L/Ms2lfl/cja/wHwAAAIAMBwSHZt2ysOHCFe1WLUvdwVDHUqk3QHTskuuAnMlwMtSvCaUxSatdHahsMZ9VCHjoQUx6j+TcgRLDbMlRLnwUlb6wpniehLBFk+qakGcREqks5NxYzFTJXwROzP72jPvVgQyOZHWq81gCild/ljL7hmrduCqYwxDIz4o7U92UKQAAAIBmhSl9CVPgVMv1xO8DAHVhM1huIIK8mNFrzMJz+JXzBx81ms1kWSeQOC/nraaXFTBlqiQsvB8tzr4xZdbaI/QzVLKNAF5C8BJ4ScNlTIx1aZJwyMil8Nzb+0YAsw5Ja+bEZZvEVlAYnd10qRWrPeEY1txLMmX3wDa+JvJL7fmuBg==
diff --git a/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/barracuda_load_balancer_vm.pub b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/barracuda_load_balancer_vm.pub
new file mode 100644
index 00000000000..a8a832f162b
--- /dev/null
+++ b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/barracuda_load_balancer_vm.pub
@@ -0,0 +1 @@
+ssh-dss AAAAB3NzaC1kc3MAAACBAMq5EcIFdfCjJakyQnP/BBp9oc6mpaZVguf0Znp5C40twiG1lASQJZlM1qOB/hkBWYeBCHUkcOLEnVXSZzB62L+W/LGKodqnsiQPRr57AA6jPc6mNBnejHai8cSdAl9n/0s2IQjdcrxM8CPq2uEyfm0J3AV6Lrbbxr5NgE5xxM+DAAAAFQCmFk/M7Rx2jexsJ9COpHkHwUjcNQAAAIAdg18oByp/tjjDKhWhmmv+HbVIROkRqSxBvuEZEmcWlg38mLIT1bydfpSou/V4rI5ctxwCfJ1rRr66pw6GwCrz4fXmyVlhrj7TrktyQ9+zRXhynF4wdNPWErhNHb8tGlSOFiOBcUTlouX3V/ka6Dkd6ZQrZLQFaH+gjfyTZZ82HQAAAIEArsJgp7RLPOsCeLqoia/eljseBFVDazO5Q0ysUotTw9wgXGGVWREwm8wNggFNb9eCiBAAUfVZVfhVAtFT0pBf/eIVLPXyaMw3prBt7LqeBrbagODc3WAAdMTPIdYYcOKgv+YvTXa51zG64v6pQOfS8WXgKCzDl44puXfYeDk5lVQ=
diff --git a/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/ceragon-fibeair-cve-2015-0936.pub b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/ceragon-fibeair-cve-2015-0936.pub
new file mode 100644
index 00000000000..1a8016efb1e
--- /dev/null
+++ b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/ceragon-fibeair-cve-2015-0936.pub
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAwRIdDlHaIqZXND/l1vFT7ue3rc/DvXh2yx5EFtuxGQRHVxGMazDhV4vj5ANGXDQwUYI0iZh6aOVrDy8I/y9/y+YDGCvsnqrDbuPDjW26s2bBXWgUPiC93T3TA6L2KOxhVcl7mljEOIYACRHPpJNYVGhinCxDUH9LxMrdNXgP5Ok= mateidu@localhost
diff --git a/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/exagrid-cve-2016-1561.pub b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/exagrid-cve-2016-1561.pub
new file mode 100644
index 00000000000..387cd23fb47
--- /dev/null
+++ b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/exagrid-cve-2016-1561.pub
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAABJQAAAIBnZQ+6nhlPX/JnX5i5hXpljJ89bSnnrsSs51hSPuoJGmoKowBddISK7s10AIpO0xAWGcr8PUr2FOjEBbDHqlRxoXF0Ocms9xv3ql9EYUQ5+U+M6BymWhNTFPOs6gFHUl8Bw3t6c+SRKBpfRFB0yzBj9d093gSdfTAFoz+yLo4vRw==
diff --git a/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/f5-bigip-cve-2012-1493.pub b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/f5-bigip-cve-2012-1493.pub
new file mode 100644
index 00000000000..5cc9954edf4
--- /dev/null
+++ b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/f5-bigip-cve-2012-1493.pub
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAIEAvIhC5skTzxyHif/7iy3yhxuK6/OB13hjPqrskogkYFrcW8OK4VJT+5+Fx7wd4sQCnVn8rNqahw/x6sfcOMDI/Xvn4yKU4t8TnYf2MpUVr4ndz39L5Ds1n7Si1m2suUNxWbKv58I8+NMhlt2ITraSuTU0NGymWOc8+LNi+MHXdLk= SCCP Superuser
diff --git a/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/loadbalancer.org-enterprise-va.pub b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/loadbalancer.org-enterprise-va.pub
new file mode 100644
index 00000000000..e47ea5ca1fd
--- /dev/null
+++ b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/loadbalancer.org-enterprise-va.pub
@@ -0,0 +1 @@
+ssh-dss AAAAB3NzaC1kc3MAAACBAKwKBw7D4OA1H/uD4htdh04TBIHdbSjeXUSnWJsce8C0tvoB01Yarjv9TFj+tfeDYVWtUK1DA1JkyqSuoAtDANJzF4I6Isyd0KPrW3dHFTcg6Xlz8d3KEaHokY93NOmB/xWEkhme8b7Q0U2iZie2pgWbTLXV0FA+lhskTtPHW3+VAAAAFQDRyayUlVZKXEweF3bUe03zt9e8VQAAAIAEPK1k3Y6ErAbIl96dnUCnZjuWQ7xXy062pf63QuRWI6LYSscm3f1pEknWUNFr/erQ02pkfi2eP9uHl1TI1ql+UmJX3g3frfssLNZwWXAW0m8PbY3HZSs+f5hevM3ua32pnKDmbQ2WpvKNyycKHi81hSI14xMcdblJolhN5iY8/wAAAIAjEe5+0m/TlBtVkqQbUit+s/g+eB+PFQ+raaQdL1uztW3etntXAPH1MjxsAC/vthWYSTYXORkDFMhrO5ssE2rfg9io0NDyTIZt+VRQMGdi++dH8ptU+ldl2ZejLFdTJFwFgcfXz+iQ1mx6h9TPX1crE1KoMAVOj3yKVfKpLB1EkA== root@lbslave
diff --git a/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/monroe-dasdec-cve-2013-0137.pub b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/monroe-dasdec-cve-2013-0137.pub
new file mode 100644
index 00000000000..f7fee2c59ac
--- /dev/null
+++ b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/monroe-dasdec-cve-2013-0137.pub
@@ -0,0 +1 @@
+ssh-dss AAAAB3NzaC1kc3MAAACBAN3AITryJMQyOKZjAky+mQ/8pOHIlu4q8pzmR0qotKaLm2yye5a0PY2rOaQRAzi7EPheBXbqTb8a8TrHhGXI5P7GUHaJho5HhEnw+5TwAvP72L7LcPwxMxj/rLcR/jV+uLMsVeJVWjwJcUv83yzPXoVjK0hrIm+RLLeuTM+gTylHAAAAFQD5gBdXsXAiTz1atzMg3xDFF1zlowAAAIAlLy6TCMlOBM0IcPsvP/9bEjDj0M8YZazdqt4amO2IaNUPYt9/sIsLOQfxIj8myDK1TOp8NyRJep7V5aICG4f3Q+XktlmLzdWn3sjvbWuIAXe1opjG2T69YhxfHZr8Wn7P4tpCgyqM4uHmUKrfnBzQQ9vkUUWsZoUXM2Z7vUXVfQAAAIAU6eNlphQWDwx0KOBiiYhF9BM6kDbQlyw8333rAG3G4CcjI2G8eYGtpBNliaD185UjCEsjPiudhGil/j4Zt/+VY3aGOLoi8kqXBBc8ZAML9bbkXpyhQhMgwiywx3ciFmvSn2UAin8yurStYPQxtXauZN5PYbdwCHPS7ApIStdpMA== wood@endec1
diff --git a/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/quantum-dxi-v1000.pub b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/quantum-dxi-v1000.pub
new file mode 100644
index 00000000000..1cba0ffbdfd
--- /dev/null
+++ b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/quantum-dxi-v1000.pub
@@ -0,0 +1 @@
+ssh-dss AAAAB3NzaC1kc3MAAACBAISAE3CAX4hsxTw0dRc0gx8nQ41r3Vkj9OmG6LGeKWRmpy7C6vaExuupjxid76fd4aS56lCUEEoRlJ3zE93qoK9acI6EGqGQFLuDZ0fqMyRSX+ilf+1HDo/TRyuraggxp9Hj9LMpZVbpFATMm0+d9Xs7eLmaJjuMsowNlOf8NFdHAAAAFQCwdvqOAkR6QhuiAapQ/9iVuR0UAQAAAIBpLMo4dhSeWkChfv659WLPftxRrX/HR8YMD/jqa3R4PsVM2g6dQ1191nHugtdV7uaMeOqOJ/QRWeYM+UYwT0Zgx2LqvgVSjNDfdjk+ZRY8x3SmExFi62mKFoTGSOCXfcAfuanjaoF+sepnaiLUd+SoJShGYHoqR2QWiysTRqknlwAAAIBLEgYmr9XCSqjENFDVQPFELYKT7Zs9J87PjPS1AP0qF1OoRGZ5mefK6X/6VivPAUWmmmev/BuAs8M1HtfGeGGzMzDIiU/WZQ3bScLB1Ykrcjk7TOFD6xrnk/inYAp5l29hjidoAONcXoHmUAMYOKqn63Q2AsDpExVcmfj99/BlpQ==
diff --git a/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/vagrant-default.pub b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/vagrant-default.pub
new file mode 100644
index 00000000000..18a9c00fd56
--- /dev/null
+++ b/spec/fixtures/tasks/gitlab/security/ssh-badkeys/authorized/vagrant-default.pub
@@ -0,0 +1 @@
+ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA6NF8iallvQVp22WDkTkyrtvp9eWW6A8YVr+kz4TjGYe7gHzIw+niNltGEFHzD8+v1I2YJ6oXevct1YeS0o9HZyN1Q9qgCgzUFtdOKLv6IedplqoPkcmF0aYet2PkEDo3MlTBckFXPITAMzF8dJSIFo9D8HfdOV0IAdx4O7PtixWKn5y2hMNG0zQPyUecp4pzC6kivAIhyfHilFR61RGL+GPXQ2MWZWFYbAGjyiYJnAmCP3NOTd0jMZEnDkbUvxhMmBYSdETk1rRgm+R4LOzFUGaHqHDLKLX+FIPKcF96hrucXzcWyLbIbEgE98OHlnVYCzRdK8jlqm8tehUc9c9WhQ== vagrant insecure public key
diff --git a/spec/fixtures/valid.po b/spec/fixtures/valid.po
index e580af66939..99f6b317ffc 100644
--- a/spec/fixtures/valid.po
+++ b/spec/fixtures/valid.po
@@ -256,9 +256,6 @@ msgstr "crear un token de acceso personal"
msgid "Cron Timezone"
msgstr "Zona horaria del Cron"
-msgid "Cron syntax"
-msgstr "Sintaxis de Cron"
-
msgid "Custom notification events"
msgstr "Eventos de notificaciones personalizadas"
@@ -442,6 +439,9 @@ msgstr "Última actualización"
msgid "Last commit"
msgstr "Último cambio"
+msgid "Learn more."
+msgstr "Más información."
+
msgid "Learn more in the"
msgstr "Más información en la"
diff --git a/spec/frontend/__mocks__/@cubejs-client/core.js b/spec/frontend/__mocks__/@cubejs-client/core.js
new file mode 100644
index 00000000000..549899aa8d8
--- /dev/null
+++ b/spec/frontend/__mocks__/@cubejs-client/core.js
@@ -0,0 +1,26 @@
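+// Manual Jest mock for `@cubejs-client/core`. Tests can swap in their own `load` and `meta`
+// implementations through the `__setMockLoad` / `__setMockMetadata` helpers exported below.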
+let mockLoad = jest.fn();
+let mockMetadata = jest.fn();
+
+export const CubejsApi = jest.fn().mockImplementation(() => ({
+ load: mockLoad,
+ meta: mockMetadata,
+}));
+
+export const HttpTransport = jest.fn();
+
+export const GRANULARITIES = [
+ {
+ name: 'seconds',
+ title: 'Seconds',
+ },
+];
+
+// eslint-disable-next-line no-underscore-dangle
+export const __setMockLoad = (x) => {
+ mockLoad = x;
+};
+
+// eslint-disable-next-line no-underscore-dangle
+export const __setMockMetadata = (x) => {
+ mockMetadata = x;
+};
diff --git a/spec/frontend/abuse_reports/components/abuse_category_selector_spec.js b/spec/frontend/abuse_reports/components/abuse_category_selector_spec.js
new file mode 100644
index 00000000000..6efd9fb1dd0
--- /dev/null
+++ b/spec/frontend/abuse_reports/components/abuse_category_selector_spec.js
@@ -0,0 +1,126 @@
+import { GlDrawer, GlForm, GlFormGroup, GlFormRadioGroup } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+import AbuseCategorySelector from '~/abuse_reports/components/abuse_category_selector.vue';
+
+jest.mock('~/lib/utils/common_utils', () => ({
+ contentTop: jest.fn(),
+}));
+
+jest.mock('~/lib/utils/csrf', () => ({ token: 'mock-csrf-token' }));
+
+describe('AbuseCategorySelector', () => {
+ let wrapper;
+
+ const ACTION_PATH = '/abuse_reports/add_category';
+ const USER_ID = '1';
+ const REPORTED_FROM_URL = 'http://example.com';
+
+ const createComponent = (props) => {
+ wrapper = shallowMountExtended(AbuseCategorySelector, {
+ propsData: {
+ ...props,
+ },
+ provide: {
+ reportAbusePath: ACTION_PATH,
+ reportedUserId: USER_ID,
+ reportedFromUrl: REPORTED_FROM_URL,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent({ showDrawer: true });
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findDrawer = () => wrapper.findComponent(GlDrawer);
+ const findTitle = () => wrapper.findByTestId('category-drawer-title');
+
+ const findForm = () => wrapper.findComponent(GlForm);
+ const findFormGroup = () => wrapper.findComponent(GlFormGroup);
+ const findRadioGroup = () => wrapper.findComponent(GlFormRadioGroup);
+
+ const findCSRFToken = () => findForm().find('input[name="authenticity_token"]');
+ const findUserId = () => wrapper.findByTestId('input-user-id');
+ const findReferer = () => wrapper.findByTestId('input-referer');
+
+ const findSubmitFormButton = () => wrapper.findByTestId('submit-form-button');
+
+ describe('Drawer', () => {
+ it('is open when prop showDrawer = true', () => {
+ expect(findDrawer().exists()).toBe(true);
+ expect(findDrawer().props('open')).toBe(true);
+ expect(findDrawer().props('zIndex')).toBe(300);
+ });
+
+ it('renders title', () => {
+ expect(findTitle().text()).toBe(wrapper.vm.$options.i18n.title);
+ });
+
+ it('emits close-drawer event', async () => {
+ await findDrawer().vm.$emit('close');
+
+ expect(wrapper.emitted('close-drawer')).toHaveLength(1);
+ });
+
+    describe('when prop showDrawer = false', () => {
+ beforeEach(() => {
+ createComponent({ showDrawer: false });
+ });
+
+ it('hides the drawer', () => {
+ expect(findDrawer().props('open')).toBe(false);
+ });
+ });
+ });
+
+ describe('Select category form', () => {
+ it('renders POST form with path', () => {
+ expect(findForm().attributes()).toMatchObject({
+ method: 'post',
+ action: ACTION_PATH,
+ });
+ });
+
+ it('renders csrf token', () => {
+ expect(findCSRFToken().attributes('value')).toBe('mock-csrf-token');
+ });
+
+ it('renders label', () => {
+ expect(findFormGroup().exists()).toBe(true);
+ expect(findFormGroup().attributes('label')).toBe(wrapper.vm.$options.i18n.label);
+ });
+
+ it('renders radio group', () => {
+ expect(findRadioGroup().exists()).toBe(true);
+ expect(findRadioGroup().props('options')).toEqual(wrapper.vm.$options.categoryOptions);
+ expect(findRadioGroup().attributes('name')).toBe('abuse_report[category]');
+ expect(findRadioGroup().attributes('required')).not.toBeUndefined();
+ });
+
+ it('renders userId as a hidden field', () => {
+ expect(findUserId().attributes()).toMatchObject({
+ type: 'hidden',
+ name: 'user_id',
+ value: USER_ID,
+ });
+ });
+
+ it('renders referer as a hidden field', () => {
+ expect(findReferer().attributes()).toMatchObject({
+ type: 'hidden',
+ name: 'abuse_report[reported_from_url]',
+ value: REPORTED_FROM_URL,
+ });
+ });
+
+ it('renders submit button', () => {
+ expect(findSubmitFormButton().exists()).toBe(true);
+ expect(findSubmitFormButton().text()).toBe(wrapper.vm.$options.i18n.next);
+ });
+ });
+});
diff --git a/spec/frontend/admin/broadcast_messages/components/message_form_spec.js b/spec/frontend/admin/broadcast_messages/components/message_form_spec.js
index 88ea79f38b3..36c0ac303ba 100644
--- a/spec/frontend/admin/broadcast_messages/components/message_form_spec.js
+++ b/spec/frontend/admin/broadcast_messages/components/message_form_spec.js
@@ -3,7 +3,7 @@ import { GlBroadcastMessage, GlForm } from '@gitlab/ui';
import AxiosMockAdapter from 'axios-mock-adapter';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';
import MessageForm from '~/admin/broadcast_messages/components/message_form.vue';
import {
BROADCAST_MESSAGES_PATH,
@@ -160,7 +160,7 @@ describe('MessageForm', () => {
it('shows an error alert if the create request fails', async () => {
createComponent({ broadcastMessage: { id: undefined } });
- axiosMock.onPost(BROADCAST_MESSAGES_PATH).replyOnce(httpStatus.BAD_REQUEST);
+ axiosMock.onPost(BROADCAST_MESSAGES_PATH).replyOnce(HTTP_STATUS_BAD_REQUEST);
findForm().vm.$emit('submit', { preventDefault: () => {} });
await waitForPromises();
@@ -187,7 +187,7 @@ describe('MessageForm', () => {
it('shows an error alert if the update request fails', async () => {
const id = 1337;
createComponent({ broadcastMessage: { id } });
- axiosMock.onPost(`${BROADCAST_MESSAGES_PATH}/${id}`).replyOnce(httpStatus.BAD_REQUEST);
+ axiosMock.onPost(`${BROADCAST_MESSAGES_PATH}/${id}`).replyOnce(HTTP_STATUS_BAD_REQUEST);
findForm().vm.$emit('submit', { preventDefault: () => {} });
await waitForPromises();
diff --git a/spec/frontend/admin/users/components/user_date_spec.js b/spec/frontend/admin/users/components/user_date_spec.js
index af262c6d3f0..73be33d5a9d 100644
--- a/spec/frontend/admin/users/components/user_date_spec.js
+++ b/spec/frontend/admin/users/components/user_date_spec.js
@@ -24,7 +24,7 @@ describe('FormatDate component', () => {
it.each`
date | dateFormat | output
- ${mockDate} | ${undefined} | ${'13 Nov, 2020'}
+ ${mockDate} | ${undefined} | ${'Nov 13, 2020'}
${null} | ${undefined} | ${'Never'}
${undefined} | ${undefined} | ${'Never'}
${mockDate} | ${ISO_SHORT_FORMAT} | ${'2020-11-13'}
diff --git a/spec/frontend/admin/users/mock_data.js b/spec/frontend/admin/users/mock_data.js
index 193ac3fa043..17cddebfcaf 100644
--- a/spec/frontend/admin/users/mock_data.js
+++ b/spec/frontend/admin/users/mock_data.js
@@ -62,3 +62,11 @@ export const userDeletionObstacles = [
{ name: 'schedule1', type: OBSTACLE_TYPES.oncallSchedules },
{ name: 'policy1', type: OBSTACLE_TYPES.escalationPolicies },
];
+
+export const userStatus = {
+ emoji: 'basketball',
+ message: 'test',
+ availability: 'busy',
+ message_html: 'test',
+ clear_status_at: '2023-01-04T10:00:00.000Z',
+};
diff --git a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
index 62a3e07186a..a15c78cc456 100644
--- a/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
+++ b/spec/frontend/alerts_settings/components/alerts_settings_wrapper_spec.js
@@ -32,7 +32,7 @@ import {
} from '~/alerts_settings/utils/error_messages';
import { createAlert, VARIANT_SUCCESS } from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import httpStatusCodes, { HTTP_STATUS_UNPROCESSABLE_ENTITY } from '~/lib/utils/http_status';
+import { HTTP_STATUS_FORBIDDEN, HTTP_STATUS_UNPROCESSABLE_ENTITY } from '~/lib/utils/http_status';
import {
createHttpVariables,
updateHttpVariables,
@@ -365,7 +365,7 @@ describe('AlertsSettingsWrapper', () => {
});
it('shows an error alert when integration is not activated', async () => {
- mock.onPost(/(.*)/).replyOnce(httpStatusCodes.FORBIDDEN);
+ mock.onPost(/(.*)/).replyOnce(HTTP_STATUS_FORBIDDEN);
await wrapper.vm.testAlertPayload({ endpoint: '', data: '', token: '' });
expect(createAlert).toHaveBeenCalledWith({
message: INTEGRATION_INACTIVE_PAYLOAD_TEST_ERROR,
diff --git a/spec/frontend/analytics/cycle_analytics/store/actions_spec.js b/spec/frontend/analytics/cycle_analytics/store/actions_spec.js
index f87807804c9..3030fca126b 100644
--- a/spec/frontend/analytics/cycle_analytics/store/actions_spec.js
+++ b/spec/frontend/analytics/cycle_analytics/store/actions_spec.js
@@ -3,7 +3,7 @@ import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
import * as actions from '~/analytics/cycle_analytics/store/actions';
import * as getters from '~/analytics/cycle_analytics/store/getters';
-import httpStatusCodes from '~/lib/utils/http_status';
+import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import {
allowedStages,
selectedStage,
@@ -197,7 +197,7 @@ describe('Project Value Stream Analytics actions', () => {
selectedStage,
};
mock = new MockAdapter(axios);
- mock.onGet(mockStagePath).reply(httpStatusCodes.OK, reviewEvents, headers);
+ mock.onGet(mockStagePath).reply(HTTP_STATUS_OK, reviewEvents, headers);
});
it(`commits the 'RECEIVE_STAGE_DATA_SUCCESS' mutation`, () =>
@@ -223,7 +223,7 @@ describe('Project Value Stream Analytics actions', () => {
selectedStage,
};
mock = new MockAdapter(axios);
- mock.onGet(mockStagePath).reply(httpStatusCodes.OK, { error: tooMuchDataError });
+ mock.onGet(mockStagePath).reply(HTTP_STATUS_OK, { error: tooMuchDataError });
});
it(`commits the 'RECEIVE_STAGE_DATA_ERROR' mutation`, () =>
@@ -247,7 +247,7 @@ describe('Project Value Stream Analytics actions', () => {
selectedStage,
};
mock = new MockAdapter(axios);
- mock.onGet(mockStagePath).reply(httpStatusCodes.BAD_REQUEST);
+ mock.onGet(mockStagePath).reply(HTTP_STATUS_BAD_REQUEST);
});
it(`commits the 'RECEIVE_STAGE_DATA_ERROR' mutation`, () =>
@@ -269,7 +269,7 @@ describe('Project Value Stream Analytics actions', () => {
endpoints: mockEndpoints,
};
mock = new MockAdapter(axios);
- mock.onGet(mockValueStreamPath).reply(httpStatusCodes.OK);
+ mock.onGet(mockValueStreamPath).reply(HTTP_STATUS_OK);
});
it(`commits the 'REQUEST_VALUE_STREAMS' mutation`, () =>
@@ -284,7 +284,7 @@ describe('Project Value Stream Analytics actions', () => {
describe('with a failing request', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet(mockValueStreamPath).reply(httpStatusCodes.BAD_REQUEST);
+ mock.onGet(mockValueStreamPath).reply(HTTP_STATUS_BAD_REQUEST);
});
it(`commits the 'RECEIVE_VALUE_STREAMS_ERROR' mutation`, () =>
@@ -294,7 +294,7 @@ describe('Project Value Stream Analytics actions', () => {
payload: {},
expectedMutations: [
{ type: 'REQUEST_VALUE_STREAMS' },
- { type: 'RECEIVE_VALUE_STREAMS_ERROR', payload: httpStatusCodes.BAD_REQUEST },
+ { type: 'RECEIVE_VALUE_STREAMS_ERROR', payload: HTTP_STATUS_BAD_REQUEST },
],
expectedActions: [],
}));
@@ -337,7 +337,7 @@ describe('Project Value Stream Analytics actions', () => {
selectedValueStream,
};
mock = new MockAdapter(axios);
- mock.onGet(mockValueStreamPath).reply(httpStatusCodes.OK);
+ mock.onGet(mockValueStreamPath).reply(HTTP_STATUS_OK);
});
it(`commits the 'REQUEST_VALUE_STREAM_STAGES' and 'RECEIVE_VALUE_STREAM_STAGES_SUCCESS' mutations`, () =>
@@ -355,7 +355,7 @@ describe('Project Value Stream Analytics actions', () => {
describe('with a failing request', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet(mockValueStreamPath).reply(httpStatusCodes.BAD_REQUEST);
+ mock.onGet(mockValueStreamPath).reply(HTTP_STATUS_BAD_REQUEST);
});
it(`commits the 'RECEIVE_VALUE_STREAM_STAGES_ERROR' mutation`, () =>
@@ -365,7 +365,7 @@ describe('Project Value Stream Analytics actions', () => {
payload: {},
expectedMutations: [
{ type: 'REQUEST_VALUE_STREAM_STAGES' },
- { type: 'RECEIVE_VALUE_STREAM_STAGES_ERROR', payload: httpStatusCodes.BAD_REQUEST },
+ { type: 'RECEIVE_VALUE_STREAM_STAGES_ERROR', payload: HTTP_STATUS_BAD_REQUEST },
],
expectedActions: [],
}));
@@ -382,7 +382,7 @@ describe('Project Value Stream Analytics actions', () => {
];
const stageMedianError = new Error(
- `Request failed with status code ${httpStatusCodes.BAD_REQUEST}`,
+ `Request failed with status code ${HTTP_STATUS_BAD_REQUEST}`,
);
beforeEach(() => {
@@ -392,7 +392,7 @@ describe('Project Value Stream Analytics actions', () => {
stages: allowedStages,
};
mock = new MockAdapter(axios);
- mock.onGet(mockValueStreamPath).reply(httpStatusCodes.OK);
+ mock.onGet(mockValueStreamPath).reply(HTTP_STATUS_OK);
});
it(`commits the 'REQUEST_STAGE_MEDIANS' and 'RECEIVE_STAGE_MEDIANS_SUCCESS' mutations`, () =>
@@ -410,7 +410,7 @@ describe('Project Value Stream Analytics actions', () => {
describe('with a failing request', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet(mockValueStreamPath).reply(httpStatusCodes.BAD_REQUEST);
+ mock.onGet(mockValueStreamPath).reply(HTTP_STATUS_BAD_REQUEST);
});
it(`commits the 'RECEIVE_VALUE_STREAM_STAGES_ERROR' mutation`, () =>
@@ -435,9 +435,7 @@ describe('Project Value Stream Analytics actions', () => {
{ id: 'code', count: 3 },
];
- const stageCountError = new Error(
- `Request failed with status code ${httpStatusCodes.BAD_REQUEST}`,
- );
+ const stageCountError = new Error(`Request failed with status code ${HTTP_STATUS_BAD_REQUEST}`);
beforeEach(() => {
state = {
@@ -448,11 +446,11 @@ describe('Project Value Stream Analytics actions', () => {
mock = new MockAdapter(axios);
mock
.onGet(mockValueStreamPath)
- .replyOnce(httpStatusCodes.OK, { count: 1 })
+ .replyOnce(HTTP_STATUS_OK, { count: 1 })
.onGet(mockValueStreamPath)
- .replyOnce(httpStatusCodes.OK, { count: 2 })
+ .replyOnce(HTTP_STATUS_OK, { count: 2 })
.onGet(mockValueStreamPath)
- .replyOnce(httpStatusCodes.OK, { count: 3 });
+ .replyOnce(HTTP_STATUS_OK, { count: 3 });
});
it(`commits the 'REQUEST_STAGE_COUNTS' and 'RECEIVE_STAGE_COUNTS_SUCCESS' mutations`, () =>
@@ -470,7 +468,7 @@ describe('Project Value Stream Analytics actions', () => {
describe('with a failing request', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet(mockValueStreamPath).reply(httpStatusCodes.BAD_REQUEST);
+ mock.onGet(mockValueStreamPath).reply(HTTP_STATUS_BAD_REQUEST);
});
it(`commits the 'RECEIVE_STAGE_COUNTS_ERROR' mutation`, () =>
diff --git a/spec/frontend/api/groups_api_spec.js b/spec/frontend/api/groups_api_spec.js
index 9de588a02aa..c354d8a9416 100644
--- a/spec/frontend/api/groups_api_spec.js
+++ b/spec/frontend/api/groups_api_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import getGroupTransferLocationsResponse from 'test_fixtures/api/groups/transfer_locations.json';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import axios from '~/lib/utils/axios_utils';
import { DEFAULT_PER_PAGE } from '~/api';
import { updateGroup, getGroupTransferLocations } from '~/api/groups_api';
@@ -35,7 +35,7 @@ describe('GroupsApi', () => {
beforeEach(() => {
mock.onPut(expectedUrl).reply(({ data }) => {
- return [httpStatus.OK, { id: mockGroupId, ...JSON.parse(data) }];
+ return [HTTP_STATUS_OK, { id: mockGroupId, ...JSON.parse(data) }];
});
});
diff --git a/spec/frontend/api/harbor_registry_spec.js b/spec/frontend/api/harbor_registry_spec.js
index 8a4c377ebd1..db4b189835e 100644
--- a/spec/frontend/api/harbor_registry_spec.js
+++ b/spec/frontend/api/harbor_registry_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import * as harborRegistryApi from '~/api/harbor_registry';
import axios from '~/lib/utils/axios_utils';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
describe('~/api/harbor_registry', () => {
let mock;
@@ -37,7 +37,7 @@ describe('~/api/harbor_registry', () => {
location: 'http://demo.harbor.com/harbor/projects/2/repositories/image-1',
},
];
- mock.onGet(expectedUrl).reply(httpStatus.OK, expectResponse);
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, expectResponse);
return harborRegistryApi.getHarborRepositoriesList(expectedParams).then(({ data }) => {
expect(data).toEqual(expectResponse);
@@ -66,7 +66,7 @@ describe('~/api/harbor_registry', () => {
tags: ['v2', 'v1', 'latest'],
},
];
- mock.onGet(expectedUrl).reply(httpStatus.OK, expectResponse);
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, expectResponse);
return harborRegistryApi.getHarborArtifacts(expectedParams).then(({ data }) => {
expect(data).toEqual(expectResponse);
@@ -97,7 +97,7 @@ describe('~/api/harbor_registry', () => {
immutable: false,
},
];
- mock.onGet(expectedUrl).reply(httpStatus.OK, expectResponse);
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, expectResponse);
return harborRegistryApi.getHarborTags(expectedParams).then(({ data }) => {
expect(data).toEqual(expectResponse);
diff --git a/spec/frontend/api/packages_api_spec.js b/spec/frontend/api/packages_api_spec.js
index d55d2036dcf..5f517bcf358 100644
--- a/spec/frontend/api/packages_api_spec.js
+++ b/spec/frontend/api/packages_api_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import { publishPackage } from '~/api/packages_api';
import axios from '~/lib/utils/axios_utils';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
describe('Api', () => {
const dummyApiVersion = 'v3000';
@@ -35,7 +35,7 @@ describe('Api', () => {
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/packages/generic/${name}/${packageVersion}/${name}`;
jest.spyOn(axios, 'put');
- mock.onPut(expectedUrl).replyOnce(httpStatus.OK, apiResponse);
+ mock.onPut(expectedUrl).replyOnce(HTTP_STATUS_OK, apiResponse);
return publishPackage(
{
diff --git a/spec/frontend/api/tags_api_spec.js b/spec/frontend/api/tags_api_spec.js
index a7436bf6a50..af3533f52b7 100644
--- a/spec/frontend/api/tags_api_spec.js
+++ b/spec/frontend/api/tags_api_spec.js
@@ -1,7 +1,7 @@
import MockAdapter from 'axios-mock-adapter';
import * as tagsApi from '~/api/tags_api';
import axios from '~/lib/utils/axios_utils';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
describe('~/api/tags_api.js', () => {
let mock;
@@ -25,7 +25,7 @@ describe('~/api/tags_api.js', () => {
it('fetches a tag of a given tag name of a particular project', () => {
const tagName = 'tag-name';
const expectedUrl = `/api/v7/projects/${projectId}/repository/tags/${tagName}`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, {
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, {
name: tagName,
});
diff --git a/spec/frontend/api/user_api_spec.js b/spec/frontend/api/user_api_spec.js
index ba6b73e8c1a..9e901cf0f71 100644
--- a/spec/frontend/api/user_api_spec.js
+++ b/spec/frontend/api/user_api_spec.js
@@ -1,8 +1,13 @@
import MockAdapter from 'axios-mock-adapter';
-import { followUser, unfollowUser, associationsCount } from '~/api/user_api';
+import { followUser, unfollowUser, associationsCount, updateUserStatus } from '~/api/user_api';
import axios from '~/lib/utils/axios_utils';
-import { associationsCount as associationsCountData } from 'jest/admin/users/mock_data';
+import {
+ associationsCount as associationsCountData,
+ userStatus as mockUserStatus,
+} from 'jest/admin/users/mock_data';
+import { AVAILABILITY_STATUS } from '~/set_status_modal/constants';
+import { timeRanges } from '~/vue_shared/constants';
describe('~/api/user_api', () => {
let axiosMock;
@@ -62,4 +67,30 @@ describe('~/api/user_api', () => {
expect(axiosMock.history.get[0].url).toBe(expectedUrl);
});
});
+
+ describe('updateUserStatus', () => {
+ it('calls correct URL and returns expected response', async () => {
+ const expectedUrl = '/api/v4/user/status';
+ const expectedData = {
+ emoji: 'basketball',
+ message: 'test',
+ availability: AVAILABILITY_STATUS.BUSY,
+ clear_status_after: timeRanges[0].shortcut,
+ };
+ const expectedResponse = { data: mockUserStatus };
+
+ axiosMock.onPatch(expectedUrl).replyOnce(200, expectedResponse);
+
+ await expect(
+ updateUserStatus({
+ emoji: 'basketball',
+ message: 'test',
+ availability: AVAILABILITY_STATUS.BUSY,
+ clearStatusAfter: timeRanges[0].shortcut,
+ }),
+ ).resolves.toEqual(expect.objectContaining({ data: expectedResponse }));
+ expect(axiosMock.history.patch[0].url).toBe(expectedUrl);
+ expect(JSON.parse(axiosMock.history.patch[0].data)).toEqual(expectedData);
+ });
+ });
});
diff --git a/spec/frontend/api_spec.js b/spec/frontend/api_spec.js
index 5209d9c2d2c..39fbe02480d 100644
--- a/spec/frontend/api_spec.js
+++ b/spec/frontend/api_spec.js
@@ -1,10 +1,13 @@
import MockAdapter from 'axios-mock-adapter';
import Api, { DEFAULT_PER_PAGE } from '~/api';
import axios from '~/lib/utils/axios_utils';
-import httpStatus, {
+import {
HTTP_STATUS_ACCEPTED,
HTTP_STATUS_CREATED,
+ HTTP_STATUS_INTERNAL_SERVER_ERROR,
HTTP_STATUS_NO_CONTENT,
+ HTTP_STATUS_NOT_FOUND,
+ HTTP_STATUS_OK,
} from '~/lib/utils/http_status';
jest.mock('~/flash');
@@ -64,7 +67,7 @@ describe('Api', () => {
it('fetch all group packages', () => {
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/packages`;
jest.spyOn(axios, 'get');
- mock.onGet(expectedUrl).replyOnce(httpStatus.OK, apiResponse);
+ mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK, apiResponse);
return Api.groupPackages(groupId).then(({ data }) => {
expect(data).toEqual(apiResponse);
@@ -77,7 +80,7 @@ describe('Api', () => {
it('fetch all project packages', () => {
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/packages`;
jest.spyOn(axios, 'get');
- mock.onGet(expectedUrl).replyOnce(httpStatus.OK, apiResponse);
+ mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK, apiResponse);
return Api.projectPackages(projectId).then(({ data }) => {
expect(data).toEqual(apiResponse);
@@ -99,7 +102,7 @@ describe('Api', () => {
const expectedUrl = `foo`;
jest.spyOn(Api, 'buildProjectPackageUrl').mockReturnValue(expectedUrl);
jest.spyOn(axios, 'get');
- mock.onGet(expectedUrl).replyOnce(httpStatus.OK, apiResponse);
+ mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK, apiResponse);
return Api.projectPackage(projectId, packageId).then(({ data }) => {
expect(data).toEqual(apiResponse);
@@ -114,7 +117,7 @@ describe('Api', () => {
jest.spyOn(Api, 'buildProjectPackageUrl').mockReturnValue(expectedUrl);
jest.spyOn(axios, 'delete');
- mock.onDelete(expectedUrl).replyOnce(httpStatus.OK, true);
+ mock.onDelete(expectedUrl).replyOnce(HTTP_STATUS_OK, true);
return Api.deleteProjectPackage(projectId, packageId).then(({ data }) => {
expect(data).toEqual(true);
@@ -130,7 +133,7 @@ describe('Api', () => {
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/packages/${packageId}/package_files/${packageFileId}`;
jest.spyOn(axios, 'delete');
- mock.onDelete(expectedUrl).replyOnce(httpStatus.OK, true);
+ mock.onDelete(expectedUrl).replyOnce(HTTP_STATUS_OK, true);
return Api.deleteProjectPackageFile(projectId, packageId, packageFileId).then(
({ data }) => {
@@ -150,7 +153,7 @@ describe('Api', () => {
jest.spyOn(axios, 'get');
jest.spyOn(Api, 'buildUrl').mockReturnValueOnce(expectedUrl);
- mock.onGet(expectedUrl).replyOnce(httpStatus.OK, apiResponse);
+ mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK, apiResponse);
const { data } = await Api.containerRegistryDetails(1);
@@ -164,7 +167,7 @@ describe('Api', () => {
it('fetches a group', () => {
const groupId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, {
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, {
name: 'test',
});
@@ -182,7 +185,7 @@ describe('Api', () => {
const groupId = '54321';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/members`;
const expectedData = [{ id: 7 }];
- mock.onGet(expectedUrl).reply(httpStatus.OK, expectedData);
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, expectedData);
return Api.groupMembers(groupId).then(({ data }) => {
expect(data).toEqual(expectedData);
@@ -232,7 +235,7 @@ describe('Api', () => {
web_url: 'https://gitlab.com/groups/gitlab-org/-/milestones/42',
},
];
- mock.onGet(expectedUrl).reply(httpStatus.OK, expectedData);
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, expectedData);
return Api.groupMilestones(groupId).then(({ data }) => {
expect(data).toEqual(expectedData);
@@ -245,7 +248,7 @@ describe('Api', () => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups.json`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, [
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, [
{
name: 'test',
},
@@ -266,7 +269,7 @@ describe('Api', () => {
const options = { params: { search: 'foo' } };
const expectedGroup = 'gitlab-org';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${expectedGroup}/labels`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, [
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, [
{
id: 1,
name: 'Foo Label',
@@ -284,7 +287,7 @@ describe('Api', () => {
it('fetches namespaces', () => {
const query = 'dummy query';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/namespaces.json`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, [
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, [
{
name: 'test',
},
@@ -306,7 +309,7 @@ describe('Api', () => {
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json`;
window.gon.current_user_id = 1;
- mock.onGet(expectedUrl).reply(httpStatus.OK, [
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, [
{
name: 'test',
},
@@ -325,7 +328,7 @@ describe('Api', () => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects.json`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, [
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, [
{
name: 'test',
},
@@ -345,7 +348,7 @@ describe('Api', () => {
it('update a project with the given payload', () => {
const projectPath = 'foo';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}`;
- mock.onPut(expectedUrl).reply(httpStatus.OK, { foo: 'bar' });
+ mock.onPut(expectedUrl).reply(HTTP_STATUS_OK, { foo: 'bar' });
return Api.updateProject(projectPath, { foo: 'bar' }).then(({ data }) => {
expect(data.foo).toBe('bar');
@@ -359,7 +362,7 @@ describe('Api', () => {
const options = { unused: 'option' };
const projectPath = 'gitlab-org%2Fgitlab-ce';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/users`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, [
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, [
{
name: 'test',
},
@@ -378,7 +381,7 @@ describe('Api', () => {
it('fetches all merge requests for a project', () => {
const mockData = [{ source_branch: 'foo' }, { source_branch: 'bar' }];
- mock.onGet(expectedUrl).reply(httpStatus.OK, mockData);
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, mockData);
return Api.projectMergeRequests(projectPath).then(({ data }) => {
expect(data.length).toEqual(2);
expect(data[0].source_branch).toBe('foo');
@@ -391,7 +394,7 @@ describe('Api', () => {
source_branch: 'bar',
};
const mockData = [{ source_branch: 'bar' }];
- mock.onGet(expectedUrl, { params }).reply(httpStatus.OK, mockData);
+ mock.onGet(expectedUrl, { params }).reply(HTTP_STATUS_OK, mockData);
return Api.projectMergeRequests(projectPath, params).then(({ data }) => {
expect(data.length).toEqual(1);
@@ -405,7 +408,7 @@ describe('Api', () => {
const projectPath = 'abc';
const mergeRequestId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, {
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, {
title: 'test',
});
@@ -420,7 +423,7 @@ describe('Api', () => {
const projectPath = 'abc';
const mergeRequestId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}/changes`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, {
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, {
title: 'test',
});
@@ -435,7 +438,7 @@ describe('Api', () => {
const projectPath = 'abc';
const mergeRequestId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/merge_requests/${mergeRequestId}/versions`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, [
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, [
{
id: 123,
},
@@ -454,7 +457,7 @@ describe('Api', () => {
const params = { scope: 'active' };
const mockData = [{ id: 4 }];
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectPath}/runners`;
- mock.onGet(expectedUrl, { params }).reply(httpStatus.OK, mockData);
+ mock.onGet(expectedUrl, { params }).reply(HTTP_STATUS_OK, mockData);
return Api.projectRunners(projectPath, { params }).then(({ data }) => {
expect(data).toEqual(mockData);
@@ -561,7 +564,7 @@ describe('Api', () => {
expect(config.data).toBe(JSON.stringify(expectedData));
return [
- httpStatus.OK,
+ HTTP_STATUS_OK,
{
name: 'test',
},
@@ -584,7 +587,7 @@ describe('Api', () => {
expect(config.data).toBe(JSON.stringify({ color: labelData.color }));
return [
- httpStatus.OK,
+ HTTP_STATUS_OK,
{
...labelData,
},
@@ -605,7 +608,7 @@ describe('Api', () => {
const groupId = '123456';
const query = 'dummy query';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/groups/${groupId}/projects.json`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, [
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, [
{
name: 'test',
},
@@ -660,7 +663,7 @@ describe('Api', () => {
)}/repository/commits/${sha}`;
it('fetches a single commit', () => {
- mock.onGet(expectedUrl).reply(httpStatus.OK, { id: sha });
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, { id: sha });
return Api.commit(projectId, sha).then(({ data: commit }) => {
expect(commit.id).toBe(sha);
@@ -668,7 +671,7 @@ describe('Api', () => {
});
it('fetches a single commit without stats', () => {
- mock.onGet(expectedUrl, { params: { stats: false } }).reply(httpStatus.OK, { id: sha });
+ mock.onGet(expectedUrl, { params: { stats: false } }).reply(HTTP_STATUS_OK, { id: sha });
return Api.commit(projectId, sha, { stats: false }).then(({ data: commit }) => {
expect(commit.id).toBe(sha);
@@ -686,7 +689,7 @@ describe('Api', () => {
)}`;
it('fetches an issue template', () => {
- mock.onGet(expectedUrl).reply(httpStatus.OK, 'test');
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, 'test');
return new Promise((resolve) => {
Api.issueTemplate(namespace, project, templateKey, templateType, (_, response) => {
@@ -698,7 +701,7 @@ describe('Api', () => {
describe('when an error occurs while fetching an issue template', () => {
it('rejects the Promise', () => {
- mock.onGet(expectedUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+ mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
return new Promise((resolve) => {
Api.issueTemplate(namespace, project, templateKey, templateType, () => {
@@ -720,7 +723,7 @@ describe('Api', () => {
const expectedData = [
{ key: 'Template1', name: 'Template 1', content: 'This is template 1!' },
];
- mock.onGet(expectedUrl).reply(httpStatus.OK, expectedData);
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, expectedData);
return new Promise((resolve) => {
Api.issueTemplates(namespace, project, templateType, (_, response) => {
@@ -736,7 +739,7 @@ describe('Api', () => {
describe('when an error occurs while fetching issue templates', () => {
it('rejects the Promise', () => {
- mock.onGet(expectedUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+ mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
Api.issueTemplates(namespace, project, templateType, () => {
expect(mock.history.get).toHaveLength(1);
@@ -749,7 +752,7 @@ describe('Api', () => {
it('fetches a list of templates', () => {
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/gitlab-org%2Fgitlab-ce/templates/licenses`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, 'test');
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, 'test');
return new Promise((resolve) => {
Api.projectTemplates('gitlab-org/gitlab-ce', 'licenses', {}, (response) => {
@@ -765,7 +768,7 @@ describe('Api', () => {
const data = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/gitlab-org%2Fgitlab-ce/templates/licenses/test%20license`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, 'test');
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, 'test');
return new Promise((resolve) => {
Api.projectTemplate(
@@ -787,7 +790,7 @@ describe('Api', () => {
const query = 'dummy query';
const options = { unused: 'option' };
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users.json`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, [
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, [
{
name: 'test',
},
@@ -804,7 +807,7 @@ describe('Api', () => {
it('fetches single user', () => {
const userId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users/${userId}`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, {
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, {
name: 'testuser',
});
@@ -817,7 +820,7 @@ describe('Api', () => {
describe('user counts', () => {
it('fetches single user counts', () => {
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/user_counts`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, {
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, {
merge_requests: 4,
});
@@ -831,7 +834,7 @@ describe('Api', () => {
it('fetches single user status', () => {
const userId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users/${userId}/status`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, {
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, {
message: 'testmessage',
});
@@ -847,7 +850,7 @@ describe('Api', () => {
const options = { unused: 'option' };
const userId = '123456';
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/users/${userId}/projects`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, [
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, [
{
name: 'test',
},
@@ -868,7 +871,7 @@ describe('Api', () => {
const projectId = 'example/foobar';
const commitSha = 'abc123def';
const expectedUrl = `${dummyUrlRoot}/${projectId}/commit/${commitSha}/pipelines`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, [
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, [
{
name: 'test',
},
@@ -893,7 +896,7 @@ describe('Api', () => {
name: 'test',
},
];
- mock.onGet(expectedUrl, { params }).reply(httpStatus.OK, payload);
+ mock.onGet(expectedUrl, { params }).reply(HTTP_STATUS_OK, payload);
const { data } = await Api.pipelineJobs(projectId, pipelineId, params);
expect(data).toEqual(payload);
@@ -912,7 +915,7 @@ describe('Api', () => {
jest.spyOn(axios, 'post');
- mock.onPost(expectedUrl).replyOnce(httpStatus.OK, {
+ mock.onPost(expectedUrl).replyOnce(HTTP_STATUS_OK, {
name: branch,
});
@@ -932,7 +935,7 @@ describe('Api', () => {
jest.spyOn(axios, 'get');
- mock.onGet(expectedUrl).replyOnce(httpStatus.OK, ['fork']);
+ mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK, ['fork']);
return Api.projectForks(dummyProjectPath, { visibility: 'private' }).then(({ data }) => {
expect(data).toEqual(['fork']);
@@ -1021,7 +1024,7 @@ describe('Api', () => {
describe('when releases are successfully returned', () => {
it('resolves the Promise', () => {
- mock.onGet(expectedUrl).replyOnce(httpStatus.OK);
+ mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK);
return Api.releases(dummyProjectPath).then(() => {
expect(mock.history.get).toHaveLength(1);
@@ -1031,7 +1034,7 @@ describe('Api', () => {
describe('when an error occurs while fetching releases', () => {
it('rejects the Promise', () => {
- mock.onGet(expectedUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+ mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
return Api.releases(dummyProjectPath).catch(() => {
expect(mock.history.get).toHaveLength(1);
@@ -1045,7 +1048,7 @@ describe('Api', () => {
describe('when the release is successfully returned', () => {
it('resolves the Promise', () => {
- mock.onGet(expectedUrl).replyOnce(httpStatus.OK);
+ mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK);
return Api.release(dummyProjectPath, dummyTagName).then(() => {
expect(mock.history.get).toHaveLength(1);
@@ -1055,7 +1058,7 @@ describe('Api', () => {
describe('when an error occurs while fetching the release', () => {
it('rejects the Promise', () => {
- mock.onGet(expectedUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+ mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
return Api.release(dummyProjectPath, dummyTagName).catch(() => {
expect(mock.history.get).toHaveLength(1);
@@ -1083,7 +1086,7 @@ describe('Api', () => {
describe('when an error occurs while creating the release', () => {
it('rejects the Promise', () => {
- mock.onPost(expectedUrl, release).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+ mock.onPost(expectedUrl, release).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
return Api.createRelease(dummyProjectPath, release).catch(() => {
expect(mock.history.post).toHaveLength(1);
@@ -1101,7 +1104,7 @@ describe('Api', () => {
describe('when the release is successfully updated', () => {
it('resolves the Promise', () => {
- mock.onPut(expectedUrl, release).replyOnce(httpStatus.OK);
+ mock.onPut(expectedUrl, release).replyOnce(HTTP_STATUS_OK);
return Api.updateRelease(dummyProjectPath, dummyTagName, release).then(() => {
expect(mock.history.put).toHaveLength(1);
@@ -1111,7 +1114,7 @@ describe('Api', () => {
describe('when an error occurs while updating the release', () => {
it('rejects the Promise', () => {
- mock.onPut(expectedUrl, release).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+ mock.onPut(expectedUrl, release).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
return Api.updateRelease(dummyProjectPath, dummyTagName, release).catch(() => {
expect(mock.history.put).toHaveLength(1);
@@ -1139,7 +1142,7 @@ describe('Api', () => {
describe('when an error occurs while creating the Release', () => {
it('rejects the Promise', () => {
- mock.onPost(expectedUrl, expectedLink).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+ mock.onPost(expectedUrl, expectedLink).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
return Api.createReleaseLink(dummyProjectPath, dummyTagName, expectedLink).catch(() => {
expect(mock.history.post).toHaveLength(1);
@@ -1154,7 +1157,7 @@ describe('Api', () => {
describe('when the Release is successfully deleted', () => {
it('resolves the Promise', () => {
- mock.onDelete(expectedUrl).replyOnce(httpStatus.OK);
+ mock.onDelete(expectedUrl).replyOnce(HTTP_STATUS_OK);
return Api.deleteReleaseLink(dummyProjectPath, dummyTagName, dummyLinkId).then(() => {
expect(mock.history.delete).toHaveLength(1);
@@ -1164,7 +1167,7 @@ describe('Api', () => {
describe('when an error occurs while deleting the Release', () => {
it('rejects the Promise', () => {
- mock.onDelete(expectedUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+ mock.onDelete(expectedUrl).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
return Api.deleteReleaseLink(dummyProjectPath, dummyTagName, dummyLinkId).catch(() => {
expect(mock.history.delete).toHaveLength(1);
@@ -1183,7 +1186,7 @@ describe('Api', () => {
describe('when the raw file is successfully fetched', () => {
beforeEach(() => {
- mock.onGet(expectedUrl).replyOnce(httpStatus.OK);
+ mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK);
});
it('resolves the Promise', () => {
@@ -1206,7 +1209,7 @@ describe('Api', () => {
describe('when an error occurs while getting a raw file', () => {
it('rejects the Promise', () => {
- mock.onPost(expectedUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+ mock.onPost(expectedUrl).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
return Api.getRawFile(dummyProjectPath, dummyFilePath).catch(() => {
expect(mock.history.get).toHaveLength(1);
@@ -1238,7 +1241,7 @@ describe('Api', () => {
describe('when an error occurs while getting a raw file', () => {
it('rejects the Promise', () => {
- mock.onPost(expectedUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+ mock.onPost(expectedUrl).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
return Api.createProjectMergeRequest(dummyProjectPath).catch(() => {
expect(mock.history.post).toHaveLength(1);
@@ -1253,7 +1256,7 @@ describe('Api', () => {
const issue = 1;
const expectedArray = [1, 2, 3];
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/issues/${issue}`;
- mock.onPut(expectedUrl).reply(httpStatus.OK, { assigneeIds: expectedArray });
+ mock.onPut(expectedUrl).reply(HTTP_STATUS_OK, { assigneeIds: expectedArray });
return Api.updateIssue(projectId, issue, { assigneeIds: expectedArray }).then(({ data }) => {
expect(data.assigneeIds).toEqual(expectedArray);
@@ -1267,7 +1270,7 @@ describe('Api', () => {
const mergeRequest = 1;
const expectedArray = [1, 2, 3];
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/merge_requests/${mergeRequest}`;
- mock.onPut(expectedUrl).reply(httpStatus.OK, { assigneeIds: expectedArray });
+ mock.onPut(expectedUrl).reply(HTTP_STATUS_OK, { assigneeIds: expectedArray });
return Api.updateMergeRequest(projectId, mergeRequest, { assigneeIds: expectedArray }).then(
({ data }) => {
@@ -1283,7 +1286,7 @@ describe('Api', () => {
const options = { unused: 'option' };
const projectId = 8;
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/repository/tags`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, [
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, [
{
name: 'test',
},
@@ -1308,7 +1311,7 @@ describe('Api', () => {
updated_at: '2020-07-10T05:10:35.122Z',
};
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/freeze_periods`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, [freezePeriod]);
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, [freezePeriod]);
return Api.freezePeriods(projectId).then(({ data }) => {
expect(data[0]).toStrictEqual(freezePeriod);
@@ -1368,7 +1371,7 @@ describe('Api', () => {
describe('when the freeze period is successfully updated', () => {
it('resolves the Promise', () => {
- mock.onPut(expectedUrl, options).replyOnce(httpStatus.OK, expectedResult);
+ mock.onPut(expectedUrl, options).replyOnce(HTTP_STATUS_OK, expectedResult);
return Api.updateFreezePeriod(projectId, options).then(({ data }) => {
expect(data).toStrictEqual(expectedResult);
@@ -1392,7 +1395,7 @@ describe('Api', () => {
jest.spyOn(axios, 'post');
- mock.onPost(expectedUrl).replyOnce(httpStatus.OK, {
+ mock.onPost(expectedUrl).replyOnce(HTTP_STATUS_OK, {
web_url: redirectUrl,
});
@@ -1423,7 +1426,7 @@ describe('Api', () => {
it('returns null', () => {
jest.spyOn(axios, 'post');
- mock.onPost(expectedUrl).replyOnce(httpStatus.OK, true);
+ mock.onPost(expectedUrl).replyOnce(HTTP_STATUS_OK, true);
expect(axios.post).toHaveBeenCalledTimes(0);
expect(Api.trackRedisCounterEvent(event)).toEqual(null);
@@ -1437,7 +1440,7 @@ describe('Api', () => {
it('resolves the Promise', () => {
jest.spyOn(axios, 'post');
- mock.onPost(expectedUrl, { event }).replyOnce(httpStatus.OK, true);
+ mock.onPost(expectedUrl, { event }).replyOnce(HTTP_STATUS_OK, true);
return Api.trackRedisCounterEvent(event).then(({ data }) => {
expect(data).toEqual(true);
@@ -1483,7 +1486,7 @@ describe('Api', () => {
it('resolves the Promise', () => {
jest.spyOn(axios, 'post');
- mock.onPost(expectedUrl, { event }).replyOnce(httpStatus.OK, true);
+ mock.onPost(expectedUrl, { event }).replyOnce(HTTP_STATUS_OK, true);
return Api.trackRedisHllUserEvent(event).then(({ data }) => {
expect(data).toEqual(true);
@@ -1544,7 +1547,7 @@ describe('Api', () => {
];
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/deploy_keys`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, deployKeys);
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, deployKeys);
const params = { page: 2, public: true };
const { data } = await Api.deployKeys(params);
@@ -1569,7 +1572,7 @@ describe('Api', () => {
];
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/secure_files`;
- mock.onGet(expectedUrl).reply(httpStatus.OK, secureFiles);
+ mock.onGet(expectedUrl).reply(HTTP_STATUS_OK, secureFiles);
const { data } = await Api.projectSecureFiles(projectId, {});
expect(data).toEqual(secureFiles);
@@ -1589,7 +1592,7 @@ describe('Api', () => {
};
const expectedUrl = `${dummyUrlRoot}/api/${dummyApiVersion}/projects/${projectId}/secure_files`;
- mock.onPost(expectedUrl).reply(httpStatus.OK, secureFile);
+ mock.onPost(expectedUrl).reply(HTTP_STATUS_OK, secureFile);
const { data } = await Api.uploadProjectSecureFile(projectId, 'some data');
expect(data).toEqual(secureFile);
@@ -1639,7 +1642,7 @@ describe('Api', () => {
describe('fetchFeatureFlagUserLists', () => {
it('GETs the right url', () => {
- mock.onGet(expectedUrl).replyOnce(httpStatus.OK, []);
+ mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK, []);
return Api.fetchFeatureFlagUserLists(projectId).then(({ data }) => {
expect(data).toEqual([]);
@@ -1649,7 +1652,7 @@ describe('Api', () => {
describe('searchFeatureFlagUserLists', () => {
it('GETs the right url', () => {
- mock.onGet(expectedUrl, { params: { search: 'test' } }).replyOnce(httpStatus.OK, []);
+ mock.onGet(expectedUrl, { params: { search: 'test' } }).replyOnce(HTTP_STATUS_OK, []);
return Api.searchFeatureFlagUserLists(projectId, 'test').then(({ data }) => {
expect(data).toEqual([]);
@@ -1663,7 +1666,7 @@ describe('Api', () => {
name: 'mock_user_list',
user_xids: '1,2,3,4',
};
- mock.onPost(expectedUrl, mockUserListData).replyOnce(httpStatus.OK, mockUserList);
+ mock.onPost(expectedUrl, mockUserListData).replyOnce(HTTP_STATUS_OK, mockUserList);
return Api.createFeatureFlagUserList(projectId, mockUserListData).then(({ data }) => {
expect(data).toEqual(mockUserList);
@@ -1673,7 +1676,7 @@ describe('Api', () => {
describe('fetchFeatureFlagUserList', () => {
it('GETs the right url', () => {
- mock.onGet(`${expectedUrl}/1`).replyOnce(httpStatus.OK, mockUserList);
+ mock.onGet(`${expectedUrl}/1`).replyOnce(HTTP_STATUS_OK, mockUserList);
return Api.fetchFeatureFlagUserList(projectId, 1).then(({ data }) => {
expect(data).toEqual(mockUserList);
@@ -1685,7 +1688,7 @@ describe('Api', () => {
it('PUTs the right url', () => {
mock
.onPut(`${expectedUrl}/1`)
- .replyOnce(httpStatus.OK, { ...mockUserList, user_xids: '5' });
+ .replyOnce(HTTP_STATUS_OK, { ...mockUserList, user_xids: '5' });
return Api.updateFeatureFlagUserList(projectId, {
...mockUserList,
@@ -1698,7 +1701,7 @@ describe('Api', () => {
describe('deleteFeatureFlagUserList', () => {
it('DELETEs the right url', () => {
- mock.onDelete(`${expectedUrl}/1`).replyOnce(httpStatus.OK, 'deleted');
+ mock.onDelete(`${expectedUrl}/1`).replyOnce(HTTP_STATUS_OK, 'deleted');
return Api.deleteFeatureFlagUserList(projectId, 1).then(({ data }) => {
expect(data).toBe('deleted');
@@ -1715,12 +1718,12 @@ describe('Api', () => {
it('returns 404 for non-existing branch', () => {
jest.spyOn(axios, 'get');
- mock.onGet(expectedUrl).replyOnce(httpStatus.NOT_FOUND, {
+ mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_NOT_FOUND, {
message: '404 Not found',
});
return Api.projectProtectedBranch(dummyProjectId, branchName).catch((error) => {
- expect(error.response.status).toBe(httpStatus.NOT_FOUND);
+ expect(error.response.status).toBe(HTTP_STATUS_NOT_FOUND);
expect(axios.get).toHaveBeenCalledWith(expectedUrl);
});
});
@@ -1730,7 +1733,7 @@ describe('Api', () => {
jest.spyOn(axios, 'get');
- mock.onGet(expectedUrl).replyOnce(httpStatus.OK, expectedObj);
+ mock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK, expectedObj);
return Api.projectProtectedBranch(dummyProjectId, branchName).then((data) => {
expect(data).toEqual(expectedObj);
diff --git a/spec/frontend/artifacts/components/artifact_row_spec.js b/spec/frontend/artifacts/components/artifact_row_spec.js
index dcc0d684f13..2a7156bf480 100644
--- a/spec/frontend/artifacts/components/artifact_row_spec.js
+++ b/spec/frontend/artifacts/components/artifact_row_spec.js
@@ -16,13 +16,14 @@ describe('ArtifactRow component', () => {
const findDownloadButton = () => wrapper.findByTestId('job-artifact-row-download-button');
const findDeleteButton = () => wrapper.findByTestId('job-artifact-row-delete-button');
- const createComponent = (mountFn = shallowMountExtended) => {
- wrapper = mountFn(ArtifactRow, {
+ const createComponent = ({ canDestroyArtifacts = true } = {}) => {
+ wrapper = shallowMountExtended(ArtifactRow, {
propsData: {
artifact,
isLoading: false,
isLastRow: false,
},
+ provide: { canDestroyArtifacts },
stubs: { GlBadge, GlButton, GlFriendlyWrap },
});
};
@@ -50,12 +51,24 @@ describe('ArtifactRow component', () => {
it('displays the download button as a link to the download path', () => {
expect(findDownloadButton().attributes('href')).toBe(artifact.downloadPath);
});
+ });
+
+ describe('delete button', () => {
+ it('does not show when user does not have permission', () => {
+ createComponent({ canDestroyArtifacts: false });
+
+ expect(findDeleteButton().exists()).toBe(false);
+ });
+
+ it('shows when user has permission', () => {
+ createComponent();
- it('displays the delete button', () => {
expect(findDeleteButton().exists()).toBe(true);
});
- it('emits the delete event when the delete button is clicked', async () => {
+ it('emits the delete event when clicked', async () => {
+ createComponent();
+
expect(wrapper.emitted('delete')).toBeUndefined();
findDeleteButton().trigger('click');
diff --git a/spec/frontend/artifacts/components/artifacts_table_row_details_spec.js b/spec/frontend/artifacts/components/artifacts_table_row_details_spec.js
index c6ad13462f9..d006e0285d2 100644
--- a/spec/frontend/artifacts/components/artifacts_table_row_details_spec.js
+++ b/spec/frontend/artifacts/components/artifacts_table_row_details_spec.js
@@ -40,6 +40,7 @@ describe('ArtifactsTableRowDetails component', () => {
refetchArtifacts,
queryVariables: {},
},
+ provide: { canDestroyArtifacts: true },
data() {
return { deletingArtifactId: null };
},
diff --git a/spec/frontend/artifacts/components/feedback_banner_spec.js b/spec/frontend/artifacts/components/feedback_banner_spec.js
new file mode 100644
index 00000000000..3421486020a
--- /dev/null
+++ b/spec/frontend/artifacts/components/feedback_banner_spec.js
@@ -0,0 +1,63 @@
+import { GlBanner } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import FeedbackBanner from '~/artifacts/components/feedback_banner.vue';
+import { makeMockUserCalloutDismisser } from 'helpers/mock_user_callout_dismisser';
+import {
+ I18N_FEEDBACK_BANNER_TITLE,
+ I18N_FEEDBACK_BANNER_BUTTON,
+ FEEDBACK_URL,
+} from '~/artifacts/constants';
+
+const mockBannerImagePath = 'banner/image/path';
+
+describe('Artifacts management feedback banner', () => {
+ let wrapper;
+ let userCalloutDismissSpy;
+
+ const findBanner = () => wrapper.findComponent(GlBanner);
+
+ const createComponent = ({ shouldShowCallout = true } = {}) => {
+ userCalloutDismissSpy = jest.fn();
+
+ wrapper = shallowMount(FeedbackBanner, {
+ provide: {
+ artifactsManagementFeedbackImagePath: mockBannerImagePath,
+ },
+ stubs: {
+ UserCalloutDismisser: makeMockUserCalloutDismisser({
+ dismiss: userCalloutDismissSpy,
+ shouldShowCallout,
+ }),
+ },
+ });
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('is displayed with the correct props', () => {
+ createComponent();
+
+ expect(findBanner().props()).toMatchObject({
+ title: I18N_FEEDBACK_BANNER_TITLE,
+ buttonText: I18N_FEEDBACK_BANNER_BUTTON,
+ buttonLink: FEEDBACK_URL,
+ svgPath: mockBannerImagePath,
+ });
+ });
+
+ it('dismisses the callout when closed', () => {
+ createComponent();
+
+ findBanner().vm.$emit('close');
+
+ expect(userCalloutDismissSpy).toHaveBeenCalled();
+ });
+
+ it('is not displayed once it has been dismissed', () => {
+ createComponent({ shouldShowCallout: false });
+
+ expect(findBanner().exists()).toBe(false);
+ });
+});
diff --git a/spec/frontend/artifacts/components/job_artifacts_table_spec.js b/spec/frontend/artifacts/components/job_artifacts_table_spec.js
index 131b4b99bb2..dbe4598f599 100644
--- a/spec/frontend/artifacts/components/job_artifacts_table_spec.js
+++ b/spec/frontend/artifacts/components/job_artifacts_table_spec.js
@@ -5,6 +5,7 @@ import getJobArtifactsResponse from 'test_fixtures/graphql/artifacts/graphql/que
import CiIcon from '~/vue_shared/components/ci_icon.vue';
import waitForPromises from 'helpers/wait_for_promises';
import JobArtifactsTable from '~/artifacts/components/job_artifacts_table.vue';
+import FeedbackBanner from '~/artifacts/components/feedback_banner.vue';
import ArtifactsTableRowDetails from '~/artifacts/components/artifacts_table_row_details.vue';
import ArtifactDeleteModal from '~/artifacts/components/artifact_delete_modal.vue';
import createMockApollo from 'helpers/mock_apollo_helper';
@@ -23,6 +24,8 @@ describe('JobArtifactsTable component', () => {
let wrapper;
let requestHandlers;
+ const findBanner = () => wrapper.findComponent(FeedbackBanner);
+
const findLoadingState = () => wrapper.findComponent(GlLoadingIcon);
const findTable = () => wrapper.findComponent(GlTable);
const findDetailsRows = () => wrapper.findAllComponents(ArtifactsTableRowDetails);
@@ -79,13 +82,18 @@ describe('JobArtifactsTable component', () => {
getJobArtifactsQuery: jest.fn().mockResolvedValue(getJobArtifactsResponse),
},
data = {},
+ canDestroyArtifacts = true,
) => {
requestHandlers = handlers;
wrapper = mountExtended(JobArtifactsTable, {
apolloProvider: createMockApollo([
[getJobArtifactsQuery, requestHandlers.getJobArtifactsQuery],
]),
- provide: { projectPath: 'project/path' },
+ provide: {
+ projectPath: 'project/path',
+ canDestroyArtifacts,
+ artifactsManagementFeedbackImagePath: 'banner/image/path',
+ },
data() {
return data;
},
@@ -96,6 +104,12 @@ describe('JobArtifactsTable component', () => {
wrapper.destroy();
});
+ it('renders feedback banner', () => {
+ createComponent();
+
+ expect(findBanner().exists()).toBe(true);
+ });
+
it('when loading, shows a loading state', () => {
createComponent();
@@ -283,6 +297,14 @@ describe('JobArtifactsTable component', () => {
});
describe('delete button', () => {
+ it('does not show when user does not have permission', async () => {
+ createComponent({}, {}, false);
+
+ await waitForPromises();
+
+ expect(findDeleteButton().exists()).toBe(false);
+ });
+
it('shows a disabled delete button for now (coming soon)', async () => {
createComponent();
diff --git a/spec/frontend/autosave_spec.js b/spec/frontend/autosave_spec.js
index 7a9262cd004..88460221168 100644
--- a/spec/frontend/autosave_spec.js
+++ b/spec/frontend/autosave_spec.js
@@ -1,4 +1,3 @@
-import $ from 'jquery';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import Autosave from '~/autosave';
import AccessorUtilities from '~/lib/utils/accessor';
@@ -7,12 +6,19 @@ describe('Autosave', () => {
useLocalStorageSpy();
let autosave;
- const field = $('<textarea></textarea>');
- const checkbox = $('<input type="checkbox">');
+ const field = document.createElement('textarea');
+ const checkbox = document.createElement('input');
+ checkbox.type = 'checkbox';
const key = 'key';
const fallbackKey = 'fallbackKey';
const lockVersionKey = 'lockVersionKey';
const lockVersion = 1;
+ const getAutosaveKey = () => `autosave/${key}`;
+ const getAutosaveLockKey = () => `autosave/${key}/lockVersion`;
+
+ afterEach(() => {
+ autosave?.dispose?.();
+ });
describe('class constructor', () => {
beforeEach(() => {
@@ -43,18 +49,10 @@ describe('Autosave', () => {
});
describe('restore', () => {
- beforeEach(() => {
- autosave = {
- field,
- key,
- };
- });
-
describe('if .isLocalStorageAvailable is `false`', () => {
beforeEach(() => {
- autosave.isLocalStorageAvailable = false;
-
- Autosave.prototype.restore.call(autosave);
+ jest.spyOn(AccessorUtilities, 'canUseLocalStorage').mockReturnValue(false);
+ autosave = new Autosave(field, key);
});
it('should not call .getItem', () => {
@@ -63,97 +61,73 @@ describe('Autosave', () => {
});
describe('if .isLocalStorageAvailable is `true`', () => {
- beforeEach(() => {
- autosave.isLocalStorageAvailable = true;
- });
-
it('should call .getItem', () => {
- Autosave.prototype.restore.call(autosave);
-
- expect(window.localStorage.getItem).toHaveBeenCalledWith(key);
+ autosave = new Autosave(field, key);
+ expect(window.localStorage.getItem.mock.calls).toEqual([[getAutosaveKey()], []]);
});
- it('triggers jquery event', () => {
- jest.spyOn(autosave.field, 'trigger').mockImplementation(() => {});
-
- Autosave.prototype.restore.call(autosave);
-
- expect(field.trigger).toHaveBeenCalled();
- });
-
- it('triggers native event', () => {
- const fieldElement = autosave.field.get(0);
- const eventHandler = jest.fn();
- fieldElement.addEventListener('change', eventHandler);
-
- Autosave.prototype.restore.call(autosave);
+ describe('if saved value is present', () => {
+ const storedValue = 'bar';
- expect(eventHandler).toHaveBeenCalledTimes(1);
- fieldElement.removeEventListener('change', eventHandler);
- });
-
- describe('if field type is checkbox', () => {
beforeEach(() => {
- autosave = {
- field: checkbox,
- key,
- isLocalStorageAvailable: true,
- type: 'checkbox',
- };
+ field.value = 'foo';
+ window.localStorage.setItem(getAutosaveKey(), storedValue);
});
- it('should restore', () => {
- window.localStorage.setItem(key, true);
- expect(checkbox.is(':checked')).toBe(false);
- Autosave.prototype.restore.call(autosave);
- expect(checkbox.is(':checked')).toBe(true);
+ it('restores the value', () => {
+ autosave = new Autosave(field, key);
+ expect(field.value).toEqual(storedValue);
});
- });
- });
- describe('if field gets deleted from DOM', () => {
- beforeEach(() => {
- autosave.field = $('.not-a-real-element');
- });
+ it('triggers native event', () => {
+ const eventHandler = jest.fn();
+ field.addEventListener('change', eventHandler);
+ autosave = new Autosave(field, key);
- it('does not trigger event', () => {
- jest.spyOn(field, 'trigger');
+ expect(eventHandler).toHaveBeenCalledTimes(1);
+ field.removeEventListener('change', eventHandler);
+ });
+
+ describe('if field type is checkbox', () => {
+ beforeEach(() => {
+ checkbox.checked = false;
+ window.localStorage.setItem(getAutosaveKey(), true);
+ autosave = new Autosave(checkbox, key);
+ });
- expect(field.trigger).not.toHaveBeenCalled();
+ it('should restore', () => {
+ expect(checkbox.checked).toBe(true);
+ });
+ });
});
});
});
describe('getSavedLockVersion', () => {
- beforeEach(() => {
- autosave = {
- field,
- key,
- lockVersionKey,
- };
- });
-
describe('if .isLocalStorageAvailable is `false`', () => {
beforeEach(() => {
- autosave.isLocalStorageAvailable = false;
-
- Autosave.prototype.getSavedLockVersion.call(autosave);
+ jest.spyOn(AccessorUtilities, 'canUseLocalStorage').mockReturnValue(false);
+ autosave = new Autosave(field, key);
});
it('should not call .getItem', () => {
+ autosave.getSavedLockVersion();
expect(window.localStorage.getItem).not.toHaveBeenCalled();
});
});
describe('if .isLocalStorageAvailable is `true`', () => {
beforeEach(() => {
- autosave.isLocalStorageAvailable = true;
+ autosave = new Autosave(field, key);
});
it('should call .getItem', () => {
- Autosave.prototype.getSavedLockVersion.call(autosave);
-
- expect(window.localStorage.getItem).toHaveBeenCalledWith(lockVersionKey);
+ autosave.getSavedLockVersion();
+ expect(window.localStorage.getItem.mock.calls).toEqual([
+ [getAutosaveKey()],
+ [],
+ [getAutosaveLockKey()],
+ ]);
});
});
});
@@ -162,7 +136,7 @@ describe('Autosave', () => {
beforeEach(() => {
autosave = { reset: jest.fn() };
autosave.field = field;
- field.val('value');
+ field.value = 'value';
});
describe('if .isLocalStorageAvailable is `false`', () => {
@@ -200,14 +174,14 @@ describe('Autosave', () => {
});
it('should save true when checkbox on', () => {
- checkbox.prop('checked', true);
+ checkbox.checked = true;
Autosave.prototype.save.call(autosave);
expect(window.localStorage.setItem).toHaveBeenCalledWith(key, true);
});
it('should call reset when checkbox off', () => {
autosave.reset = jest.fn();
- checkbox.prop('checked', false);
+ checkbox.checked = false;
Autosave.prototype.save.call(autosave);
expect(autosave.reset).toHaveBeenCalled();
expect(window.localStorage.setItem).not.toHaveBeenCalled();
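The autosave hunks above replace jQuery field wrappers and `Autosave.prototype.*.call(...)` invocations with plain DOM elements and real `Autosave` instances that restore a draft at construction time. A minimal sketch of that pattern, assuming the `~/lib/utils/accessor` import path for `AccessorUtilities` and an `autosave/<key>` storage-key format (neither is shown in these hunks):

import Autosave from '~/autosave';
import AccessorUtilities from '~/lib/utils/accessor'; // assumed path for the AccessorUtilities spied on above

it('restores a stored draft into a native textarea on construction', () => {
  jest.spyOn(AccessorUtilities, 'canUseLocalStorage').mockReturnValue(true);

  const field = document.createElement('textarea');
  const key = 'issue/1'; // hypothetical key; the real spec builds its own via getAutosaveKey()
  window.localStorage.setItem(`autosave/${key}`, 'bar'); // storage-key format is an assumption

  const autosave = new Autosave(field, key);

  // The constructor reads local storage and writes the draft back into the element.
  expect(field.value).toBe('bar');
  expect(autosave.isLocalStorageAvailable).toBe(true);
});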
diff --git a/spec/frontend/batch_comments/components/submit_dropdown_spec.js b/spec/frontend/batch_comments/components/submit_dropdown_spec.js
index 462ef7e7280..003a6d86371 100644
--- a/spec/frontend/batch_comments/components/submit_dropdown_spec.js
+++ b/spec/frontend/batch_comments/components/submit_dropdown_spec.js
@@ -3,6 +3,8 @@ import Vuex from 'vuex';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import SubmitDropdown from '~/batch_comments/components/submit_dropdown.vue';
+jest.mock('~/autosave');
+
Vue.use(Vuex);
let wrapper;
diff --git a/spec/frontend/behaviors/markdown/render_gfm_spec.js b/spec/frontend/behaviors/markdown/render_gfm_spec.js
new file mode 100644
index 00000000000..0bbb92282e5
--- /dev/null
+++ b/spec/frontend/behaviors/markdown/render_gfm_spec.js
@@ -0,0 +1,9 @@
+import { renderGFM } from '~/behaviors/markdown/render_gfm';
+
+describe('renderGFM', () => {
+ it('handles a missing element', () => {
+ expect(() => {
+ renderGFM();
+ }).not.toThrow();
+ });
+});
diff --git a/spec/frontend/boards/board_card_inner_spec.js b/spec/frontend/boards/board_card_inner_spec.js
index d05e057095d..2c8e6306431 100644
--- a/spec/frontend/boards/board_card_inner_spec.js
+++ b/spec/frontend/boards/board_card_inner_spec.js
@@ -1,7 +1,7 @@
import { GlLabel, GlLoadingIcon, GlTooltip } from '@gitlab/ui';
import { range } from 'lodash';
+import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
-import { nextTick } from 'vue';
import setWindowLocation from 'helpers/set_window_location_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { mountExtended } from 'helpers/vue_test_utils_helper';
@@ -17,6 +17,8 @@ import { mockLabelList, mockIssue, mockIssueFullPath } from './mock_data';
jest.mock('~/lib/utils/url_utility');
jest.mock('~/boards/eventhub');
+Vue.use(Vuex);
+
describe('Board card component', () => {
const user = {
id: 1,
@@ -52,25 +54,19 @@ describe('Board card component', () => {
const performSearchMock = jest.fn();
- const createStore = ({ isProjectBoard = false } = {}) => {
+ const createStore = () => {
store = new Vuex.Store({
- ...defaultStore,
actions: {
performSearch: performSearchMock,
},
state: {
...defaultStore.state,
- issuableType: issuableTypes.issue,
isShowingLabels: true,
},
- getters: {
- isGroupBoard: () => true,
- isProjectBoard: () => isProjectBoard,
- },
});
};
- const createWrapper = ({ props = {}, isEpicBoard = false } = {}) => {
+ const createWrapper = ({ props = {}, isEpicBoard = false, isGroupBoard = true } = {}) => {
wrapper = mountExtended(BoardCardInner, {
store,
propsData: {
@@ -97,6 +93,8 @@ describe('Board card component', () => {
rootPath: '/',
scopedLabelsAvailable: false,
isEpicBoard,
+ issuableType: issuableTypes.issue,
+ isGroupBoard,
},
});
};
@@ -164,8 +162,8 @@ describe('Board card component', () => {
});
it('does not render item reference path', () => {
- createStore({ isProjectBoard: true });
- createWrapper();
+ createStore();
+ createWrapper({ isGroupBoard: false });
expect(wrapper.find('.board-card-number').text()).not.toContain(mockIssueFullPath);
});
diff --git a/spec/frontend/boards/board_list_helper.js b/spec/frontend/boards/board_list_helper.js
index c5c3faf1712..1ba546f24a8 100644
--- a/spec/frontend/boards/board_list_helper.js
+++ b/spec/frontend/boards/board_list_helper.js
@@ -58,8 +58,6 @@ export default function createComponent({
...state,
},
getters: {
- isGroupBoard: () => false,
- isProjectBoard: () => true,
isEpicBoard: () => false,
...getters,
},
@@ -88,7 +86,6 @@ export default function createComponent({
apolloProvider: fakeApollo,
store,
propsData: {
- disabled: false,
list,
boardItems: [issue],
canAdminList: true,
@@ -97,12 +94,16 @@ export default function createComponent({
provide: {
groupId: null,
rootPath: '/',
+ fullPath: 'gitlab-org',
boardId: '1',
weightFeatureAvailable: false,
boardWeight: null,
canAdminList: true,
isIssueBoard: true,
isEpicBoard: false,
+ isGroupBoard: false,
+ isProjectBoard: true,
+ disabled: false,
...provide,
},
stubs,
diff --git a/spec/frontend/boards/board_list_spec.js b/spec/frontend/boards/board_list_spec.js
index 34c0504143c..abe8c230bd8 100644
--- a/spec/frontend/boards/board_list_spec.js
+++ b/spec/frontend/boards/board_list_spec.js
@@ -267,7 +267,7 @@ describe('Board list component', () => {
describe('when dragging is not allowed', () => {
beforeEach(() => {
wrapper = createComponent({
- componentProps: {
+ provide: {
disabled: true,
},
});
diff --git a/spec/frontend/boards/components/board_app_spec.js b/spec/frontend/boards/components/board_app_spec.js
index c209f2f82e6..872a67a71fb 100644
--- a/spec/frontend/boards/components/board_app_spec.js
+++ b/spec/frontend/boards/components/board_app_spec.js
@@ -23,11 +23,10 @@ describe('BoardApp', () => {
});
};
- const createComponent = ({ provide = { disabled: true } } = {}) => {
+ const createComponent = () => {
wrapper = shallowMount(BoardApp, {
store,
provide: {
- ...provide,
fullBoardId: 'gid://gitlab/Board/1',
},
});
diff --git a/spec/frontend/boards/components/board_card_spec.js b/spec/frontend/boards/components/board_card_spec.js
index 38b79e2e3f3..f8ad7c468c1 100644
--- a/spec/frontend/boards/components/board_card_spec.js
+++ b/spec/frontend/boards/components/board_card_spec.js
@@ -29,9 +29,6 @@ describe('Board card', () => {
...initialState,
},
actions: mockActions,
- getters: {
- isProjectBoard: () => false,
- },
});
};
@@ -52,7 +49,6 @@ describe('Board card', () => {
propsData: {
list: mockLabelList,
item,
- disabled: false,
index: 0,
...propsData,
},
@@ -61,6 +57,10 @@ describe('Board card', () => {
rootPath: '/',
scopedLabelsAvailable: false,
isEpicBoard: false,
+ issuableType: 'issue',
+ isProjectBoard: false,
+ isGroupBoard: true,
+ disabled: false,
...provide,
},
});
diff --git a/spec/frontend/boards/components/board_column_spec.js b/spec/frontend/boards/components/board_column_spec.js
index c13f7caba76..d34e228a2d7 100644
--- a/spec/frontend/boards/components/board_column_spec.js
+++ b/spec/frontend/boards/components/board_column_spec.js
@@ -34,7 +34,6 @@ describe('Board Column Component', () => {
wrapper = shallowMount(BoardColumn, {
store,
propsData: {
- disabled: false,
list: listMock,
},
});
diff --git a/spec/frontend/boards/components/board_content_sidebar_spec.js b/spec/frontend/boards/components/board_content_sidebar_spec.js
index 0d5b1d16e30..51c42b48535 100644
--- a/spec/frontend/boards/components/board_content_sidebar_spec.js
+++ b/spec/frontend/boards/components/board_content_sidebar_spec.js
@@ -7,7 +7,7 @@ import SidebarDropdownWidget from 'ee_else_ce/sidebar/components/sidebar_dropdow
import { stubComponent } from 'helpers/stub_component';
import BoardContentSidebar from '~/boards/components/board_content_sidebar.vue';
import BoardSidebarTitle from '~/boards/components/sidebar/board_sidebar_title.vue';
-import { ISSUABLE } from '~/boards/constants';
+import { ISSUABLE, issuableTypes } from '~/boards/constants';
import SidebarDateWidget from '~/sidebar/components/date/sidebar_date_widget.vue';
import SidebarSeverity from '~/sidebar/components/severity/sidebar_severity.vue';
import SidebarSubscriptionsWidget from '~/sidebar/components/subscriptions/sidebar_subscriptions_widget.vue';
@@ -26,7 +26,6 @@ describe('BoardContentSidebar', () => {
sidebarType: ISSUABLE,
issues: { [mockIssue.id]: { ...mockIssue, epic: null } },
activeId: mockIssue.id,
- issuableType: 'issue',
},
getters: {
activeBoardItem: () => {
@@ -35,7 +34,6 @@ describe('BoardContentSidebar', () => {
groupPathForActiveIssue: () => mockIssueGroupPath,
projectPathForActiveIssue: () => mockIssueProjectPath,
isSidebarOpen: () => true,
- isGroupBoard: () => false,
...mockGetters,
},
actions: mockActions,
@@ -55,6 +53,8 @@ describe('BoardContentSidebar', () => {
canUpdate: true,
rootPath: '/',
groupId: 1,
+ issuableType: issuableTypes.issue,
+ isGroupBoard: false,
},
store,
stubs: {
diff --git a/spec/frontend/boards/components/board_content_spec.js b/spec/frontend/boards/components/board_content_spec.js
index 82e7ab48e7d..97596c86198 100644
--- a/spec/frontend/boards/components/board_content_spec.js
+++ b/spec/frontend/boards/components/board_content_spec.js
@@ -60,7 +60,6 @@ describe('BoardContent', () => {
wrapper = shallowMount(BoardContent, {
apolloProvider: fakeApollo,
propsData: {
- disabled: false,
boardId: 'gid://gitlab/Board/1',
...props,
},
@@ -71,6 +70,8 @@ describe('BoardContent', () => {
issuableType,
isIssueBoard,
isEpicBoard,
+ isGroupBoard: true,
+ disabled: false,
isApolloBoard,
},
store,
diff --git a/spec/frontend/boards/components/board_filtered_search_spec.js b/spec/frontend/boards/components/board_filtered_search_spec.js
index e80c66f7fb8..4c0cc36889c 100644
--- a/spec/frontend/boards/components/board_filtered_search_spec.js
+++ b/spec/frontend/boards/components/board_filtered_search_spec.js
@@ -139,6 +139,7 @@ describe('BoardFilteredSearch', () => {
{ type: TOKEN_TYPE_ITERATION, value: { data: 'Any&3', operator: '=' } },
{ type: TOKEN_TYPE_RELEASE, value: { data: 'v1.0.0', operator: '=' } },
{ type: TOKEN_TYPE_HEALTH, value: { data: 'onTrack', operator: '=' } },
+ { type: TOKEN_TYPE_HEALTH, value: { data: 'atRisk', operator: '!=' } },
];
jest.spyOn(urlUtility, 'updateHistory');
findFilteredSearch().vm.$emit('onFilter', mockFilters);
@@ -147,7 +148,7 @@ describe('BoardFilteredSearch', () => {
title: '',
replace: true,
url:
- 'http://test.host/?author_username=root&label_name[]=label&label_name[]=label%262&assignee_username=root&milestone_title=New%20Milestone&iteration_id=Any&iteration_cadence_id=3&types=INCIDENT&weight=2&release_tag=v1.0.0&health_status=onTrack',
+ 'http://test.host/?not[health_status]=atRisk&author_username=root&label_name[]=label&label_name[]=label%262&assignee_username=root&milestone_title=New%20Milestone&iteration_id=Any&iteration_cadence_id=3&types=INCIDENT&weight=2&release_tag=v1.0.0&health_status=onTrack',
});
});
diff --git a/spec/frontend/boards/components/board_form_spec.js b/spec/frontend/boards/components/board_form_spec.js
index fdc16b46167..f8154145d43 100644
--- a/spec/frontend/boards/components/board_form_spec.js
+++ b/spec/frontend/boards/components/board_form_spec.js
@@ -53,10 +53,6 @@ describe('BoardForm', () => {
const setErrorMock = jest.fn();
const store = new Vuex.Store({
- getters: {
- isGroupBoard: () => true,
- isProjectBoard: () => false,
- },
actions: {
setBoard: setBoardMock,
setError: setErrorMock,
@@ -73,6 +69,8 @@ describe('BoardForm', () => {
},
provide: {
boardBaseUrl: 'root',
+ isGroupBoard: true,
+ isProjectBoard: false,
},
mocks: {
$apollo: {
diff --git a/spec/frontend/boards/components/board_list_header_spec.js b/spec/frontend/boards/components/board_list_header_spec.js
index 4633612891c..a16b99728c3 100644
--- a/spec/frontend/boards/components/board_list_header_spec.js
+++ b/spec/frontend/boards/components/board_list_header_spec.js
@@ -68,7 +68,6 @@ describe('Board List Header Component', () => {
apolloProvider: fakeApollo,
store,
propsData: {
- disabled: false,
list: listMock,
},
provide: {
@@ -76,6 +75,7 @@ describe('Board List Header Component', () => {
weightFeatureAvailable: false,
currentUserId,
isEpicBoard: false,
+ disabled: false,
},
}),
);
diff --git a/spec/frontend/boards/components/board_new_issue_spec.js b/spec/frontend/boards/components/board_new_issue_spec.js
index f097f42476a..c3e69ba0e40 100644
--- a/spec/frontend/boards/components/board_new_issue_spec.js
+++ b/spec/frontend/boards/components/board_new_issue_spec.js
@@ -14,9 +14,10 @@ const addListNewIssuesSpy = jest.fn().mockResolvedValue();
const mockActions = { addListNewIssue: addListNewIssuesSpy };
const createComponent = ({
- state = { selectedProject: mockGroupProjects[0], fullPath: mockGroupProjects[0].fullPath },
+ state = { selectedProject: mockGroupProjects[0] },
actions = mockActions,
- getters = { isGroupBoard: () => true, getBoardItemsByList: () => () => [] },
+ getters = { getBoardItemsByList: () => () => [] },
+ isGroupBoard = true,
} = {}) =>
shallowMount(BoardNewIssue, {
store: new Vuex.Store({
@@ -29,8 +30,10 @@ const createComponent = ({
},
provide: {
groupId: 1,
+ fullPath: mockGroupProjects[0].fullPath,
weightFeatureAvailable: false,
boardWeight: null,
+ isGroupBoard,
},
stubs: {
BoardNewItem,
@@ -84,9 +87,9 @@ describe('Issue boards new issue form', () => {
beforeEach(() => {
wrapper = createComponent({
getters: {
- isGroupBoard: () => true,
getBoardItemsByList: () => () => [mockIssue, mockIssue2],
},
+ isGroupBoard: true,
});
});
@@ -128,7 +131,7 @@ describe('Issue boards new issue form', () => {
describe('when in project issue board', () => {
beforeEach(() => {
wrapper = createComponent({
- getters: { isGroupBoard: () => false },
+ isGroupBoard: false,
});
});
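Across the board specs above, `isGroupBoard`, `isProjectBoard`, `issuableType`, `fullPath`, and `disabled` move out of Vuex getters, state, and props and into `provide`. A minimal factory sketch following the new `board_new_issue_spec.js` shape; the component path and mock location are assumptions, and the remaining mount options (propsData, stubs) are omitted and follow the spec above:

import Vue from 'vue';
import Vuex from 'vuex';
import { shallowMount } from '@vue/test-utils';
import BoardNewIssue from '~/boards/components/board_new_issue.vue'; // assumed component path
import { mockGroupProjects } from '../mock_data'; // assumed location of the shared board mocks

Vue.use(Vuex);

const createComponent = ({ isGroupBoard = true } = {}) =>
  shallowMount(BoardNewIssue, {
    store: new Vuex.Store({
      state: { selectedProject: mockGroupProjects[0] }, // fullPath no longer lives in Vuex state
      actions: { addListNewIssue: jest.fn().mockResolvedValue() },
      getters: { getBoardItemsByList: () => () => [] },
    }),
    provide: {
      groupId: 1,
      fullPath: mockGroupProjects[0].fullPath, // injected instead of read from the store
      weightFeatureAvailable: false,
      boardWeight: null,
      isGroupBoard, // board flavour injected instead of derived from an isGroupBoard getter
    },
  });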
diff --git a/spec/frontend/boards/components/board_top_bar_spec.js b/spec/frontend/boards/components/board_top_bar_spec.js
index 08b5042f70f..af492145eb0 100644
--- a/spec/frontend/boards/components/board_top_bar_spec.js
+++ b/spec/frontend/boards/components/board_top_bar_spec.js
@@ -33,6 +33,7 @@ describe('BoardTopBar', () => {
boardType: 'group',
releasesFetchPath: '/releases',
isIssueBoard: true,
+ isGroupBoard: true,
...provide,
},
stubs: { IssueBoardFilteredSearch },
diff --git a/spec/frontend/boards/components/boards_selector_spec.js b/spec/frontend/boards/components/boards_selector_spec.js
index f3be66db36f..7b61ca5e6fd 100644
--- a/spec/frontend/boards/components/boards_selector_spec.js
+++ b/spec/frontend/boards/components/boards_selector_spec.js
@@ -10,7 +10,6 @@ import groupBoardsQuery from '~/boards/graphql/group_boards.query.graphql';
import projectBoardsQuery from '~/boards/graphql/project_boards.query.graphql';
import groupRecentBoardsQuery from '~/boards/graphql/group_recent_boards.query.graphql';
import projectRecentBoardsQuery from '~/boards/graphql/project_recent_boards.query.graphql';
-import defaultStore from '~/boards/stores';
import createMockApollo from 'helpers/mock_apollo_helper';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import {
@@ -28,25 +27,20 @@ import {
const throttleDuration = 1;
Vue.use(VueApollo);
+Vue.use(Vuex);
describe('BoardsSelector', () => {
let wrapper;
let fakeApollo;
let store;
- const createStore = ({ isGroupBoard = false, isProjectBoard = false } = {}) => {
+ const createStore = () => {
store = new Vuex.Store({
- ...defaultStore,
actions: {
setError: jest.fn(),
setBoardConfig: jest.fn(),
},
- getters: {
- isGroupBoard: () => isGroupBoard,
- isProjectBoard: () => isProjectBoard,
- },
state: {
- boardType: isGroupBoard ? 'group' : 'project',
board: mockBoard,
},
});
@@ -86,6 +80,8 @@ describe('BoardsSelector', () => {
const createComponent = ({
projectBoardsQueryHandler = projectBoardsQueryHandlerSuccess,
projectRecentBoardsQueryHandler = projectRecentBoardsQueryHandlerSuccess,
+ isGroupBoard = false,
+ isProjectBoard = false,
} = {}) => {
fakeApollo = createMockApollo([
[projectBoardsQuery, projectBoardsQueryHandler],
@@ -109,6 +105,9 @@ describe('BoardsSelector', () => {
multipleIssueBoardsAvailable: true,
scopedIssueBoardFeatureEnabled: true,
weights: [],
+ boardType: isGroupBoard ? 'group' : 'project',
+ isGroupBoard,
+ isProjectBoard,
},
});
};
@@ -120,8 +119,8 @@ describe('BoardsSelector', () => {
describe('template', () => {
beforeEach(() => {
- createStore({ isProjectBoard: true });
- createComponent();
+ createStore();
+ createComponent({ isProjectBoard: true });
});
describe('loading', () => {
@@ -229,11 +228,11 @@ describe('BoardsSelector', () => {
${BoardType.group} | ${groupBoardsQueryHandlerSuccess} | ${projectBoardsQueryHandlerSuccess}
${BoardType.project} | ${projectBoardsQueryHandlerSuccess} | ${groupBoardsQueryHandlerSuccess}
`('fetches $boardType boards', async ({ boardType, queryHandler, notCalledHandler }) => {
- createStore({
- isProjectBoard: boardType === BoardType.project,
+ createStore();
+ createComponent({
isGroupBoard: boardType === BoardType.group,
+ isProjectBoard: boardType === BoardType.project,
});
- createComponent();
await nextTick();
diff --git a/spec/frontend/boards/components/issue_board_filtered_search_spec.js b/spec/frontend/boards/components/issue_board_filtered_search_spec.js
index 513561307cd..57a30ddc512 100644
--- a/spec/frontend/boards/components/issue_board_filtered_search_spec.js
+++ b/spec/frontend/boards/components/issue_board_filtered_search_spec.js
@@ -18,7 +18,7 @@ describe('IssueBoardFilter', () => {
isSignedIn,
releasesFetchPath: '/releases',
fullPath: 'gitlab-org',
- boardType: 'group',
+ isGroupBoard: true,
},
});
};
diff --git a/spec/frontend/boards/stores/getters_spec.js b/spec/frontend/boards/stores/getters_spec.js
index 304f2aad98e..c86a256bd96 100644
--- a/spec/frontend/boards/stores/getters_spec.js
+++ b/spec/frontend/boards/stores/getters_spec.js
@@ -12,42 +12,6 @@ import {
} from '../mock_data';
describe('Boards - Getters', () => {
- describe('isGroupBoard', () => {
- it('returns true when boardType on state is group', () => {
- const state = {
- boardType: 'group',
- };
-
- expect(getters.isGroupBoard(state)).toBe(true);
- });
-
- it('returns false when boardType on state is not group', () => {
- const state = {
- boardType: 'project',
- };
-
- expect(getters.isGroupBoard(state)).toBe(false);
- });
- });
-
- describe('isProjectBoard', () => {
- it('returns true when boardType on state is project', () => {
- const state = {
- boardType: 'project',
- };
-
- expect(getters.isProjectBoard(state)).toBe(true);
- });
-
- it('returns false when boardType on state is not project', () => {
- const state = {
- boardType: 'group',
- };
-
- expect(getters.isProjectBoard(state)).toBe(false);
- });
- });
-
describe('isSidebarOpen', () => {
it('returns true when activeId is not equal to 0', () => {
const state = {
diff --git a/spec/frontend/captcha/captcha_modal_axios_interceptor_spec.js b/spec/frontend/captcha/captcha_modal_axios_interceptor_spec.js
index b2a25bc93ea..002fe7c6e71 100644
--- a/spec/frontend/captcha/captcha_modal_axios_interceptor_spec.js
+++ b/spec/frontend/captcha/captcha_modal_axios_interceptor_spec.js
@@ -4,9 +4,11 @@ import { registerCaptchaModalInterceptor } from '~/captcha/captcha_modal_axios_i
import UnsolvedCaptchaError from '~/captcha/unsolved_captcha_error';
import { waitForCaptchaToBeSolved } from '~/captcha/wait_for_captcha_to_be_solved';
import axios from '~/lib/utils/axios_utils';
-import httpStatusCodes, {
+import {
HTTP_STATUS_CONFLICT,
HTTP_STATUS_METHOD_NOT_ALLOWED,
+ HTTP_STATUS_NOT_FOUND,
+ HTTP_STATUS_OK,
} from '~/lib/utils/http_status';
jest.mock('~/captcha/wait_for_captcha_to_be_solved');
@@ -46,7 +48,7 @@ describe('registerCaptchaModalInterceptor', () => {
} = config.headers;
if (captchaResponse === CAPTCHA_RESPONSE && spamLogId === SPAM_LOG_ID) {
- return [httpStatusCodes.OK, { ...data, method: config.method, CAPTCHA_SUCCESS }];
+ return [HTTP_STATUS_OK, { ...data, method: config.method, CAPTCHA_SUCCESS }];
}
return [HTTP_STATUS_CONFLICT, NEEDS_CAPTCHA_RESPONSE];
@@ -64,7 +66,7 @@ describe('registerCaptchaModalInterceptor', () => {
it('successful requests are passed through', async () => {
const { data, status } = await axios[method]('/endpoint-without-captcha');
- expect(status).toEqual(httpStatusCodes.OK);
+ expect(status).toEqual(HTTP_STATUS_OK);
expect(data).toEqual(AXIOS_RESPONSE);
expect(mock.history[method]).toHaveLength(1);
});
@@ -73,7 +75,7 @@ describe('registerCaptchaModalInterceptor', () => {
await expect(() => axios[method]('/endpoint-with-unrelated-error')).rejects.toThrow(
expect.objectContaining({
response: expect.objectContaining({
- status: httpStatusCodes.NOT_FOUND,
+ status: HTTP_STATUS_NOT_FOUND,
data: AXIOS_RESPONSE,
}),
}),
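The interceptor hunks above drop the default `httpStatusCodes` export in favour of individually named constants. A minimal sketch of the same pattern with axios-mock-adapter, using only constants and assertion shapes that appear in these hunks (the endpoint paths are placeholders):

import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK, HTTP_STATUS_NOT_FOUND } from '~/lib/utils/http_status';

it('uses named status constants instead of httpStatusCodes.*', async () => {
  const mock = new MockAdapter(axios);
  mock.onGet('/ok-endpoint').reply(HTTP_STATUS_OK, { ok: true });
  mock.onGet('/missing-endpoint').reply(HTTP_STATUS_NOT_FOUND);

  const { status } = await axios.get('/ok-endpoint');
  expect(status).toBe(HTTP_STATUS_OK);

  await expect(axios.get('/missing-endpoint')).rejects.toThrow(
    expect.objectContaining({
      response: expect.objectContaining({ status: HTTP_STATUS_NOT_FOUND }),
    }),
  );

  mock.restore();
});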
diff --git a/spec/frontend/ci_variable_list/ci_variable_list/ci_variable_list_spec.js b/spec/frontend/ci/ci_variable_list/ci_variable_list/ci_variable_list_spec.js
index 2210b0f48d6..e4abedb412f 100644
--- a/spec/frontend/ci_variable_list/ci_variable_list/ci_variable_list_spec.js
+++ b/spec/frontend/ci/ci_variable_list/ci_variable_list/ci_variable_list_spec.js
@@ -1,6 +1,6 @@
import $ from 'jquery';
import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import VariableList from '~/ci_variable_list/ci_variable_list';
+import VariableList from '~/ci/ci_variable_list/ci_variable_list';
const HIDE_CLASS = 'hide';
diff --git a/spec/frontend/ci_variable_list/ci_variable_list/native_form_variable_list_spec.js b/spec/frontend/ci/ci_variable_list/ci_variable_list/native_form_variable_list_spec.js
index 57f666e29d6..71e8e6d3afb 100644
--- a/spec/frontend/ci_variable_list/ci_variable_list/native_form_variable_list_spec.js
+++ b/spec/frontend/ci/ci_variable_list/ci_variable_list/native_form_variable_list_spec.js
@@ -1,6 +1,6 @@
import $ from 'jquery';
import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import setupNativeFormVariableList from '~/ci_variable_list/native_form_variable_list';
+import setupNativeFormVariableList from '~/ci/ci_variable_list/native_form_variable_list';
describe('NativeFormVariableList', () => {
let $wrapper;
diff --git a/spec/frontend/ci_variable_list/components/ci_admin_variables_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_admin_variables_spec.js
index aa83638773d..5e0c35c9f90 100644
--- a/spec/frontend/ci_variable_list/components/ci_admin_variables_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_admin_variables_spec.js
@@ -1,7 +1,7 @@
import { shallowMount } from '@vue/test-utils';
-import ciAdminVariables from '~/ci_variable_list/components/ci_admin_variables.vue';
-import ciVariableShared from '~/ci_variable_list/components/ci_variable_shared.vue';
+import ciAdminVariables from '~/ci/ci_variable_list/components/ci_admin_variables.vue';
+import ciVariableShared from '~/ci/ci_variable_list/components/ci_variable_shared.vue';
describe('Ci Project Variable wrapper', () => {
let wrapper;
diff --git a/spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js
new file mode 100644
index 00000000000..2fd395a1230
--- /dev/null
+++ b/spec/frontend/ci/ci_variable_list/components/ci_environments_dropdown_spec.js
@@ -0,0 +1,118 @@
+import { GlListboxItem, GlCollapsibleListbox, GlDropdownItem, GlIcon } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import { allEnvironments } from '~/ci/ci_variable_list/constants';
+import CiEnvironmentsDropdown from '~/ci/ci_variable_list/components/ci_environments_dropdown.vue';
+
+describe('Ci environments dropdown', () => {
+ let wrapper;
+
+ const envs = ['dev', 'prod', 'staging'];
+ const defaultProps = { environments: envs, selectedEnvironmentScope: '' };
+
+ const findAllListboxItems = () => wrapper.findAllComponents(GlListboxItem);
+ const findListboxItemByIndex = (index) => wrapper.findAllComponents(GlListboxItem).at(index);
+ const findActiveIconByIndex = (index) => findListboxItemByIndex(index).findComponent(GlIcon);
+ const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findListboxText = () => findListbox().props('toggleText');
+ const findCreateWildcardButton = () => wrapper.findComponent(GlDropdownItem);
+
+ const createComponent = ({ props = {}, searchTerm = '' } = {}) => {
+ wrapper = mount(CiEnvironmentsDropdown, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ });
+
+ findListbox().vm.$emit('search', searchTerm);
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('No environments found', () => {
+ beforeEach(() => {
+ createComponent({ searchTerm: 'stable' });
+ });
+
+ it('renders create button with search term if environments do not contain search term', () => {
+ const button = findCreateWildcardButton();
+ expect(button.exists()).toBe(true);
+ expect(button.text()).toBe('Create wildcard: stable');
+ });
+ });
+
+ describe('Search term is empty', () => {
+ beforeEach(() => {
+ createComponent({ props: { environments: envs } });
+ });
+
+ it('renders all environments when search term is empty', () => {
+ expect(findListboxItemByIndex(0).text()).toBe(envs[0]);
+ expect(findListboxItemByIndex(1).text()).toBe(envs[1]);
+ expect(findListboxItemByIndex(2).text()).toBe(envs[2]);
+ });
+
+ it('does not display active checkmark on the inactive stage', () => {
+ expect(findActiveIconByIndex(0).classes('gl-visibility-hidden')).toBe(true);
+ });
+ });
+
+ describe('when `*` is the value of selectedEnvironmentScope props', () => {
+ const wildcardScope = '*';
+
+ beforeEach(() => {
+ createComponent({ props: { selectedEnvironmentScope: wildcardScope } });
+ });
+
+ it('shows the `All environments` text and not the wildcard', () => {
+ expect(findListboxText()).toContain(allEnvironments.text);
+ expect(findListboxText()).not.toContain(wildcardScope);
+ });
+ });
+
+ describe('Environments found', () => {
+ const currentEnv = envs[2];
+
+ beforeEach(() => {
+ createComponent({ searchTerm: currentEnv });
+ });
+
+ it('renders only the environment searched for', () => {
+ expect(findAllListboxItems()).toHaveLength(1);
+ expect(findListboxItemByIndex(0).text()).toBe(currentEnv);
+ });
+
+ it('does not display create button', () => {
+ expect(findCreateWildcardButton().exists()).toBe(false);
+ });
+
+ describe('Custom events', () => {
+ describe('when selecting an environment', () => {
+ const itemIndex = 0;
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('emits `select-environment` when an environment is clicked', () => {
+ findListbox().vm.$emit('select', envs[itemIndex]);
+ expect(wrapper.emitted('select-environment')).toEqual([[envs[itemIndex]]]);
+ });
+ });
+
+ describe('when creating a new environment from a search term', () => {
+ const search = 'new-env';
+ beforeEach(() => {
+ createComponent({ searchTerm: search });
+ });
+
+ it('emits create-environment-scope', () => {
+ findCreateWildcardButton().vm.$emit('click');
+ expect(wrapper.emitted('create-environment-scope')).toEqual([[search]]);
+ });
+ });
+ });
+ });
+});
diff --git a/spec/frontend/ci_variable_list/components/ci_group_variables_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js
index ef624d8e4b4..3f1eebbc6a5 100644
--- a/spec/frontend/ci_variable_list/components/ci_group_variables_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_group_variables_spec.js
@@ -1,10 +1,10 @@
import { shallowMount } from '@vue/test-utils';
import { convertToGraphQLId } from '~/graphql_shared/utils';
-import ciGroupVariables from '~/ci_variable_list/components/ci_group_variables.vue';
-import ciVariableShared from '~/ci_variable_list/components/ci_variable_shared.vue';
+import ciGroupVariables from '~/ci/ci_variable_list/components/ci_group_variables.vue';
+import ciVariableShared from '~/ci/ci_variable_list/components/ci_variable_shared.vue';
-import { GRAPHQL_GROUP_TYPE } from '~/ci_variable_list/constants';
+import { GRAPHQL_GROUP_TYPE } from '~/ci/ci_variable_list/constants';
const mockProvide = {
glFeatures: {
diff --git a/spec/frontend/ci_variable_list/components/ci_project_variables_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_project_variables_spec.js
index 53c25e430f2..7230017c560 100644
--- a/spec/frontend/ci_variable_list/components/ci_project_variables_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_project_variables_spec.js
@@ -1,10 +1,10 @@
import { shallowMount } from '@vue/test-utils';
import { convertToGraphQLId } from '~/graphql_shared/utils';
-import ciProjectVariables from '~/ci_variable_list/components/ci_project_variables.vue';
-import ciVariableShared from '~/ci_variable_list/components/ci_variable_shared.vue';
+import ciProjectVariables from '~/ci/ci_variable_list/components/ci_project_variables.vue';
+import ciVariableShared from '~/ci/ci_variable_list/components/ci_variable_shared.vue';
-import { GRAPHQL_PROJECT_TYPE } from '~/ci_variable_list/constants';
+import { GRAPHQL_PROJECT_TYPE } from '~/ci/ci_variable_list/constants';
const mockProvide = {
projectFullPath: '/namespace/project',
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js
index d177e755591..7838e4884d8 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_modal_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_modal_spec.js
@@ -1,8 +1,8 @@
import { GlButton, GlFormInput } from '@gitlab/ui';
import { mockTracking } from 'helpers/tracking_helper';
import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
-import CiEnvironmentsDropdown from '~/ci_variable_list/components/ci_environments_dropdown.vue';
-import CiVariableModal from '~/ci_variable_list/components/ci_variable_modal.vue';
+import CiEnvironmentsDropdown from '~/ci/ci_variable_list/components/ci_environments_dropdown.vue';
+import CiVariableModal from '~/ci/ci_variable_list/components/ci_variable_modal.vue';
import {
ADD_VARIABLE_ACTION,
AWS_ACCESS_KEY_ID,
@@ -12,7 +12,7 @@ import {
ENVIRONMENT_SCOPE_LINK_TITLE,
instanceString,
variableOptions,
-} from '~/ci_variable_list/constants';
+} from '~/ci/ci_variable_list/constants';
import { mockVariablesWithScopes } from '../mocks';
import ModalStub from '../stubs';
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js
index 5e459ee390f..32af2ec4de9 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_settings_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_settings_spec.js
@@ -1,14 +1,14 @@
import { nextTick } from 'vue';
import { shallowMount } from '@vue/test-utils';
-import CiVariableSettings from '~/ci_variable_list/components/ci_variable_settings.vue';
-import ciVariableModal from '~/ci_variable_list/components/ci_variable_modal.vue';
-import ciVariableTable from '~/ci_variable_list/components/ci_variable_table.vue';
+import CiVariableSettings from '~/ci/ci_variable_list/components/ci_variable_settings.vue';
+import ciVariableModal from '~/ci/ci_variable_list/components/ci_variable_modal.vue';
+import ciVariableTable from '~/ci/ci_variable_list/components/ci_variable_table.vue';
import {
ADD_VARIABLE_ACTION,
EDIT_VARIABLE_ACTION,
projectString,
-} from '~/ci_variable_list/constants';
-import { mapEnvironmentNames } from '~/ci_variable_list/utils';
+} from '~/ci/ci_variable_list/constants';
+import { mapEnvironmentNames } from '~/ci/ci_variable_list/utils';
import { mockEnvs, mockVariablesWithScopes, newVariable } from '../mocks';
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_shared_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js
index 65a58a1647f..2d39bff8ce0 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_shared_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_shared_spec.js
@@ -5,16 +5,16 @@ import { shallowMount } from '@vue/test-utils';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/flash';
-import { resolvers } from '~/ci_variable_list/graphql/settings';
+import { resolvers } from '~/ci/ci_variable_list/graphql/settings';
import { convertToGraphQLId } from '~/graphql_shared/utils';
-import ciVariableShared from '~/ci_variable_list/components/ci_variable_shared.vue';
-import ciVariableSettings from '~/ci_variable_list/components/ci_variable_settings.vue';
-import ciVariableTable from '~/ci_variable_list/components/ci_variable_table.vue';
-import getProjectEnvironments from '~/ci_variable_list/graphql/queries/project_environments.query.graphql';
-import getAdminVariables from '~/ci_variable_list/graphql/queries/variables.query.graphql';
-import getGroupVariables from '~/ci_variable_list/graphql/queries/group_variables.query.graphql';
-import getProjectVariables from '~/ci_variable_list/graphql/queries/project_variables.query.graphql';
+import ciVariableShared from '~/ci/ci_variable_list/components/ci_variable_shared.vue';
+import ciVariableSettings from '~/ci/ci_variable_list/components/ci_variable_settings.vue';
+import ciVariableTable from '~/ci/ci_variable_list/components/ci_variable_table.vue';
+import getProjectEnvironments from '~/ci/ci_variable_list/graphql/queries/project_environments.query.graphql';
+import getAdminVariables from '~/ci/ci_variable_list/graphql/queries/variables.query.graphql';
+import getGroupVariables from '~/ci/ci_variable_list/graphql/queries/group_variables.query.graphql';
+import getProjectVariables from '~/ci/ci_variable_list/graphql/queries/project_variables.query.graphql';
import {
ADD_MUTATION_ACTION,
@@ -23,7 +23,7 @@ import {
environmentFetchErrorText,
genericMutationErrorText,
variableFetchErrorText,
-} from '~/ci_variable_list/constants';
+} from '~/ci/ci_variable_list/constants';
import {
createGroupProps,
diff --git a/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js b/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js
index 9891bc397b6..9e2508c56ee 100644
--- a/spec/frontend/ci_variable_list/components/ci_variable_table_spec.js
+++ b/spec/frontend/ci/ci_variable_list/components/ci_variable_table_spec.js
@@ -1,8 +1,8 @@
import { GlAlert } from '@gitlab/ui';
import { sprintf } from '~/locale';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-import CiVariableTable from '~/ci_variable_list/components/ci_variable_table.vue';
-import { EXCEEDS_VARIABLE_LIMIT_TEXT, projectString } from '~/ci_variable_list/constants';
+import CiVariableTable from '~/ci/ci_variable_list/components/ci_variable_table.vue';
+import { EXCEEDS_VARIABLE_LIMIT_TEXT, projectString } from '~/ci/ci_variable_list/constants';
import { mockVariables } from '../mocks';
describe('Ci variable table', () => {
diff --git a/spec/frontend/ci_variable_list/mocks.js b/spec/frontend/ci/ci_variable_list/mocks.js
index 065e9fa6667..4da4f53f69f 100644
--- a/spec/frontend/ci_variable_list/mocks.js
+++ b/spec/frontend/ci/ci_variable_list/mocks.js
@@ -6,22 +6,22 @@ import {
groupString,
instanceString,
projectString,
-} from '~/ci_variable_list/constants';
-
-import addAdminVariable from '~/ci_variable_list/graphql/mutations/admin_add_variable.mutation.graphql';
-import deleteAdminVariable from '~/ci_variable_list/graphql/mutations/admin_delete_variable.mutation.graphql';
-import updateAdminVariable from '~/ci_variable_list/graphql/mutations/admin_update_variable.mutation.graphql';
-import addGroupVariable from '~/ci_variable_list/graphql/mutations/group_add_variable.mutation.graphql';
-import deleteGroupVariable from '~/ci_variable_list/graphql/mutations/group_delete_variable.mutation.graphql';
-import updateGroupVariable from '~/ci_variable_list/graphql/mutations/group_update_variable.mutation.graphql';
-import addProjectVariable from '~/ci_variable_list/graphql/mutations/project_add_variable.mutation.graphql';
-import deleteProjectVariable from '~/ci_variable_list/graphql/mutations/project_delete_variable.mutation.graphql';
-import updateProjectVariable from '~/ci_variable_list/graphql/mutations/project_update_variable.mutation.graphql';
-
-import getAdminVariables from '~/ci_variable_list/graphql/queries/variables.query.graphql';
-import getGroupVariables from '~/ci_variable_list/graphql/queries/group_variables.query.graphql';
-import getProjectEnvironments from '~/ci_variable_list/graphql/queries/project_environments.query.graphql';
-import getProjectVariables from '~/ci_variable_list/graphql/queries/project_variables.query.graphql';
+} from '~/ci/ci_variable_list/constants';
+
+import addAdminVariable from '~/ci/ci_variable_list/graphql/mutations/admin_add_variable.mutation.graphql';
+import deleteAdminVariable from '~/ci/ci_variable_list/graphql/mutations/admin_delete_variable.mutation.graphql';
+import updateAdminVariable from '~/ci/ci_variable_list/graphql/mutations/admin_update_variable.mutation.graphql';
+import addGroupVariable from '~/ci/ci_variable_list/graphql/mutations/group_add_variable.mutation.graphql';
+import deleteGroupVariable from '~/ci/ci_variable_list/graphql/mutations/group_delete_variable.mutation.graphql';
+import updateGroupVariable from '~/ci/ci_variable_list/graphql/mutations/group_update_variable.mutation.graphql';
+import addProjectVariable from '~/ci/ci_variable_list/graphql/mutations/project_add_variable.mutation.graphql';
+import deleteProjectVariable from '~/ci/ci_variable_list/graphql/mutations/project_delete_variable.mutation.graphql';
+import updateProjectVariable from '~/ci/ci_variable_list/graphql/mutations/project_update_variable.mutation.graphql';
+
+import getAdminVariables from '~/ci/ci_variable_list/graphql/queries/variables.query.graphql';
+import getGroupVariables from '~/ci/ci_variable_list/graphql/queries/group_variables.query.graphql';
+import getProjectEnvironments from '~/ci/ci_variable_list/graphql/queries/project_environments.query.graphql';
+import getProjectVariables from '~/ci/ci_variable_list/graphql/queries/project_variables.query.graphql';
export const devName = 'dev';
export const prodName = 'prod';
diff --git a/spec/frontend/ci_variable_list/services/mock_data.js b/spec/frontend/ci/ci_variable_list/services/mock_data.js
index 44f4db93c63..44f4db93c63 100644
--- a/spec/frontend/ci_variable_list/services/mock_data.js
+++ b/spec/frontend/ci/ci_variable_list/services/mock_data.js
diff --git a/spec/frontend/ci_variable_list/stubs.js b/spec/frontend/ci/ci_variable_list/stubs.js
index 5769d6190f6..5769d6190f6 100644
--- a/spec/frontend/ci_variable_list/stubs.js
+++ b/spec/frontend/ci/ci_variable_list/stubs.js
diff --git a/spec/frontend/ci_variable_list/utils_spec.js b/spec/frontend/ci/ci_variable_list/utils_spec.js
index 081c399792f..beeae71376a 100644
--- a/spec/frontend/ci_variable_list/utils_spec.js
+++ b/spec/frontend/ci/ci_variable_list/utils_spec.js
@@ -2,8 +2,8 @@ import {
createJoinedEnvironments,
convertEnvironmentScope,
mapEnvironmentNames,
-} from '~/ci_variable_list/utils';
-import { allEnvironments } from '~/ci_variable_list/constants';
+} from '~/ci/ci_variable_list/utils';
+import { allEnvironments } from '~/ci/ci_variable_list/constants';
describe('utils', () => {
const environments = ['dev', 'prod'];
diff --git a/spec/frontend/ci/pipeline_editor/components/editor/text_editor_spec.js b/spec/frontend/ci/pipeline_editor/components/editor/text_editor_spec.js
index 63e23c41263..ec987be8cb8 100644
--- a/spec/frontend/ci/pipeline_editor/components/editor/text_editor_spec.js
+++ b/spec/frontend/ci/pipeline_editor/components/editor/text_editor_spec.js
@@ -26,14 +26,13 @@ describe('Pipeline Editor | Text editor component', () => {
props: ['value', 'fileName', 'editorOptions', 'debounceValue'],
};
- const createComponent = (glFeatures = {}, mountFn = shallowMount) => {
+ const createComponent = (mountFn = shallowMount) => {
wrapper = mountFn(TextEditor, {
provide: {
projectPath: mockProjectPath,
projectNamespace: mockProjectNamespace,
ciConfigPath: mockCiConfigPath,
defaultBranch: mockDefaultBranch,
- glFeatures,
},
propsData: {
commitSha: mockCommitSha,
@@ -107,28 +106,14 @@ describe('Pipeline Editor | Text editor component', () => {
});
describe('CI schema', () => {
- describe('when `schema_linting` feature flag is on', () => {
- beforeEach(() => {
- createComponent({ schemaLinting: true });
- findEditor().vm.$emit(EDITOR_READY_EVENT, editorInstanceDetail);
- });
-
- it('configures editor with syntax highlight', () => {
- expect(mockUse).toHaveBeenCalledTimes(1);
- expect(mockRegisterCiSchema).toHaveBeenCalledTimes(1);
- });
+ beforeEach(() => {
+ createComponent();
+ findEditor().vm.$emit(EDITOR_READY_EVENT, editorInstanceDetail);
});
- describe('when `schema_linting` feature flag is off', () => {
- beforeEach(() => {
- createComponent();
- findEditor().vm.$emit(EDITOR_READY_EVENT, editorInstanceDetail);
- });
-
- it('does not call the register CI schema function', () => {
- expect(mockUse).not.toHaveBeenCalled();
- expect(mockRegisterCiSchema).not.toHaveBeenCalled();
- });
+ it('configures editor with syntax highlight', () => {
+ expect(mockUse).toHaveBeenCalledTimes(1);
+ expect(mockRegisterCiSchema).toHaveBeenCalledTimes(1);
});
});
});
diff --git a/spec/frontend/ci/pipeline_editor/graphql/resolvers_spec.js b/spec/frontend/ci/pipeline_editor/graphql/resolvers_spec.js
index e54c72a758f..6a6cc3a14de 100644
--- a/spec/frontend/ci/pipeline_editor/graphql/resolvers_spec.js
+++ b/spec/frontend/ci/pipeline_editor/graphql/resolvers_spec.js
@@ -1,6 +1,6 @@
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { resolvers } from '~/ci/pipeline_editor/graphql/resolvers';
import { mockLintResponse } from '../mock_data';
@@ -20,7 +20,7 @@ describe('~/ci/pipeline_editor/graphql/resolvers', () => {
beforeEach(async () => {
mock = new MockAdapter(axios);
- mock.onPost(endpoint).reply(httpStatus.OK, mockLintResponse);
+ mock.onPost(endpoint).reply(HTTP_STATUS_OK, mockLintResponse);
result = await resolvers.Mutation.lintCI(null, {
endpoint,
diff --git a/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js b/spec/frontend/ci/pipeline_new/components/pipeline_new_form_spec.js
index 2360dd7d103..cd16045f92d 100644
--- a/spec/frontend/pipeline_new/components/pipeline_new_form_spec.js
+++ b/spec/frontend/ci/pipeline_new/components/pipeline_new_form_spec.js
@@ -8,12 +8,16 @@ import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_help
import { TEST_HOST } from 'helpers/test_constants';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
-import httpStatusCodes from '~/lib/utils/http_status';
+import {
+ HTTP_STATUS_BAD_REQUEST,
+ HTTP_STATUS_INTERNAL_SERVER_ERROR,
+ HTTP_STATUS_OK,
+} from '~/lib/utils/http_status';
import { redirectTo } from '~/lib/utils/url_utility';
-import PipelineNewForm from '~/pipeline_new/components/pipeline_new_form.vue';
-import ciConfigVariablesQuery from '~/pipeline_new/graphql/queries/ci_config_variables.graphql';
-import { resolvers } from '~/pipeline_new/graphql/resolvers';
-import RefsDropdown from '~/pipeline_new/components/refs_dropdown.vue';
+import PipelineNewForm from '~/ci/pipeline_new/components/pipeline_new_form.vue';
+import ciConfigVariablesQuery from '~/ci/pipeline_new/graphql/queries/ci_config_variables.graphql';
+import { resolvers } from '~/ci/pipeline_new/graphql/resolvers';
+import RefsDropdown from '~/ci/pipeline_new/components/refs_dropdown.vue';
import {
mockCreditCardValidationRequiredError,
mockCiConfigVariablesResponse,
@@ -108,7 +112,7 @@ describe('Pipeline New Form', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
mockCiConfigVariables = jest.fn();
- mock.onGet(projectRefsEndpoint).reply(httpStatusCodes.OK, mockRefs);
+ mock.onGet(projectRefsEndpoint).reply(HTTP_STATUS_OK, mockRefs);
dummySubmitEvent = {
preventDefault: jest.fn(),
@@ -173,7 +177,7 @@ describe('Pipeline New Form', () => {
describe('Pipeline creation', () => {
beforeEach(async () => {
mockCiConfigVariables.mockResolvedValue(mockEmptyCiConfigVariablesResponse);
- mock.onPost(pipelinesPath).reply(httpStatusCodes.OK, newPipelinePostResponse);
+ mock.onPost(pipelinesPath).reply(HTTP_STATUS_OK, newPipelinePostResponse);
});
it('does not submit the native HTML form', async () => {
@@ -365,7 +369,7 @@ describe('Pipeline New Form', () => {
beforeEach(() => {
mock
.onGet(projectRefsEndpoint, { params: { search: '' } })
- .reply(httpStatusCodes.INTERNAL_SERVER_ERROR);
+ .reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
findRefsDropdown().vm.$emit('loadingError');
});
@@ -378,7 +382,7 @@ describe('Pipeline New Form', () => {
describe('when the error response can be handled', () => {
beforeEach(async () => {
- mock.onPost(pipelinesPath).reply(httpStatusCodes.BAD_REQUEST, mockError);
+ mock.onPost(pipelinesPath).reply(HTTP_STATUS_BAD_REQUEST, mockError);
findForm().vm.$emit('submit', dummySubmitEvent);
@@ -416,7 +420,7 @@ describe('Pipeline New Form', () => {
beforeEach(async () => {
mock
.onPost(pipelinesPath)
- .reply(httpStatusCodes.BAD_REQUEST, mockCreditCardValidationRequiredError);
+ .reply(HTTP_STATUS_BAD_REQUEST, mockCreditCardValidationRequiredError);
window.gon = {
subscriptions_url: TEST_HOST,
@@ -449,9 +453,7 @@ describe('Pipeline New Form', () => {
describe('when the error response cannot be handled', () => {
beforeEach(async () => {
- mock
- .onPost(pipelinesPath)
- .reply(httpStatusCodes.INTERNAL_SERVER_ERROR, 'something went wrong');
+ mock.onPost(pipelinesPath).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR, 'something went wrong');
findForm().vm.$emit('submit', dummySubmitEvent);
diff --git a/spec/frontend/pipeline_new/components/refs_dropdown_spec.js b/spec/frontend/ci/pipeline_new/components/refs_dropdown_spec.js
index 8cba876c688..cf8009e388f 100644
--- a/spec/frontend/pipeline_new/components/refs_dropdown_spec.js
+++ b/spec/frontend/ci/pipeline_new/components/refs_dropdown_spec.js
@@ -1,13 +1,13 @@
-import { GlDropdown, GlDropdownItem, GlSearchBoxByType } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
+import { GlListbox, GlListboxItem } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
+import { mountExtended, shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
-import httpStatusCodes from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
-import RefsDropdown from '~/pipeline_new/components/refs_dropdown.vue';
+import RefsDropdown from '~/ci/pipeline_new/components/refs_dropdown.vue';
-import { mockRefs, mockFilteredRefs } from '../mock_data';
+import { mockBranches, mockRefs, mockFilteredRefs, mockTags } from '../mock_data';
const projectRefsEndpoint = '/root/project/refs';
const refShortName = 'main';
@@ -19,11 +19,12 @@ describe('Pipeline New Form', () => {
let wrapper;
let mock;
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findRefsDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
- const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
+ const findDropdown = () => wrapper.findComponent(GlListbox);
+ const findRefsDropdownItems = () => wrapper.findAllComponents(GlListboxItem);
+ const findSearchBox = () => wrapper.findByTestId('listbox-search-input');
+ const findListboxGroups = () => wrapper.findAll('ul[role="group"]');
- const createComponent = (props = {}, mountFn = shallowMount) => {
+ const createComponent = (props = {}, mountFn = shallowMountExtended) => {
wrapper = mountFn(RefsDropdown, {
provide: {
projectRefsEndpoint,
@@ -40,22 +41,15 @@ describe('Pipeline New Form', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet(projectRefsEndpoint, { params: { search: '' } }).reply(httpStatusCodes.OK, mockRefs);
- });
-
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
-
- mock.restore();
+ mock.onGet(projectRefsEndpoint, { params: { search: '' } }).reply(HTTP_STATUS_OK, mockRefs);
});
beforeEach(() => {
createComponent();
});
- it('displays empty dropdown initially', async () => {
- await findDropdown().vm.$emit('show');
+ it('displays empty dropdown initially', () => {
+ findDropdown().vm.$emit('shown');
expect(findRefsDropdownItems()).toHaveLength(0);
});
@@ -66,19 +60,19 @@ describe('Pipeline New Form', () => {
describe('when user opens dropdown', () => {
beforeEach(async () => {
- await findDropdown().vm.$emit('show');
+ createComponent({}, mountExtended);
+ findDropdown().vm.$emit('shown');
await waitForPromises();
});
- it('requests unfiltered tags and branches', async () => {
+ it('requests unfiltered tags and branches', () => {
expect(mock.history.get).toHaveLength(1);
expect(mock.history.get[0].url).toBe(projectRefsEndpoint);
expect(mock.history.get[0].params).toEqual({ search: '' });
});
- it('displays dropdown with branches and tags', async () => {
+ it('displays dropdown with branches and tags', () => {
const refLength = mockRefs.Tags.length + mockRefs.Branches.length;
-
expect(findRefsDropdownItems()).toHaveLength(refLength);
});
@@ -99,7 +93,8 @@ describe('Pipeline New Form', () => {
const selectedIndex = 1;
beforeEach(async () => {
- await findRefsDropdownItems().at(selectedIndex).vm.$emit('click');
+ findRefsDropdownItems().at(selectedIndex).vm.$emit('select', 'refs/heads/branch-1');
+ await waitForPromises();
});
it('component emits @input', () => {
@@ -116,7 +111,7 @@ describe('Pipeline New Form', () => {
beforeEach(async () => {
mock
.onGet(projectRefsEndpoint, { params: { search: mockSearchTerm } })
- .reply(httpStatusCodes.OK, mockFilteredRefs);
+ .reply(HTTP_STATUS_OK, mockFilteredRefs);
await findSearchBox().vm.$emit('input', mockSearchTerm);
await waitForPromises();
@@ -147,20 +142,23 @@ describe('Pipeline New Form', () => {
.onGet(projectRefsEndpoint, {
params: { ref: mockFullName },
})
- .reply(httpStatusCodes.OK, mockRefs);
-
- createComponent({
- value: {
- shortName: mockShortName,
- fullName: mockFullName,
+ .reply(HTTP_STATUS_OK, mockRefs);
+
+ createComponent(
+ {
+ value: {
+ shortName: mockShortName,
+ fullName: mockFullName,
+ },
},
- });
- await findDropdown().vm.$emit('show');
+ mountExtended,
+ );
+ findDropdown().vm.$emit('shown');
await waitForPromises();
});
it('branch is checked', () => {
- expect(findRefsDropdownItems().at(selectedIndex).props('isChecked')).toBe(true);
+ expect(findRefsDropdownItems().at(selectedIndex).props('isSelected')).toBe(true);
});
});
@@ -168,9 +166,9 @@ describe('Pipeline New Form', () => {
beforeEach(async () => {
mock
.onGet(projectRefsEndpoint, { params: { search: '' } })
- .reply(httpStatusCodes.INTERNAL_SERVER_ERROR);
+ .reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
- await findDropdown().vm.$emit('show');
+ findDropdown().vm.$emit('shown');
await waitForPromises();
});
@@ -179,4 +177,25 @@ describe('Pipeline New Form', () => {
expect(wrapper.emitted('loadingError')[0]).toEqual([expect.any(Error)]);
});
});
+
+ describe('should display branches and tags based on its length', () => {
+ it.each`
+ mockData | expectedGroupLength | expectedListboxItemsLength
+ ${{ ...mockBranches, Tags: [] }} | ${1} | ${mockBranches.Branches.length}
+ ${{ Branches: [], ...mockTags }} | ${1} | ${mockTags.Tags.length}
+ ${{ ...mockRefs }} | ${2} | ${mockBranches.Branches.length + mockTags.Tags.length}
+ ${{ Branches: undefined, Tags: undefined }} | ${0} | ${0}
+ `(
+ 'should render branches and tags based on presence',
+ async ({ mockData, expectedGroupLength, expectedListboxItemsLength }) => {
+ mock.onGet(projectRefsEndpoint, { params: { search: '' } }).reply(HTTP_STATUS_OK, mockData);
+ createComponent({}, mountExtended);
+ findDropdown().vm.$emit('shown');
+ await waitForPromises();
+
+ expect(findListboxGroups()).toHaveLength(expectedGroupLength);
+ expect(findRefsDropdownItems()).toHaveLength(expectedListboxItemsLength);
+ },
+ );
+ });
});
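The refs_dropdown hunks above swap GlDropdown/GlDropdownItem for GlListbox/GlListboxItem, listen for the listbox's `shown` event instead of `show`, and select refs by value. A minimal sketch of that interaction, assuming the default `value` prop shape used elsewhere in the spec:

import { GlListbox, GlListboxItem } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import RefsDropdown from '~/ci/pipeline_new/components/refs_dropdown.vue';
import { mockRefs } from '../mock_data';

it('lists branches and tags once the listbox emits `shown`', async () => {
  const projectRefsEndpoint = '/root/project/refs';
  const mock = new MockAdapter(axios);
  mock.onGet(projectRefsEndpoint, { params: { search: '' } }).reply(HTTP_STATUS_OK, mockRefs);

  const wrapper = mountExtended(RefsDropdown, {
    provide: { projectRefsEndpoint },
    propsData: { value: { shortName: 'main', fullName: 'refs/heads/main' } }, // assumed default value shape
  });

  wrapper.findComponent(GlListbox).vm.$emit('shown'); // fetch happens on `shown`, not `show`
  await waitForPromises();

  expect(wrapper.findAllComponents(GlListboxItem)).toHaveLength(
    mockRefs.Branches.length + mockRefs.Tags.length,
  );
  mock.restore();
});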
diff --git a/spec/frontend/pipeline_new/mock_data.js b/spec/frontend/ci/pipeline_new/mock_data.js
index 2af0ef4d7c4..dfb643a0ba4 100644
--- a/spec/frontend/pipeline_new/mock_data.js
+++ b/spec/frontend/ci/pipeline_new/mock_data.js
@@ -1,8 +1,16 @@
-export const mockRefs = {
+export const mockBranches = {
Branches: ['main', 'branch-1', 'branch-2'],
+};
+
+export const mockTags = {
Tags: ['1.0.0', '1.1.0', '1.2.0'],
};
+export const mockRefs = {
+ ...mockBranches,
+ ...mockTags,
+};
+
export const mockFilteredRefs = {
Branches: ['branch-1'],
Tags: ['1.0.0', '1.1.0'],
diff --git a/spec/frontend/pipeline_new/utils/filter_variables_spec.js b/spec/frontend/ci/pipeline_new/utils/filter_variables_spec.js
index 42bc6244456..d1b89704b58 100644
--- a/spec/frontend/pipeline_new/utils/filter_variables_spec.js
+++ b/spec/frontend/ci/pipeline_new/utils/filter_variables_spec.js
@@ -1,4 +1,4 @@
-import filterVariables from '~/pipeline_new/utils/filter_variables';
+import filterVariables from '~/ci/pipeline_new/utils/filter_variables';
import { mockVariables } from '../mock_data';
describe('Filter variables utility function', () => {
diff --git a/spec/frontend/ci/pipeline_new/utils/format_refs_spec.js b/spec/frontend/ci/pipeline_new/utils/format_refs_spec.js
new file mode 100644
index 00000000000..137a9339649
--- /dev/null
+++ b/spec/frontend/ci/pipeline_new/utils/format_refs_spec.js
@@ -0,0 +1,82 @@
+import { BRANCH_REF_TYPE, TAG_REF_TYPE } from '~/ci/pipeline_new/constants';
+import {
+ formatRefs,
+ formatListBoxItems,
+ searchByFullNameInListboxOptions,
+} from '~/ci/pipeline_new/utils/format_refs';
+import { mockBranchRefs, mockTagRefs } from '../mock_data';
+
+describe('Format refs util', () => {
+ it('formats branch ref correctly', () => {
+ expect(formatRefs(mockBranchRefs, BRANCH_REF_TYPE)).toEqual([
+ { fullName: 'refs/heads/main', shortName: 'main' },
+ { fullName: 'refs/heads/dev', shortName: 'dev' },
+ { fullName: 'refs/heads/release', shortName: 'release' },
+ ]);
+ });
+
+ it('formats tag ref correctly', () => {
+ expect(formatRefs(mockTagRefs, TAG_REF_TYPE)).toEqual([
+ { fullName: 'refs/tags/1.0.0', shortName: '1.0.0' },
+ { fullName: 'refs/tags/1.1.0', shortName: '1.1.0' },
+ { fullName: 'refs/tags/1.2.0', shortName: '1.2.0' },
+ ]);
+ });
+});
+
+describe('formatListBoxItems', () => {
+ it('formats branches and tags to listbox items correctly', () => {
+ expect(formatListBoxItems(mockBranchRefs, mockTagRefs)).toEqual([
+ {
+ text: 'Branches',
+ options: [
+ { value: 'refs/heads/main', text: 'main' },
+ { value: 'refs/heads/dev', text: 'dev' },
+ { value: 'refs/heads/release', text: 'release' },
+ ],
+ },
+ {
+ text: 'Tags',
+ options: [
+ { value: 'refs/tags/1.0.0', text: '1.0.0' },
+ { value: 'refs/tags/1.1.0', text: '1.1.0' },
+ { value: 'refs/tags/1.2.0', text: '1.2.0' },
+ ],
+ },
+ ]);
+
+ expect(formatListBoxItems(mockBranchRefs, [])).toEqual([
+ {
+ text: 'Branches',
+ options: [
+ { value: 'refs/heads/main', text: 'main' },
+ { value: 'refs/heads/dev', text: 'dev' },
+ { value: 'refs/heads/release', text: 'release' },
+ ],
+ },
+ ]);
+
+ expect(formatListBoxItems([], mockTagRefs)).toEqual([
+ {
+ text: 'Tags',
+ options: [
+ { value: 'refs/tags/1.0.0', text: '1.0.0' },
+ { value: 'refs/tags/1.1.0', text: '1.1.0' },
+ { value: 'refs/tags/1.2.0', text: '1.2.0' },
+ ],
+ },
+ ]);
+ });
+});
+
+describe('searchByFullNameInListboxOptions', () => {
+ const listbox = formatListBoxItems(mockBranchRefs, mockTagRefs);
+
+ it.each`
+ fullName | expectedResult
+ ${'refs/heads/main'} | ${{ fullName: 'refs/heads/main', shortName: 'main' }}
+ ${'refs/tags/1.0.0'} | ${{ fullName: 'refs/tags/1.0.0', shortName: '1.0.0' }}
+ `('should search item in listbox correctly', ({ fullName, expectedResult }) => {
+ expect(searchByFullNameInListboxOptions(fullName, listbox)).toEqual(expectedResult);
+ });
+});
diff --git a/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_spec.js b/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_spec.js
index 4aa4cdf89a1..611993556e3 100644
--- a/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_spec.js
+++ b/spec/frontend/ci/pipeline_schedules/components/pipeline_schedules_spec.js
@@ -1,4 +1,4 @@
-import { GlAlert, GlLoadingIcon, GlTabs } from '@gitlab/ui';
+import { GlAlert, GlEmptyState, GlLink, GlLoadingIcon, GlTabs } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import { trimText } from 'helpers/text_helper';
@@ -10,13 +10,16 @@ import DeletePipelineScheduleModal from '~/ci/pipeline_schedules/components/dele
import TakeOwnershipModal from '~/ci/pipeline_schedules/components/take_ownership_modal.vue';
import PipelineSchedulesTable from '~/ci/pipeline_schedules/components/table/pipeline_schedules_table.vue';
import deletePipelineScheduleMutation from '~/ci/pipeline_schedules/graphql/mutations/delete_pipeline_schedule.mutation.graphql';
+import playPipelineScheduleMutation from '~/ci/pipeline_schedules/graphql/mutations/play_pipeline_schedule.mutation.graphql';
import takeOwnershipMutation from '~/ci/pipeline_schedules/graphql/mutations/take_ownership.mutation.graphql';
import getPipelineSchedulesQuery from '~/ci/pipeline_schedules/graphql/queries/get_pipeline_schedules.query.graphql';
import {
mockGetPipelineSchedulesGraphQLResponse,
mockPipelineScheduleNodes,
deleteMutationResponse,
+ playMutationResponse,
takeOwnershipMutationResponse,
+ emptyPipelineSchedulesResponse,
} from '../mock_data';
Vue.use(VueApollo);
@@ -29,10 +32,13 @@ describe('Pipeline schedules app', () => {
let wrapper;
const successHandler = jest.fn().mockResolvedValue(mockGetPipelineSchedulesGraphQLResponse);
+ const successEmptyHandler = jest.fn().mockResolvedValue(emptyPipelineSchedulesResponse);
const failedHandler = jest.fn().mockRejectedValue(new Error('GraphQL error'));
const deleteMutationHandlerSuccess = jest.fn().mockResolvedValue(deleteMutationResponse);
const deleteMutationHandlerFailed = jest.fn().mockRejectedValue(new Error('GraphQL error'));
+ const playMutationHandlerSuccess = jest.fn().mockResolvedValue(playMutationResponse);
+ const playMutationHandlerFailed = jest.fn().mockRejectedValue(new Error('GraphQL error'));
const takeOwnershipMutationHandlerSuccess = jest
.fn()
.mockResolvedValue(takeOwnershipMutationResponse);
@@ -60,14 +66,18 @@ describe('Pipeline schedules app', () => {
const findTable = () => wrapper.findComponent(PipelineSchedulesTable);
const findAlert = () => wrapper.findComponent(GlAlert);
- const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findDeleteModal = () => wrapper.findComponent(DeletePipelineScheduleModal);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findTakeOwnershipModal = () => wrapper.findComponent(TakeOwnershipModal);
const findTabs = () => wrapper.findComponent(GlTabs);
+ const findEmptyState = () => wrapper.findComponent(GlEmptyState);
+ const findLink = () => wrapper.findComponent(GlLink);
const findNewButton = () => wrapper.findByTestId('new-schedule-button');
const findAllTab = () => wrapper.findByTestId('pipeline-schedules-all-tab');
const findActiveTab = () => wrapper.findByTestId('pipeline-schedules-active-tab');
const findInactiveTab = () => wrapper.findByTestId('pipeline-schedules-inactive-tab');
+ const findSchedulesCharacteristics = () =>
+ wrapper.findByTestId('pipeline-schedules-characteristics');
afterEach(() => {
wrapper.destroy();
@@ -181,6 +191,45 @@ describe('Pipeline schedules app', () => {
});
});
+ describe('playing a pipeline schedule', () => {
+ it('shows play mutation error alert', async () => {
+ createComponent([
+ [getPipelineSchedulesQuery, successHandler],
+ [playPipelineScheduleMutation, playMutationHandlerFailed],
+ ]);
+
+ await waitForPromises();
+
+ findTable().vm.$emit('playPipelineSchedule');
+
+ await waitForPromises();
+
+ expect(findAlert().text()).toBe('There was a problem playing the pipeline schedule.');
+ });
+
+ it('plays pipeline schedule', async () => {
+ createComponent([
+ [getPipelineSchedulesQuery, successHandler],
+ [playPipelineScheduleMutation, playMutationHandlerSuccess],
+ ]);
+
+ await waitForPromises();
+
+ const scheduleId = mockPipelineScheduleNodes[0].id;
+
+ findTable().vm.$emit('playPipelineSchedule', scheduleId);
+
+ await waitForPromises();
+
+ expect(playMutationHandlerSuccess).toHaveBeenCalledWith({
+ id: scheduleId,
+ });
+ expect(findAlert().text()).toBe(
+ 'Successfully scheduled a pipeline to run. Go to the Pipelines page for details.',
+ );
+ });
+ });
+
describe('taking ownership of a pipeline schedule', () => {
it('shows take ownership mutation error alert', async () => {
createComponent([
@@ -277,4 +326,24 @@ describe('Pipeline schedules app', () => {
expect(wrapper.vm.$apollo.queries.schedules.refetch).toHaveBeenCalledTimes(1);
});
});
+
+ describe('Empty pipeline schedules response', () => {
+ it('should show an empty state', async () => {
+ createComponent([[getPipelineSchedulesQuery, successEmptyHandler]]);
+
+ await waitForPromises();
+
+ const schedulesCharacteristics = findSchedulesCharacteristics();
+
+ expect(findEmptyState().exists()).toBe(true);
+ expect(schedulesCharacteristics.text()).toContain('Runs for a specific branch or tag.');
+ expect(schedulesCharacteristics.text()).toContain('Can have custom CI/CD variables.');
+ expect(schedulesCharacteristics.text()).toContain(
+ 'Runs with the same project permissions as the schedule owner.',
+ );
+
+ expect(findLink().exists()).toBe(true);
+ expect(findLink().text()).toContain('scheduled pipelines documentation.');
+ });
+ });
});
diff --git a/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_actions_spec.js b/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_actions_spec.js
index 3364c61d155..6fb6a8bc33b 100644
--- a/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_actions_spec.js
+++ b/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_actions_spec.js
@@ -25,6 +25,7 @@ describe('Pipeline schedule actions', () => {
const findAllButtons = () => wrapper.findAllComponents(GlButton);
const findDeleteBtn = () => wrapper.findByTestId('delete-pipeline-schedule-btn');
const findTakeOwnershipBtn = () => wrapper.findByTestId('take-ownership-pipeline-schedule-btn');
+ const findPlayScheduleBtn = () => wrapper.findByTestId('play-pipeline-schedule-btn');
afterEach(() => {
wrapper.destroy();
@@ -61,4 +62,14 @@ describe('Pipeline schedule actions', () => {
showTakeOwnershipModal: [[mockTakeOwnershipNodes[0].id]],
});
});
+
+ it('play button emits playPipelineSchedule event and schedule id', () => {
+ createComponent();
+
+ findPlayScheduleBtn().vm.$emit('click');
+
+ expect(wrapper.emitted()).toEqual({
+ playPipelineSchedule: [[mockPipelineScheduleNodes[0].id]],
+ });
+ });
});
diff --git a/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline_spec.js b/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline_spec.js
index 17bf465baf3..0821c59c8a0 100644
--- a/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline_spec.js
+++ b/spec/frontend/ci/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline_spec.js
@@ -1,5 +1,5 @@
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import CiBadge from '~/vue_shared/components/ci_badge_link.vue';
+import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
import PipelineScheduleLastPipeline from '~/ci/pipeline_schedules/components/table/cells/pipeline_schedule_last_pipeline.vue';
import { mockPipelineScheduleNodes } from '../../../mock_data';
@@ -18,7 +18,7 @@ describe('Pipeline schedule last pipeline', () => {
});
};
- const findCIBadge = () => wrapper.findComponent(CiBadge);
+ const findCIBadgeLink = () => wrapper.findComponent(CiBadgeLink);
const findStatusText = () => wrapper.findByTestId('pipeline-schedule-status-text');
afterEach(() => {
@@ -28,8 +28,10 @@ describe('Pipeline schedule last pipeline', () => {
it('displays pipeline status', () => {
createComponent();
- expect(findCIBadge().exists()).toBe(true);
- expect(findCIBadge().props('status')).toBe(defaultProps.schedule.lastPipeline.detailedStatus);
+ expect(findCIBadgeLink().exists()).toBe(true);
+ expect(findCIBadgeLink().props('status')).toBe(
+ defaultProps.schedule.lastPipeline.detailedStatus,
+ );
expect(findStatusText().exists()).toBe(false);
});
@@ -37,6 +39,6 @@ describe('Pipeline schedule last pipeline', () => {
createComponent({ schedule: mockPipelineScheduleNodes[0] });
expect(findStatusText().text()).toBe('None');
- expect(findCIBadge().exists()).toBe(false);
+ expect(findCIBadgeLink().exists()).toBe(false);
});
});
diff --git a/spec/frontend/ci/pipeline_schedules/mock_data.js b/spec/frontend/ci/pipeline_schedules/mock_data.js
index 3010f1d06c3..2826c054249 100644
--- a/spec/frontend/ci/pipeline_schedules/mock_data.js
+++ b/spec/frontend/ci/pipeline_schedules/mock_data.js
@@ -32,6 +32,14 @@ export const mockPipelineScheduleNodes = nodes;
export const mockPipelineScheduleAsGuestNodes = guestNodes;
export const mockTakeOwnershipNodes = takeOwnershipNodes;
+export const emptyPipelineSchedulesResponse = {
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/1',
+ pipelineSchedules: { nodes: [], count: 0 },
+ },
+ },
+};
export const deleteMutationResponse = {
data: {
@@ -43,6 +51,16 @@ export const deleteMutationResponse = {
},
};
+export const playMutationResponse = {
+ data: {
+ pipelineSchedulePlay: {
+ clientMutationId: null,
+ errors: [],
+ __typename: 'PipelineSchedulePlayPayload',
+ },
+ },
+};
+
export const takeOwnershipMutationResponse = {
data: {
pipelineScheduleTakeOwnership: {
diff --git a/spec/frontend/ci/runner/components/registration/registration_dropdown_spec.js b/spec/frontend/ci/runner/components/registration/registration_dropdown_spec.js
index cb46c668930..0ecafdd7d83 100644
--- a/spec/frontend/ci/runner/components/registration/registration_dropdown_spec.js
+++ b/spec/frontend/ci/runner/components/registration/registration_dropdown_spec.js
@@ -13,12 +13,12 @@ import RegistrationTokenResetDropdownItem from '~/ci/runner/components/registrat
import { INSTANCE_TYPE, GROUP_TYPE, PROJECT_TYPE } from '~/ci/runner/constants';
-import getRunnerPlatformsQuery from '~/vue_shared/components/runner_instructions/graphql/queries/get_runner_platforms.query.graphql';
-import getRunnerSetupInstructionsQuery from '~/vue_shared/components/runner_instructions/graphql/queries/get_runner_setup.query.graphql';
+import getRunnerPlatformsQuery from '~/vue_shared/components/runner_instructions/graphql/get_runner_platforms.query.graphql';
+import getRunnerSetupInstructionsQuery from '~/vue_shared/components/runner_instructions/graphql/get_runner_setup.query.graphql';
import {
- mockGraphqlRunnerPlatforms,
- mockGraphqlInstructions,
+ mockRunnerPlatforms,
+ mockInstructions,
} from 'jest/vue_shared/components/runner_instructions/mock_data';
const mockToken = '0123456789';
@@ -67,8 +67,8 @@ describe('RegistrationDropdown', () => {
const createComponentWithModal = () => {
const requestHandlers = [
- [getRunnerPlatformsQuery, jest.fn().mockResolvedValue(mockGraphqlRunnerPlatforms)],
- [getRunnerSetupInstructionsQuery, jest.fn().mockResolvedValue(mockGraphqlInstructions)],
+ [getRunnerPlatformsQuery, jest.fn().mockResolvedValue(mockRunnerPlatforms)],
+ [getRunnerSetupInstructionsQuery, jest.fn().mockResolvedValue(mockInstructions)],
];
createComponent(
diff --git a/spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js b/spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js
deleted file mode 100644
index e9966576cab..00000000000
--- a/spec/frontend/ci_variable_list/components/ci_environments_dropdown_spec.js
+++ /dev/null
@@ -1,139 +0,0 @@
-import { GlDropdown, GlDropdownItem, GlIcon, GlSearchBoxByType } from '@gitlab/ui';
-import { mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
-import { allEnvironments } from '~/ci_variable_list/constants';
-import CiEnvironmentsDropdown from '~/ci_variable_list/components/ci_environments_dropdown.vue';
-
-describe('Ci environments dropdown', () => {
- let wrapper;
-
- const envs = ['dev', 'prod', 'staging'];
- const defaultProps = { environments: envs, selectedEnvironmentScope: '' };
-
- const findDropdownText = () => wrapper.findComponent(GlDropdown).text();
- const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
- const findDropdownItemByIndex = (index) => wrapper.findAllComponents(GlDropdownItem).at(index);
- const findActiveIconByIndex = (index) => findDropdownItemByIndex(index).findComponent(GlIcon);
- const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
-
- const createComponent = ({ props = {}, searchTerm = '' } = {}) => {
- wrapper = mount(CiEnvironmentsDropdown, {
- propsData: {
- ...defaultProps,
- ...props,
- },
- });
-
- findSearchBox().vm.$emit('input', searchTerm);
- };
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('No environments found', () => {
- beforeEach(() => {
- createComponent({ searchTerm: 'stable' });
- });
-
- it('renders create button with search term if environments do not contain search term', () => {
- expect(findAllDropdownItems()).toHaveLength(2);
- expect(findDropdownItemByIndex(1).text()).toBe('Create wildcard: stable');
- });
-
- it('renders empty results message', () => {
- expect(findDropdownItemByIndex(0).text()).toBe('No matching results');
- });
- });
-
- describe('Search term is empty', () => {
- beforeEach(() => {
- createComponent({ props: { environments: envs } });
- });
-
- it('renders all environments when search term is empty', () => {
- expect(findAllDropdownItems()).toHaveLength(3);
- expect(findDropdownItemByIndex(0).text()).toBe(envs[0]);
- expect(findDropdownItemByIndex(1).text()).toBe(envs[1]);
- expect(findDropdownItemByIndex(2).text()).toBe(envs[2]);
- });
-
- it('should not display active checkmark on the inactive stage', () => {
- expect(findActiveIconByIndex(0).classes('gl-visibility-hidden')).toBe(true);
- });
- });
-
- describe('when `*` is the value of selectedEnvironmentScope props', () => {
- const wildcardScope = '*';
-
- beforeEach(() => {
- createComponent({ props: { selectedEnvironmentScope: wildcardScope } });
- });
-
- it('shows the `All environments` text and not the wildcard', () => {
- expect(findDropdownText()).toContain(allEnvironments.text);
- expect(findDropdownText()).not.toContain(wildcardScope);
- });
- });
-
- describe('Environments found', () => {
- const currentEnv = envs[2];
-
- beforeEach(async () => {
- createComponent({ searchTerm: currentEnv });
- await nextTick();
- });
-
- it('renders only the environment searched for', () => {
- expect(findAllDropdownItems()).toHaveLength(1);
- expect(findDropdownItemByIndex(0).text()).toBe(currentEnv);
- });
-
- it('should not display create button', () => {
- const environments = findAllDropdownItems().filter((env) => env.text().startsWith('Create'));
- expect(environments).toHaveLength(0);
- expect(findAllDropdownItems()).toHaveLength(1);
- });
-
- it('should not display empty results message', () => {
- expect(wrapper.findComponent({ ref: 'noMatchingResults' }).exists()).toBe(false);
- });
-
- it('should clear the search term when showing the dropdown', () => {
- wrapper.findComponent(GlDropdown).trigger('click');
-
- expect(findSearchBox().text()).toBe('');
- });
-
- describe('Custom events', () => {
- describe('when clicking on an environment', () => {
- const itemIndex = 0;
-
- beforeEach(() => {
- createComponent();
- });
-
- it('should emit `select-environment` if an environment is clicked', async () => {
- await nextTick();
-
- await findDropdownItemByIndex(itemIndex).vm.$emit('click');
-
- expect(wrapper.emitted('select-environment')).toEqual([[envs[itemIndex]]]);
- });
- });
-
- describe('when creating a new environment from a search term', () => {
- const search = 'new-env';
- beforeEach(() => {
- createComponent({ searchTerm: search });
- });
-
- it('should emit createClicked if an environment is clicked', async () => {
- await nextTick();
- findDropdownItemByIndex(1).vm.$emit('click');
- expect(wrapper.emitted('create-environment-scope')).toEqual([[search]]);
- });
- });
- });
- });
-});
diff --git a/spec/frontend/commit/pipelines/pipelines_table_spec.js b/spec/frontend/commit/pipelines/pipelines_table_spec.js
index d89a238105b..6865b721441 100644
--- a/spec/frontend/commit/pipelines/pipelines_table_spec.js
+++ b/spec/frontend/commit/pipelines/pipelines_table_spec.js
@@ -7,7 +7,11 @@ import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import Api from '~/api';
import PipelinesTable from '~/commit/pipelines/pipelines_table.vue';
-import httpStatusCodes from '~/lib/utils/http_status';
+import {
+ HTTP_STATUS_BAD_REQUEST,
+ HTTP_STATUS_INTERNAL_SERVER_ERROR,
+ HTTP_STATUS_UNAUTHORIZED,
+} from '~/lib/utils/http_status';
import { createAlert } from '~/flash';
import { TOAST_MESSAGE } from '~/pipelines/constants';
import axios from '~/lib/utils/axios_utils';
@@ -243,10 +247,10 @@ describe('Pipelines table in Commits and Merge requests', () => {
'An error occurred while trying to run a new pipeline for this merge request.';
it.each`
- status | message
- ${httpStatusCodes.BAD_REQUEST} | ${defaultMsg}
- ${httpStatusCodes.UNAUTHORIZED} | ${permissionsMsg}
- ${httpStatusCodes.INTERNAL_SERVER_ERROR} | ${defaultMsg}
+ status | message
+ ${HTTP_STATUS_BAD_REQUEST} | ${defaultMsg}
+ ${HTTP_STATUS_UNAUTHORIZED} | ${permissionsMsg}
+ ${HTTP_STATUS_INTERNAL_SERVER_ERROR} | ${defaultMsg}
`('displays permissions error message', async ({ status, message }) => {
const response = { response: { status } };
diff --git a/spec/frontend/constants_spec.js b/spec/frontend/constants_spec.js
new file mode 100644
index 00000000000..b596b62f72c
--- /dev/null
+++ b/spec/frontend/constants_spec.js
@@ -0,0 +1,30 @@
+import * as constants from '~/constants';
+
+describe('Global JS constants', () => {
+ describe('getModifierKey()', () => {
+ afterEach(() => {
+ delete window.gl;
+ });
+
+ it.each`
+ isMac | removeSuffix | expectedKey
+ ${true} | ${false} | ${'⌘'}
+ ${false} | ${false} | ${'Ctrl+'}
+ ${true} | ${true} | ${'⌘'}
+ ${false} | ${true} | ${'Ctrl'}
+ `(
+ 'returns correct keystroke when isMac=$isMac and removeSuffix=$removeSuffix',
+ ({ isMac, removeSuffix, expectedKey }) => {
+ Object.assign(window, {
+ gl: {
+ client: {
+ isMac,
+ },
+ },
+ });
+
+ expect(constants.getModifierKey(removeSuffix)).toBe(expectedKey);
+ },
+ );
+ });
+});
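
The truth table above fully determines the behaviour being tested; a sketch of an implementation consistent with it might look like the following (the real `~/constants` export may differ in details):

```javascript
// Sketch under the spec's assumptions: window.gl.client.isMac selects the
// macOS Cmd symbol, otherwise Ctrl with an optional '+' suffix.
export const getModifierKey = (removeSuffix = false) => {
  if (window.gl?.client?.isMac) {
    return '⌘';
  }

  return removeSuffix ? 'Ctrl' : 'Ctrl+';
};
```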
diff --git a/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js b/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js
index 3ebb305afbf..5a725ac1ca4 100644
--- a/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js
+++ b/spec/frontend/content_editor/components/toolbar_text_style_dropdown_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlCollapsibleListbox } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import EditorStateObserver from '~/content_editor/components/editor_state_observer.vue';
import ToolbarTextStyleDropdown from '~/content_editor/components/toolbar_text_style_dropdown.vue';
@@ -22,8 +22,6 @@ describe('content_editor/components/toolbar_text_style_dropdown', () => {
const buildWrapper = (propsData = {}) => {
wrapper = shallowMountExtended(ToolbarTextStyleDropdown, {
stubs: {
- GlDropdown,
- GlDropdownItem,
EditorStateObserver,
},
provide: {
@@ -35,7 +33,7 @@ describe('content_editor/components/toolbar_text_style_dropdown', () => {
},
});
};
- const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
beforeEach(() => {
buildEditor();
@@ -48,9 +46,10 @@ describe('content_editor/components/toolbar_text_style_dropdown', () => {
it('renders all text styles as dropdown items', () => {
buildWrapper();
- TEXT_STYLE_DROPDOWN_ITEMS.forEach((textStyle) => {
- expect(wrapper.findByText(textStyle.label).exists()).toBe(true);
+ TEXT_STYLE_DROPDOWN_ITEMS.forEach((textStyle, index) => {
+ expect(findListbox().props('items').at(index).text).toContain(textStyle.label);
});
+ expect(findListbox().props('items').length).toBe(TEXT_STYLE_DROPDOWN_ITEMS.length);
});
describe('when there is an active item', () => {
@@ -69,19 +68,11 @@ describe('content_editor/components/toolbar_text_style_dropdown', () => {
});
it('displays the active text style label as the dropdown toggle text', () => {
- expect(findDropdown().props().text).toBe(activeTextStyle.label);
+ expect(findListbox().props('toggleText')).toBe(activeTextStyle.label);
});
it('sets dropdown as enabled', () => {
- expect(findDropdown().props().disabled).toBe(false);
- });
-
- it('sets active item as active', () => {
- const activeItem = wrapper
- .findAllComponents(GlDropdownItem)
- .filter((item) => item.text() === activeTextStyle.label)
- .at(0);
- expect(activeItem.props().isChecked).toBe(true);
+ expect(findListbox().props('disabled')).toBe(false);
});
});
@@ -93,11 +84,11 @@ describe('content_editor/components/toolbar_text_style_dropdown', () => {
});
it('sets dropdown as disabled', () => {
- expect(findDropdown().props().disabled).toBe(true);
+ expect(findListbox().props('disabled')).toBe(true);
});
it('sets dropdown toggle text to Text style', () => {
- expect(findDropdown().props().text).toBe('Text style');
+ expect(findListbox().props('toggleText')).toBe('Text style');
});
});
@@ -109,7 +100,7 @@ describe('content_editor/components/toolbar_text_style_dropdown', () => {
const { editorCommand, commandParams } = textStyle;
const commands = mockChainedCommands(tiptapEditor, [editorCommand, 'focus', 'run']);
- wrapper.findAllComponents(GlDropdownItem).at(index).vm.$emit('click');
+ findListbox().vm.$emit('select', TEXT_STYLE_DROPDOWN_ITEMS[index].label);
expect(commands[editorCommand]).toHaveBeenCalledWith(commandParams || {});
expect(commands.focus).toHaveBeenCalled();
expect(commands.run).toHaveBeenCalled();
@@ -121,7 +112,7 @@ describe('content_editor/components/toolbar_text_style_dropdown', () => {
buildWrapper();
const { contentType, commandParams } = textStyle;
- wrapper.findAllComponents(GlDropdownItem).at(index).vm.$emit('click');
+ findListbox().vm.$emit('select', TEXT_STYLE_DROPDOWN_ITEMS[index].label);
expect(wrapper.emitted('execute')).toEqual([
[
{
diff --git a/spec/frontend/content_editor/extensions/attachment_spec.js b/spec/frontend/content_editor/extensions/attachment_spec.js
index d528096be34..6b804b3b4c6 100644
--- a/spec/frontend/content_editor/extensions/attachment_spec.js
+++ b/spec/frontend/content_editor/extensions/attachment_spec.js
@@ -8,7 +8,7 @@ import Video from '~/content_editor/extensions/video';
import Link from '~/content_editor/extensions/link';
import Loading from '~/content_editor/extensions/loading';
import { VARIANT_DANGER } from '~/flash';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import eventHubFactory from '~/helpers/event_hub_factory';
import { createTestEditor, createDocBuilder } from '../test_utils';
import {
@@ -132,7 +132,7 @@ describe('content_editor/extensions/attachment', () => {
};
beforeEach(() => {
- mock.onPost().reply(httpStatus.OK, successResponse);
+ mock.onPost().reply(HTTP_STATUS_OK, successResponse);
});
it('inserts a media content with src set to the encoded content and uploading true', async () => {
@@ -167,7 +167,7 @@ describe('content_editor/extensions/attachment', () => {
describe('when uploading request fails', () => {
beforeEach(() => {
- mock.onPost().reply(httpStatus.INTERNAL_SERVER_ERROR);
+ mock.onPost().reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
});
it('resets the doc to original state', async () => {
@@ -209,7 +209,7 @@ describe('content_editor/extensions/attachment', () => {
};
beforeEach(() => {
- mock.onPost().reply(httpStatus.OK, successResponse);
+ mock.onPost().reply(HTTP_STATUS_OK, successResponse);
});
it('inserts a loading mark', async () => {
@@ -246,7 +246,7 @@ describe('content_editor/extensions/attachment', () => {
describe('when uploading request fails', () => {
beforeEach(() => {
- mock.onPost().reply(httpStatus.INTERNAL_SERVER_ERROR);
+ mock.onPost().reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
});
it('resets the doc to original state', async () => {
diff --git a/spec/frontend/content_editor/extensions/link_spec.js b/spec/frontend/content_editor/extensions/link_spec.js
index bb841357d37..ead898554d1 100644
--- a/spec/frontend/content_editor/extensions/link_spec.js
+++ b/spec/frontend/content_editor/extensions/link_spec.js
@@ -33,7 +33,7 @@ describe('content_editor/extensions/link', () => {
${'documentation](readme.md'} | ${() => p('documentation](readme.md')}
${'http://example.com '} | ${() => p(link({ href: 'http://example.com' }, 'http://example.com'))}
${'https://example.com '} | ${() => p(link({ href: 'https://example.com' }, 'https://example.com'))}
- ${'www.example.com '} | ${() => p(link({ href: 'http://www.example.com' }, 'www.example.com'))}
+ ${'www.example.com '} | ${() => p(link({ href: 'www.example.com' }, 'www.example.com'))}
${'example.com/ab.html '} | ${() => p('example.com/ab.html')}
${'https://www.google.com '} | ${() => p(link({ href: 'https://www.google.com' }, 'https://www.google.com'))}
`('with input=$input, then should insert a $insertedNode', ({ input, insertedNode }) => {
diff --git a/spec/frontend/content_editor/markdown_processing_spec.js b/spec/frontend/content_editor/markdown_processing_spec.js
deleted file mode 100644
index 3930f47289a..00000000000
--- a/spec/frontend/content_editor/markdown_processing_spec.js
+++ /dev/null
@@ -1,16 +0,0 @@
-import path from 'path';
-import { describeMarkdownProcessing } from 'jest/content_editor/markdown_processing_spec_helper';
-
-jest.mock('~/emoji');
-
-const markdownYamlPath = path.join(
- __dirname,
- '..',
- '..',
- 'fixtures',
- 'markdown',
- 'markdown_golden_master_examples.yml',
-);
-
-// See spec/fixtures/markdown/markdown_golden_master_examples.yml for documentation on how this spec works.
-describeMarkdownProcessing('CE markdown processing in ContentEditor', markdownYamlPath);
diff --git a/spec/frontend/content_editor/markdown_processing_spec_helper.js b/spec/frontend/content_editor/markdown_processing_spec_helper.js
deleted file mode 100644
index 6f10f294fb0..00000000000
--- a/spec/frontend/content_editor/markdown_processing_spec_helper.js
+++ /dev/null
@@ -1,92 +0,0 @@
-import fs from 'fs';
-import jsYaml from 'js-yaml';
-import { memoize } from 'lodash';
-import MockAdapter from 'axios-mock-adapter';
-import axios from 'axios';
-import { createContentEditor } from '~/content_editor';
-import httpStatus from '~/lib/utils/http_status';
-
-const getFocusedMarkdownExamples = memoize(
- () => process.env.FOCUSED_MARKDOWN_EXAMPLES?.split(',') || [],
-);
-
-const includeExample = ({ name }) => {
- const focusedMarkdownExamples = getFocusedMarkdownExamples();
- if (!focusedMarkdownExamples.length) {
- return true;
- }
- return focusedMarkdownExamples.includes(name);
-};
-
-const getPendingReason = (pendingStringOrObject) => {
- if (!pendingStringOrObject) {
- return null;
- }
- if (typeof pendingStringOrObject === 'string') {
- return pendingStringOrObject;
- }
- if (pendingStringOrObject.frontend) {
- return pendingStringOrObject.frontend;
- }
-
- return null;
-};
-
-const loadMarkdownApiExamples = (markdownYamlPath) => {
- const apiMarkdownYamlText = fs.readFileSync(markdownYamlPath);
- const apiMarkdownExampleObjects = jsYaml.safeLoad(apiMarkdownYamlText);
-
- return apiMarkdownExampleObjects
- .filter(includeExample)
- .map(({ name, pending, markdown, html }) => [
- name,
- { pendingReason: getPendingReason(pending), markdown, html },
- ]);
-};
-
-const testSerializesHtmlToMarkdownForElement = async ({ markdown, html }) => {
- const mock = new MockAdapter(axios);
-
- // Ignore any API requests from the suggestions plugin
- mock.onGet().reply(httpStatus.OK, []);
-
- const contentEditor = createContentEditor({
- // Overwrite renderMarkdown to always return this specific html
- renderMarkdown: () => html,
- });
-
- await contentEditor.setSerializedContent(markdown);
-
- // This serializes the ContentEditor document, which was based on the HTML, to markdown
- const serializedContent = contentEditor.getSerializedContent();
-
- // Assert that the markdown we ended up with after sending it through all the ContentEditor
- // plumbing matches the original markdown from the YAML.
- expect(serializedContent.trim()).toBe(markdown.trim());
-
- mock.restore();
-};
-
-// describeMarkdownProcesssing
-//
-// This is used to dynamically generate examples (for both CE and EE) to ensure
-// we generate same markdown that was provided to Markdown API.
-//
-// eslint-disable-next-line jest/no-export
-export const describeMarkdownProcessing = (description, markdownYamlPath) => {
- const examples = loadMarkdownApiExamples(markdownYamlPath);
-
- describe(description, () => {
- describe.each(examples)('%s', (name, { pendingReason, ...example }) => {
- const exampleName = 'correctly serializes HTML to markdown';
- if (pendingReason) {
- it.todo(`${exampleName}: ${pendingReason}`);
- return;
- }
-
- it(`${exampleName}`, async () => {
- await testSerializesHtmlToMarkdownForElement(example);
- });
- });
- });
-};
diff --git a/spec/frontend/content_editor/markdown_snapshot_spec.js b/spec/frontend/content_editor/markdown_snapshot_spec.js
index 146208bf8c7..fd64003420e 100644
--- a/spec/frontend/content_editor/markdown_snapshot_spec.js
+++ b/spec/frontend/content_editor/markdown_snapshot_spec.js
@@ -1,11 +1,96 @@
-import { describeMarkdownSnapshots } from 'jest/content_editor/markdown_snapshot_spec_helper';
-
-jest.mock('~/emoji');
-
// See https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/#markdown-snapshot-testing
// for documentation on this spec.
//
// NOTE: Unlike the backend markdown_snapshot_spec.rb which has a CE and EE version, there is only
// one version of this spec. This is because the frontend markdown rendering does not require EE-only
// backend features.
-describeMarkdownSnapshots('markdown example snapshots in ContentEditor');
+
+import jsYaml from 'js-yaml';
+import { pick } from 'lodash';
+import glfmExampleStatusYml from '../../../glfm_specification/input/gitlab_flavored_markdown/glfm_example_status.yml';
+import markdownYml from '../../../glfm_specification/output_example_snapshots/markdown.yml';
+import htmlYml from '../../../glfm_specification/output_example_snapshots/html.yml';
+import prosemirrorJsonYml from '../../../glfm_specification/output_example_snapshots/prosemirror_json.yml';
+import {
+ IMPLEMENTATION_ERROR_MSG,
+ renderHtmlAndJsonForAllExamples,
+} from './render_html_and_json_for_all_examples';
+
+jest.mock('~/emoji');
+
+const filterExamples = (examples) => {
+ const focusedMarkdownExamples = process.env.FOCUSED_MARKDOWN_EXAMPLES?.split(',') || [];
+ if (!focusedMarkdownExamples.length) {
+ return examples;
+ }
+ return pick(examples, focusedMarkdownExamples);
+};
+
+const loadExamples = (yaml) => {
+ const examples = jsYaml.safeLoad(yaml, {});
+ return filterExamples(examples);
+};
+
+describe('markdown example snapshots in ContentEditor', () => {
+ let actualHtmlAndJsonExamples;
+ let skipRunningSnapshotWysiwygHtmlTests;
+ let skipRunningSnapshotProsemirrorJsonTests;
+
+ const exampleStatuses = loadExamples(glfmExampleStatusYml);
+ const markdownExamples = loadExamples(markdownYml);
+ const expectedHtmlExamples = loadExamples(htmlYml);
+ const expectedProseMirrorJsonExamples = loadExamples(prosemirrorJsonYml);
+ const exampleNames = Object.keys(markdownExamples);
+
+ beforeAll(async () => {
+ return renderHtmlAndJsonForAllExamples(markdownExamples).then((examples) => {
+ actualHtmlAndJsonExamples = examples;
+ });
+ });
+
+ describe.each(exampleNames)('%s', (name) => {
+ const exampleNamePrefix = 'verifies conversion of GLFM to';
+ skipRunningSnapshotWysiwygHtmlTests =
+ exampleStatuses[name]?.skip_running_snapshot_wysiwyg_html_tests;
+ skipRunningSnapshotProsemirrorJsonTests =
+ exampleStatuses[name]?.skip_running_snapshot_prosemirror_json_tests;
+
+ const markdown = markdownExamples[name];
+
+ if (skipRunningSnapshotWysiwygHtmlTests) {
+ it.todo(`${exampleNamePrefix} HTML: ${skipRunningSnapshotWysiwygHtmlTests}`);
+ } else {
+ it(`${exampleNamePrefix} HTML`, async () => {
+ const expectedHtml = expectedHtmlExamples[name].wysiwyg;
+ const { html: actualHtml } = actualHtmlAndJsonExamples[name];
+
+ // noinspection JSUnresolvedFunction (required to avoid RubyMine type inspection warning, because custom matchers auto-imported via Jest test setup are not automatically resolved - see https://youtrack.jetbrains.com/issue/WEB-42350/matcher-for-jest-is-not-recognized-but-it-is-runable)
+ expect(actualHtml).toMatchExpectedForMarkdown(
+ 'HTML',
+ name,
+ markdown,
+ IMPLEMENTATION_ERROR_MSG,
+ expectedHtml,
+ );
+ });
+ }
+
+ if (skipRunningSnapshotProsemirrorJsonTests) {
+ it.todo(`${exampleNamePrefix} ProseMirror JSON: ${skipRunningSnapshotProsemirrorJsonTests}`);
+ } else {
+ it(`${exampleNamePrefix} ProseMirror JSON`, async () => {
+ const expectedJson = expectedProseMirrorJsonExamples[name];
+ const { json: actualJson } = actualHtmlAndJsonExamples[name];
+
+ // noinspection JSUnresolvedFunction
+ expect(actualJson).toMatchExpectedForMarkdown(
+ 'JSON',
+ name,
+ markdown,
+ IMPLEMENTATION_ERROR_MSG,
+ expectedJson,
+ );
+ });
+ }
+ });
+});
diff --git a/spec/frontend/content_editor/markdown_snapshot_spec_helper.js b/spec/frontend/content_editor/markdown_snapshot_spec_helper.js
deleted file mode 100644
index 64988c5b717..00000000000
--- a/spec/frontend/content_editor/markdown_snapshot_spec_helper.js
+++ /dev/null
@@ -1,96 +0,0 @@
-// See https://docs.gitlab.com/ee/development/gitlab_flavored_markdown/specification_guide/#markdown-snapshot-testing
-// for documentation on this spec.
-
-import jsYaml from 'js-yaml';
-import { pick } from 'lodash';
-import glfmExampleStatusYml from '../../../glfm_specification/input/gitlab_flavored_markdown/glfm_example_status.yml';
-import markdownYml from '../../../glfm_specification/output_example_snapshots/markdown.yml';
-import htmlYml from '../../../glfm_specification/output_example_snapshots/html.yml';
-import prosemirrorJsonYml from '../../../glfm_specification/output_example_snapshots/prosemirror_json.yml';
-import {
- IMPLEMENTATION_ERROR_MSG,
- renderHtmlAndJsonForAllExamples,
-} from './render_html_and_json_for_all_examples';
-
-const filterExamples = (examples) => {
- const focusedMarkdownExamples = process.env.FOCUSED_MARKDOWN_EXAMPLES?.split(',') || [];
- if (!focusedMarkdownExamples.length) {
- return examples;
- }
- return pick(examples, focusedMarkdownExamples);
-};
-
-const loadExamples = (yaml) => {
- const examples = jsYaml.safeLoad(yaml, {});
- return filterExamples(examples);
-};
-
-// eslint-disable-next-line jest/no-export
-export const describeMarkdownSnapshots = (description) => {
- let actualHtmlAndJsonExamples;
- let skipRunningSnapshotWysiwygHtmlTests;
- let skipRunningSnapshotProsemirrorJsonTests;
-
- const exampleStatuses = loadExamples(glfmExampleStatusYml);
- const markdownExamples = loadExamples(markdownYml);
- const expectedHtmlExamples = loadExamples(htmlYml);
- const expectedProseMirrorJsonExamples = loadExamples(prosemirrorJsonYml);
-
- beforeAll(async () => {
- return renderHtmlAndJsonForAllExamples(markdownExamples).then((examples) => {
- actualHtmlAndJsonExamples = examples;
- });
- });
-
- describe(description, () => {
- const exampleNames = Object.keys(markdownExamples);
-
- describe.each(exampleNames)('%s', (name) => {
- const exampleNamePrefix = 'verifies conversion of GLFM to';
- skipRunningSnapshotWysiwygHtmlTests =
- exampleStatuses[name]?.skip_running_snapshot_wysiwyg_html_tests;
- skipRunningSnapshotProsemirrorJsonTests =
- exampleStatuses[name]?.skip_running_snapshot_prosemirror_json_tests;
-
- const markdown = markdownExamples[name];
-
- if (skipRunningSnapshotWysiwygHtmlTests) {
- it.todo(`${exampleNamePrefix} HTML: ${skipRunningSnapshotWysiwygHtmlTests}`);
- } else {
- it(`${exampleNamePrefix} HTML`, async () => {
- const expectedHtml = expectedHtmlExamples[name].wysiwyg;
- const { html: actualHtml } = actualHtmlAndJsonExamples[name];
-
- // noinspection JSUnresolvedFunction (required to avoid RubyMine type inspection warning, because custom matchers auto-imported via Jest test setup are not automatically resolved - see https://youtrack.jetbrains.com/issue/WEB-42350/matcher-for-jest-is-not-recognized-but-it-is-runable)
- expect(actualHtml).toMatchExpectedForMarkdown(
- 'HTML',
- name,
- markdown,
- IMPLEMENTATION_ERROR_MSG,
- expectedHtml,
- );
- });
- }
-
- if (skipRunningSnapshotProsemirrorJsonTests) {
- it.todo(
- `${exampleNamePrefix} ProseMirror JSON: ${skipRunningSnapshotProsemirrorJsonTests}`,
- );
- } else {
- it(`${exampleNamePrefix} ProseMirror JSON`, async () => {
- const expectedJson = expectedProseMirrorJsonExamples[name];
- const { json: actualJson } = actualHtmlAndJsonExamples[name];
-
- // noinspection JSUnresolvedFunction
- expect(actualJson).toMatchExpectedForMarkdown(
- 'JSON',
- name,
- markdown,
- IMPLEMENTATION_ERROR_MSG,
- expectedJson,
- );
- });
- }
- });
- });
-};
diff --git a/spec/frontend/content_editor/services/upload_helpers_spec.js b/spec/frontend/content_editor/services/upload_helpers_spec.js
index ee9333232db..3423e4db3dc 100644
--- a/spec/frontend/content_editor/services/upload_helpers_spec.js
+++ b/spec/frontend/content_editor/services/upload_helpers_spec.js
@@ -1,7 +1,7 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { uploadFile } from '~/content_editor/services/upload_helpers';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
describe('content_editor/services/upload_helpers', () => {
const uploadsPath = '/uploads';
@@ -26,7 +26,7 @@ describe('content_editor/services/upload_helpers', () => {
renderedMarkdown = parseHTML(renderedAttachmentLinkFixture);
mock = new MockAdapter(axios);
- mock.onPost(uploadsPath, formData).reply(httpStatus.OK, successResponse);
+ mock.onPost(uploadsPath, formData).reply(HTTP_STATUS_OK, successResponse);
renderMarkdown = jest.fn().mockResolvedValue(renderedAttachmentLinkFixture);
});
diff --git a/spec/frontend/deploy_freeze/store/mutations_spec.js b/spec/frontend/deploy_freeze/store/mutations_spec.js
index 984105d6655..a1e80ef0e6c 100644
--- a/spec/frontend/deploy_freeze/store/mutations_spec.js
+++ b/spec/frontend/deploy_freeze/store/mutations_spec.js
@@ -33,9 +33,9 @@ describe('Deploy freeze mutations', () => {
describe('RECEIVE_FREEZE_PERIODS_SUCCESS', () => {
it('should set freeze periods and format timezones from identifiers to names', () => {
const timezoneNames = {
- 'Europe/Berlin': '[UTC + 2] Berlin',
+ 'Europe/Berlin': '[UTC+2] Berlin',
'Etc/UTC': '[UTC 0] UTC',
- 'America/New_York': '[UTC - 4] Eastern Time (US & Canada)',
+ 'America/New_York': '[UTC-4] Eastern Time (US & Canada)',
};
mutations[types.RECEIVE_FREEZE_PERIODS_SUCCESS](stateCopy, freezePeriodsFixture);
diff --git a/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js b/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js
index 5fd61b25edc..f4d4f9cf896 100644
--- a/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js
+++ b/spec/frontend/design_management/components/design_notes/design_reply_form_spec.js
@@ -5,6 +5,7 @@ import { confirmAction } from '~/lib/utils/confirm_via_gl_modal/confirm_via_gl_m
import DesignReplyForm from '~/design_management/components/design_notes/design_reply_form.vue';
jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal');
+jest.mock('~/autosave');
describe('Design reply form component', () => {
let wrapper;
@@ -78,12 +79,11 @@ describe('Design reply form component', () => {
createComponent({ discussionId });
await nextTick();
- // We discourage testing `wrapper.vm` properties but
- // since `autosave` library instantiates on component
- // there's no other way to test whether instantiation
- // happened correctly or not.
- expect(wrapper.vm.autosaveDiscussion).toBeInstanceOf(Autosave);
- expect(wrapper.vm.autosaveDiscussion.key).toBe(`autosave/Discussion/6/${shortDiscussionId}`);
+ expect(Autosave).toHaveBeenCalledWith(expect.any(Element), [
+ 'Discussion',
+ 6,
+ shortDiscussionId,
+ ]);
},
);
@@ -141,7 +141,7 @@ describe('Design reply form component', () => {
});
it('emits submitForm event on Comment button click', async () => {
- const autosaveResetSpy = jest.spyOn(wrapper.vm.autosaveDiscussion, 'reset');
+ const autosaveResetSpy = jest.spyOn(Autosave.prototype, 'reset');
findSubmitButton().vm.$emit('click');
@@ -151,7 +151,7 @@ describe('Design reply form component', () => {
});
it('emits submitForm event on textarea ctrl+enter keydown', async () => {
- const autosaveResetSpy = jest.spyOn(wrapper.vm.autosaveDiscussion, 'reset');
+ const autosaveResetSpy = jest.spyOn(Autosave.prototype, 'reset');
findTextarea().trigger('keydown.enter', {
ctrlKey: true,
@@ -163,7 +163,7 @@ describe('Design reply form component', () => {
});
it('emits submitForm event on textarea meta+enter keydown', async () => {
- const autosaveResetSpy = jest.spyOn(wrapper.vm.autosaveDiscussion, 'reset');
+ const autosaveResetSpy = jest.spyOn(Autosave.prototype, 'reset');
findTextarea().trigger('keydown.enter', {
metaKey: true,
@@ -178,7 +178,7 @@ describe('Design reply form component', () => {
findTextarea().setValue('test2');
await nextTick();
- expect(wrapper.emitted('input')).toEqual([['test'], ['test2']]);
+ expect(wrapper.emitted('input')).toEqual([['test2']]);
});
it('emits cancelForm event on Escape key if text was not changed', () => {
@@ -211,7 +211,7 @@ describe('Design reply form component', () => {
it('emits cancelForm event when confirmed', async () => {
confirmAction.mockResolvedValueOnce(true);
- const autosaveResetSpy = jest.spyOn(wrapper.vm.autosaveDiscussion, 'reset');
+ const autosaveResetSpy = jest.spyOn(Autosave.prototype, 'reset');
wrapper.setProps({ value: 'test3' });
await nextTick();
@@ -228,7 +228,7 @@ describe('Design reply form component', () => {
it("doesn't emit cancelForm event when not confirmed", async () => {
confirmAction.mockResolvedValueOnce(false);
- const autosaveResetSpy = jest.spyOn(wrapper.vm.autosaveDiscussion, 'reset');
+ const autosaveResetSpy = jest.spyOn(Autosave.prototype, 'reset');
wrapper.setProps({ value: 'test3' });
await nextTick();
diff --git a/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap b/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
index 1acbf14db88..a4af73dd194 100644
--- a/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
+++ b/spec/frontend/design_management/components/upload/__snapshots__/design_version_dropdown_spec.js.snap
@@ -12,6 +12,7 @@ exports[`Design management design version dropdown component renders design vers
toggletext="Showing latest version"
variant="default"
>
+
<!---->
<!---->
@@ -24,6 +25,7 @@ exports[`Design management design version dropdown component renders design vers
tabindex="-1"
>
<gl-listbox-item-stub
+ data-testid="listbox-item-gid://gitlab/DesignManagement::Version/1"
ischeckcentered="true"
>
<span
@@ -66,6 +68,7 @@ exports[`Design management design version dropdown component renders design vers
</span>
</gl-listbox-item-stub>
<gl-listbox-item-stub
+ data-testid="listbox-item-gid://gitlab/DesignManagement::Version/2"
ischeckcentered="true"
>
<span
@@ -107,6 +110,10 @@ exports[`Design management design version dropdown component renders design vers
</span>
</span>
</gl-listbox-item-stub>
+
+ <!---->
+
+ <!---->
</ul>
<!---->
@@ -126,6 +133,7 @@ exports[`Design management design version dropdown component renders design vers
toggletext="Showing latest version"
variant="default"
>
+
<!---->
<!---->
@@ -138,6 +146,7 @@ exports[`Design management design version dropdown component renders design vers
tabindex="-1"
>
<gl-listbox-item-stub
+ data-testid="listbox-item-gid://gitlab/DesignManagement::Version/1"
ischeckcentered="true"
>
<span
@@ -180,6 +189,7 @@ exports[`Design management design version dropdown component renders design vers
</span>
</gl-listbox-item-stub>
<gl-listbox-item-stub
+ data-testid="listbox-item-gid://gitlab/DesignManagement::Version/2"
ischeckcentered="true"
>
<span
@@ -221,6 +231,10 @@ exports[`Design management design version dropdown component renders design vers
</span>
</span>
</gl-listbox-item-stub>
+
+ <!---->
+
+ <!---->
</ul>
<!---->
diff --git a/spec/frontend/diff_spec.js b/spec/frontend/diff_spec.js
new file mode 100644
index 00000000000..759ae32ac51
--- /dev/null
+++ b/spec/frontend/diff_spec.js
@@ -0,0 +1,72 @@
+import createEventHub from '~/helpers/event_hub_factory';
+
+import Diff from '~/diff';
+
+describe('Diff', () => {
+ describe('diff <-> tabs interactions', () => {
+ let hub;
+
+ beforeEach(() => {
+ hub = createEventHub();
+ });
+
+ describe('constructor', () => {
+ it("takes in the `mergeRequestEventHub` when it's provided", () => {
+ const diff = new Diff({ mergeRequestEventHub: hub });
+
+ expect(diff.mrHub).toBe(hub);
+ });
+
+ it('does not throw if no event hub is provided', () => {
+ expect(() => {
+ new Diff(); /* eslint-disable-line no-new */
+ }).not.toThrow();
+ });
+
+ it("doesn't set the mrHub property if none is provided by the construction arguments", () => {
+ const diff = new Diff();
+
+ expect(diff.mrHub).toBe(undefined);
+ });
+ });
+
+ describe('viewTypeSwitch', () => {
+ const clickPath = '/path/somewhere?params=exist';
+ const jsonPath = 'http://test.host/path/somewhere.json?params=exist';
+ const simulatejQueryClick = {
+ originalEvent: {
+ target: {
+ getAttribute() {
+ return clickPath;
+ },
+ },
+ preventDefault: jest.fn(),
+ stopPropagation: jest.fn(),
+ },
+ };
+
+ it('emits the correct switch view event when called and there is an `mrHub`', async () => {
+ const diff = new Diff({ mergeRequestEventHub: hub });
+ const hubEmit = new Promise((resolve) => {
+ hub.$on('diff:switch-view-type', resolve);
+ });
+
+ diff.viewTypeSwitch(simulatejQueryClick);
+ const { source } = await hubEmit;
+
+ expect(simulatejQueryClick.originalEvent.preventDefault).toHaveBeenCalled();
+ expect(simulatejQueryClick.originalEvent.stopPropagation).toHaveBeenCalled();
+ expect(source).toBe(jsonPath);
+ });
+
+ it('is effectively a noop when there is no `mrHub`', () => {
+ const diff = new Diff();
+
+ expect(diff.mrHub).toBe(undefined);
+ expect(() => {
+ diff.viewTypeSwitch(simulatejQueryClick);
+ }).not.toThrow();
+ });
+ });
+ });
+});
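
The new spec documents a small contract for `~/diff`: keep a reference to the merge request event hub when one is passed in, and translate a clicked view-type link into its `.json` endpoint before emitting. A hedged sketch of just that contract, not the actual class (which does considerably more), could look like this:

```javascript
// Illustrative only: a minimal Diff that satisfies the assertions above.
// The real ~/diff module wires up many more behaviours.
export default class Diff {
  constructor({ mergeRequestEventHub } = {}) {
    if (mergeRequestEventHub) {
      this.mrHub = mergeRequestEventHub;
    }
  }

  viewTypeSwitch(event) {
    // Without an event hub there is nobody to notify, so do nothing.
    if (!this.mrHub) return;

    const { originalEvent } = event;
    originalEvent.preventDefault();
    originalEvent.stopPropagation();

    // '/path?query' -> 'http://host/path.json?query'
    const href = originalEvent.target.getAttribute('href');
    const url = new URL(href, window.location.href);
    url.pathname = `${url.pathname}.json`;

    this.mrHub.$emit('diff:switch-view-type', { source: url.toString() });
  }
}
```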
diff --git a/spec/frontend/diffs/components/app_spec.js b/spec/frontend/diffs/components/app_spec.js
index 936f4744e94..c8be0bedb4c 100644
--- a/spec/frontend/diffs/components/app_spec.js
+++ b/spec/frontend/diffs/components/app_spec.js
@@ -107,6 +107,7 @@ describe('diffs/components/app', () => {
beforeEach(() => {
const fetchResolver = () => {
store.state.diffs.retrievingBatches = false;
+ store.state.notes.doneFetchingBatchDiscussions = true;
store.state.notes.discussions = 'test';
return Promise.resolve({ real_size: 100 });
};
diff --git a/spec/frontend/diffs/components/diff_file_spec.js b/spec/frontend/diffs/components/diff_file_spec.js
index 944cec77efb..ccfc36f8f16 100644
--- a/spec/frontend/diffs/components/diff_file_spec.js
+++ b/spec/frontend/diffs/components/diff_file_spec.js
@@ -18,7 +18,7 @@ import createDiffsStore from '~/diffs/store/modules';
import { diffViewerModes, diffViewerErrors } from '~/ide/constants';
import axios from '~/lib/utils/axios_utils';
import { scrollToElement } from '~/lib/utils/common_utils';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import createNotesStore from '~/notes/stores/modules';
import { getDiffFileMock } from '../mock_data/diff_file';
import diffFileMockDataUnreadable from '../mock_data/diff_file_unreadable';
@@ -436,7 +436,7 @@ describe('DiffFile', () => {
describe('loading', () => {
it('should have loading icon while loading a collapsed diffs', async () => {
const { load_collapsed_diff_url } = store.state.diffs.diffFiles[0];
- axiosMock.onGet(load_collapsed_diff_url).reply(httpStatus.OK, getReadableFile());
+ axiosMock.onGet(load_collapsed_diff_url).reply(HTTP_STATUS_OK, getReadableFile());
makeFileAutomaticallyCollapsed(store);
wrapper.vm.requestDiff();
@@ -517,7 +517,7 @@ describe('DiffFile', () => {
viewer: { name: 'collapsed', automaticallyCollapsed: true },
};
- axiosMock.onGet(file.load_collapsed_diff_url).reply(httpStatus.OK, getReadableFile());
+ axiosMock.onGet(file.load_collapsed_diff_url).reply(HTTP_STATUS_OK, getReadableFile());
({ wrapper, store } = createComponent({ file, props: { viewDiffsFileByFile: true } }));
diff --git a/spec/frontend/diffs/components/diff_line_note_form_spec.js b/spec/frontend/diffs/components/diff_line_note_form_spec.js
index 9493dc8855e..bd0e3455872 100644
--- a/spec/frontend/diffs/components/diff_line_note_form_spec.js
+++ b/spec/frontend/diffs/components/diff_line_note_form_spec.js
@@ -101,7 +101,8 @@ describe('DiffLineNoteForm', () => {
});
it('should init autosave', () => {
- expect(Autosave).toHaveBeenCalledWith({}, [
+ // we're using shallow mount here so there's no element to pass to Autosave
+ expect(Autosave).toHaveBeenCalledWith(undefined, [
'Note',
'Issue',
98,
diff --git a/spec/frontend/dropzone_input_spec.js b/spec/frontend/dropzone_input_spec.js
index 0fe70bac6b7..0f7926ccbf9 100644
--- a/spec/frontend/dropzone_input_spec.js
+++ b/spec/frontend/dropzone_input_spec.js
@@ -7,7 +7,7 @@ import { TEST_HOST } from 'spec/test_constants';
import PasteMarkdownTable from '~/behaviors/markdown/paste_markdown_table';
import dropzoneInput from '~/dropzone_input';
import axios from '~/lib/utils/axios_utils';
-import httpStatusCodes from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
const TEST_FILE = new File([], 'somefile.jpg');
TEST_FILE.upload = {};
@@ -92,7 +92,7 @@ describe('dropzone_input', () => {
],
});
- axiosMock.onPost().reply(httpStatusCodes.OK, { link: { markdown: 'foo' } });
+ axiosMock.onPost().reply(HTTP_STATUS_OK, { link: { markdown: 'foo' } });
await waitForPromises();
expect(axiosMock.history.post[0].data.get('file').name).toHaveLength(246);
});
@@ -131,7 +131,7 @@ describe('dropzone_input', () => {
},
],
});
- axiosMock.onPost().reply(httpStatusCodes.OK, { link: { markdown: 'foo' } });
+ axiosMock.onPost().reply(HTTP_STATUS_OK, { link: { markdown: 'foo' } });
await waitForPromises();
expect(axiosMock.history.post[0].data.get('file').name).toEqual('test.png');
});
diff --git a/spec/frontend/editor/schema/ci/json_tests/positive_tests/gitlab-ci.json b/spec/frontend/editor/schema/ci/json_tests/positive_tests/gitlab-ci.json
index 666a4852957..17a1b4474b6 100644
--- a/spec/frontend/editor/schema/ci/json_tests/positive_tests/gitlab-ci.json
+++ b/spec/frontend/editor/schema/ci/json_tests/positive_tests/gitlab-ci.json
@@ -107,7 +107,6 @@
"container_scanning": "scan2.json",
"dast": "dast.json",
"license_management": "license.json",
- "performance": "performance.json",
"metrics": "metrics.txt"
}
},
@@ -160,7 +159,6 @@
"container_scanning": ["scan2.json"],
"dast": ["dast.json"],
"license_management": ["license.json"],
- "performance": ["performance.json"],
"metrics": ["metrics.txt"]
}
},
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/artifacts.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/artifacts.yml
index 29f4a0cd76d..996a48f7bc6 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/artifacts.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/artifacts.yml
@@ -1,5 +1,30 @@
-# invalid artifact:reports:cyclonedx
+# invalid artifact:reports:browser_performance
+browser_performance no paths:
+ artifacts:
+ reports:
+ browser_performance:
+
+## Lists (or globs) are not allowed!
+browser_performance list of string paths:
+ artifacts:
+ reports:
+ browser_performance:
+ - foo
+ - ./bar/baz
+
+browser_performance mixed list of string paths and globs:
+ artifacts:
+ reports:
+ browser_performance:
+ - ./foo
+ - "bar/*.baz"
+
+browser_performance string array:
+ artifacts:
+ reports:
+ browser_performance: ["foo", "blah"]
+# invalid artifact:reports:cyclonedx
cyclonedx no paths:
artifacts:
reports:
@@ -17,6 +42,19 @@ cyclonedx not an array or string:
- foo
- bar
+# invalid artifacts:reports:coverage_report
+coverage-report-is-string:
+ artifacts:
+ reports:
+ coverage_report: cobertura
+
+# invalid artifact:reports:performance
+# Superseded by: artifact:reports:browser_performance
+performance string path:
+ artifacts:
+ reports:
+ performance: foo
+
# invalid artifacts:when
artifacts-when-unknown:
artifacts:
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/rules.yml b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/rules.yml
index d74a681b23b..f4a08492574 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/rules.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/negative_tests/rules.yml
@@ -12,3 +12,8 @@ wrong path declaration:
rules:
- changes:
paths: { file: 'DOCKER' }
+
+# invalid rules:if
+rules-if-empty:
+ rules:
+ - if: \ No newline at end of file
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/artifacts.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/artifacts.yml
index a5c9153ee13..70761a09b58 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/artifacts.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/artifacts.yml
@@ -1,5 +1,10 @@
-# valid artifact:reports:cyclonedx
+# valid artifact:reports:browser_performance
+browser_performance string path:
+ artifacts:
+ reports:
+ browser_performance: foo
+# valid artifact:reports:cyclonedx
cyclonedx string path:
artifacts:
reports:
@@ -24,6 +29,19 @@ cylonedx mixed list of string paths and globs:
- ./foo
- "bar/*.baz"
+# valid artifacts:reports:coverage_report
+coverage-report-cobertura:
+ artifacts:
+ reports:
+ coverage_report:
+ coverage_format: cobertura
+ path: coverage/cobertura-coverage.xml
+
+coverage-report-null:
+ artifacts:
+ reports:
+ coverage_report: null
+
# valid artifacts:when
artifacts-when-on-failure:
artifacts:
diff --git a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/rules.yml b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/rules.yml
index ef604f707b5..5dfaf323b22 100644
--- a/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/rules.yml
+++ b/spec/frontend/editor/schema/ci/yaml_tests/positive_tests/rules.yml
@@ -28,3 +28,7 @@ workflow:
variables:
IS_A_FEATURE: 'true'
when: always
+
+# valid rules:null
+rules-null:
+ rules: null
diff --git a/spec/frontend/environments/environment_details/deployment_job_spec.js b/spec/frontend/environments/environment_details/deployment_job_spec.js
new file mode 100644
index 00000000000..9bb61abb293
--- /dev/null
+++ b/spec/frontend/environments/environment_details/deployment_job_spec.js
@@ -0,0 +1,49 @@
+import { GlTruncate, GlLink, GlBadge } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import DeploymentJob from '~/environments/environment_details/components/deployment_job.vue';
+
+describe('app/assets/javascripts/environments/environment_details/components/deployment_job.vue', () => {
+ const jobData = {
+ webPath: 'http://example.com',
+ label: 'example job',
+ };
+ let wrapper;
+
+ const createWrapper = ({ job }) => {
+ return mountExtended(DeploymentJob, {
+ propsData: {
+ job,
+ },
+ });
+ };
+
+ describe('when the job data exists', () => {
+ beforeEach(() => {
+ wrapper = createWrapper({ job: jobData });
+ });
+
+ it('should render a link with a correct href', () => {
+ const jobLink = wrapper.findComponent(GlLink);
+ expect(jobLink.exists()).toBe(true);
+ expect(jobLink.attributes().href).toBe(jobData.webPath);
+ });
+ it('should render a truncated label', () => {
+ const truncatedLabel = wrapper.findComponent(GlTruncate);
+ expect(truncatedLabel.exists()).toBe(true);
+ expect(truncatedLabel.props().text).toBe(jobData.label);
+ });
+ });
+
+ describe('when the job data does not exist', () => {
+ beforeEach(() => {
+ wrapper = createWrapper({ job: null });
+ });
+
+ it('should render a badge with the text "API"', () => {
+ const badge = wrapper.findComponent(GlBadge);
+ expect(badge.exists()).toBe(true);
+ expect(badge.props().variant).toBe('info');
+ expect(badge.text()).toBe('API');
+ });
+ });
+});
diff --git a/spec/frontend/environments/environment_details/deployment_status_link_spec.js b/spec/frontend/environments/environment_details/deployment_status_link_spec.js
new file mode 100644
index 00000000000..5db7740423a
--- /dev/null
+++ b/spec/frontend/environments/environment_details/deployment_status_link_spec.js
@@ -0,0 +1,57 @@
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import DeploymentStatusLink from '~/environments/environment_details/components/deployment_status_link.vue';
+import DeploymentStatusBadge from '~/environments/components/deployment_status_badge.vue';
+
+describe('app/assets/javascripts/environments/environment_details/components/deployment_status_link.vue', () => {
+ const testData = {
+ webPath: 'http://example.com',
+ status: 'success',
+ };
+ let wrapper;
+
+ const createWrapper = (props) => {
+ return mountExtended(DeploymentStatusLink, {
+ propsData: props,
+ });
+ };
+
+ describe('when the job link exists', () => {
+ beforeEach(() => {
+ wrapper = createWrapper({
+ deploymentJob: { webPath: testData.webPath },
+ status: testData.status,
+ });
+ });
+
+ it('should render a link with a correct href', () => {
+ const jobLink = wrapper.findByTestId('deployment-status-job-link');
+ expect(jobLink.exists()).toBe(true);
+ expect(jobLink.attributes().href).toBe(testData.webPath);
+ });
+
+ it('should render a status badge', () => {
+ const statusBadge = wrapper.findComponent(DeploymentStatusBadge);
+ expect(statusBadge.exists()).toBe(true);
+ expect(statusBadge.props().status).toBe(testData.status);
+ });
+ });
+
+ describe('when no deployment job is provided', () => {
+ beforeEach(() => {
+ wrapper = createWrapper({
+ status: testData.status,
+ });
+ });
+
+ it('should render a link with a correct href', () => {
+ const jobLink = wrapper.findByTestId('deployment-status-job-link');
+ expect(jobLink.exists()).toBe(false);
+ });
+
+ it('should render only a status badge', () => {
+ const statusBadge = wrapper.findComponent(DeploymentStatusBadge);
+ expect(statusBadge.exists()).toBe(true);
+ expect(statusBadge.props().status).toBe(testData.status);
+ });
+ });
+});
diff --git a/spec/frontend/environments/environment_details/deployment_triggerer_spec.js b/spec/frontend/environments/environment_details/deployment_triggerer_spec.js
new file mode 100644
index 00000000000..48af82661bf
--- /dev/null
+++ b/spec/frontend/environments/environment_details/deployment_triggerer_spec.js
@@ -0,0 +1,51 @@
+import { GlAvatar, GlAvatarLink } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import DeploymentTriggerer from '~/environments/environment_details/components/deployment_triggerer.vue';
+
+describe('app/assets/javascripts/environments/environment_details/components/deployment_triggerer.vue', () => {
+ const triggererData = {
+ id: 'gid://gitlab/User/1',
+ webUrl: 'http://gdk.test:3000/root',
+ name: 'Administrator',
+ avatarUrl: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ };
+ let wrapper;
+
+ const createWrapper = ({ triggerer }) => {
+ return mountExtended(DeploymentTriggerer, {
+ propsData: {
+ triggerer,
+ },
+ });
+ };
+
+ describe('when the triggerer data exists', () => {
+ beforeEach(() => {
+ wrapper = createWrapper({ triggerer: triggererData });
+ });
+
+ it('should render an avatar link with a correct href', () => {
+ const triggererAvatarLink = wrapper.findComponent(GlAvatarLink);
+ expect(triggererAvatarLink.exists()).toBe(true);
+ expect(triggererAvatarLink.attributes().href).toBe(triggererData.webUrl);
+ });
+
+ it('should render an avatar', () => {
+ const triggererAvatar = wrapper.findComponent(GlAvatar);
+ expect(triggererAvatar.exists()).toBe(true);
+ expect(triggererAvatar.attributes().title).toBe(triggererData.name);
+ expect(triggererAvatar.props().src).toBe(triggererData.avatarUrl);
+ });
+ });
+
+ describe('when the triggerer data does not exist', () => {
+ beforeEach(() => {
+ wrapper = createWrapper({ triggerer: null });
+ });
+
+ it('should render nothing', () => {
+ const avatarLink = wrapper.findComponent(GlAvatarLink);
+ expect(avatarLink.exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/environments/environment_details/empty_state_spec.js b/spec/frontend/environments/environment_details/empty_state_spec.js
new file mode 100644
index 00000000000..aaf597d68ed
--- /dev/null
+++ b/spec/frontend/environments/environment_details/empty_state_spec.js
@@ -0,0 +1,39 @@
+import { GlEmptyState } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import EmptyState from '~/environments/environment_details/empty_state.vue';
+import {
+ translations,
+ environmentsHelpPagePath,
+ codeBlockPlaceholders,
+} from '~/environments/environment_details/constants';
+
+describe('~/environments/environment_details/empty_state.vue', () => {
+ let wrapper;
+
+ const createWrapper = () => {
+ return mountExtended(EmptyState);
+ };
+
+ describe('when Empty State is rendered for environment details page', () => {
+ beforeEach(() => {
+ wrapper = createWrapper();
+ });
+
+ it('should render the proper title', () => {
+ expect(wrapper.text()).toContain(translations.emptyStateTitle);
+ });
+
+ it('should render GlEmptyState component with correct props', () => {
+ const glEmptyStateComponent = wrapper.findComponent(GlEmptyState);
+ expect(glEmptyStateComponent.props().primaryButtonText).toBe(
+ translations.emptyStatePrimaryButton,
+ );
+ expect(glEmptyStateComponent.props().primaryButtonLink).toBe(environmentsHelpPagePath);
+ });
+
+ it('should render formatted description', () => {
+ expect(wrapper.text()).not.toContain(codeBlockPlaceholders.code[0]);
+ expect(wrapper.text()).not.toContain(codeBlockPlaceholders.code[1]);
+ });
+ });
+});
diff --git a/spec/frontend/environments/environment_details/page_spec.js b/spec/frontend/environments/environment_details/page_spec.js
new file mode 100644
index 00000000000..3a1a3238abe
--- /dev/null
+++ b/spec/frontend/environments/environment_details/page_spec.js
@@ -0,0 +1,69 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlLoadingIcon, GlTableLite } from '@gitlab/ui';
+import resolvedEnvironmentDetails from 'test_fixtures/graphql/environments/graphql/queries/environment_details.query.graphql.json';
+import emptyEnvironmentDetails from 'test_fixtures/graphql/environments/graphql/queries/environment_details.query.graphql.empty.json';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import EnvironmentsDetailPage from '~/environments/environment_details/index.vue';
+import EmptyState from '~/environments/environment_details/empty_state.vue';
+import getEnvironmentDetails from '~/environments/graphql/queries/environment_details.query.graphql';
+import createMockApollo from '../../__helpers__/mock_apollo_helper';
+import waitForPromises from '../../__helpers__/wait_for_promises';
+
+describe('~/environments/environment_details/page.vue', () => {
+ Vue.use(VueApollo);
+
+ let wrapper;
+
+ const defaultWrapperParameters = {
+ resolvedData: resolvedEnvironmentDetails,
+ };
+
+ const createWrapper = ({ resolvedData } = defaultWrapperParameters) => {
+ const mockApollo = createMockApollo([
+ [getEnvironmentDetails, jest.fn().mockResolvedValue(resolvedData)],
+ ]);
+
+ return mountExtended(EnvironmentsDetailPage, {
+ apolloProvider: mockApollo,
+ propsData: {
+ projectFullPath: 'gitlab-group/test-project',
+ environmentName: 'test-environment-name',
+ },
+ });
+ };
+
+ describe('when fetching data', () => {
+ it('should show a loading indicator', () => {
+ wrapper = createWrapper();
+
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
+ expect(wrapper.findComponent(GlTableLite).exists()).not.toBe(true);
+ });
+ });
+
+ describe('when data is fetched', () => {
+ describe('and there are deployments', () => {
+ beforeEach(async () => {
+ wrapper = createWrapper();
+ await waitForPromises();
+ });
+ it('should render a table when query is loaded', async () => {
+ expect(wrapper.findComponent(GlLoadingIcon).exists()).not.toBe(true);
+ expect(wrapper.findComponent(GlTableLite).exists()).toBe(true);
+ });
+ });
+
+ describe('and there are no deployments', () => {
+ beforeEach(async () => {
+ wrapper = createWrapper({ resolvedData: emptyEnvironmentDetails });
+ await waitForPromises();
+ });
+
+ it('should render empty state component', async () => {
+ expect(wrapper.findComponent(GlTableLite).exists()).toBe(false);
+ expect(wrapper.findComponent(EmptyState).exists()).toBe(true);
+ });
+ });
+ });
+});
diff --git a/spec/frontend/environments/environment_details/pagination_spec.js b/spec/frontend/environments/environment_details/pagination_spec.js
new file mode 100644
index 00000000000..107f3c3dd5e
--- /dev/null
+++ b/spec/frontend/environments/environment_details/pagination_spec.js
@@ -0,0 +1,157 @@
+import { GlKeysetPagination } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import Pagination from '~/environments/environment_details/pagination.vue';
+
+describe('~/environments/environment_details/pagination.vue', () => {
+ const mockRouter = {
+ push: jest.fn(),
+ };
+
+ const pageInfo = {
+ startCursor: 'eyJpZCI6IjE2In0',
+ endCursor: 'eyJpZCI6IjIifQ',
+ hasNextPage: true,
+ hasPreviousPage: true,
+ };
+ let wrapper;
+
+ const createWrapper = (pageInfoProp) => {
+ return mountExtended(Pagination, {
+ propsData: {
+ pageInfo: pageInfoProp,
+ },
+ mocks: {
+ $router: mockRouter,
+ },
+ });
+ };
+
+ describe('when neither next nor previous page exists', () => {
+ beforeEach(() => {
+ const emptyPageInfo = { ...pageInfo, hasPreviousPage: false, hasNextPage: false };
+ wrapper = createWrapper(emptyPageInfo);
+ });
+
+ it('should not render pagination component', () => {
+ expect(wrapper.html()).toBe('');
+ });
+ });
+
+ describe('when Pagination is rendered for environment details page', () => {
+ beforeEach(() => {
+ wrapper = createWrapper(pageInfo);
+ });
+
+ it('should pass correct props to keyset pagination', () => {
+ const glPagination = wrapper.findComponent(GlKeysetPagination);
+ expect(glPagination.exists()).toBe(true);
+ expect(glPagination.props()).toEqual(expect.objectContaining(pageInfo));
+ });
+
+ describe.each([
+ {
+ testPageInfo: pageInfo,
+ expectedAfter: `after=${pageInfo.endCursor}`,
+ expectedBefore: `before=${pageInfo.startCursor}`,
+ },
+ {
+ testPageInfo: { ...pageInfo, hasNextPage: true, hasPreviousPage: false },
+ expectedAfter: `after=${pageInfo.endCursor}`,
+ expectedBefore: '',
+ },
+ {
+ testPageInfo: { ...pageInfo, hasNextPage: false, hasPreviousPage: true },
+ expectedAfter: '',
+ expectedBefore: `before=${pageInfo.startCursor}`,
+ },
+ ])(
+ 'button links generation for $testPageInfo',
+ ({ testPageInfo, expectedAfter, expectedBefore }) => {
+ beforeEach(() => {
+ wrapper = createWrapper(testPageInfo);
+ });
+
+ it(`should have button links defined as ${expectedAfter || 'empty'} and
+ ${expectedBefore || 'empty'}`, () => {
+ const glPagination = wrapper.findComponent(GlKeysetPagination);
+ expect(glPagination.props().prevButtonLink).toContain(expectedBefore);
+ expect(glPagination.props().nextButtonLink).toContain(expectedAfter);
+ });
+ },
+ );
+
+ describe.each([
+ {
+ clickEvent: {
+ shiftKey: false,
+ ctrlKey: false,
+ altKey: false,
+ metaKey: false,
+ },
+ isDefaultPrevented: true,
+ },
+ {
+ clickEvent: {
+ shiftKey: true,
+ ctrlKey: false,
+ altKey: false,
+ metaKey: false,
+ },
+ isDefaultPrevented: false,
+ },
+ {
+ clickEvent: {
+ shiftKey: false,
+ ctrlKey: true,
+ altKey: false,
+ metaKey: false,
+ },
+ isDefaultPrevented: false,
+ },
+ {
+ clickEvent: {
+ shiftKey: false,
+ ctrlKey: false,
+ altKey: true,
+ metaKey: false,
+ },
+ isDefaultPrevented: false,
+ },
+ {
+ clickEvent: {
+ shiftKey: false,
+ ctrlKey: false,
+ altKey: false,
+ metaKey: true,
+ },
+ isDefaultPrevented: false,
+ },
+ ])(
+ 'when a pagination button is clicked with $clickEvent',
+ ({ clickEvent, isDefaultPrevented }) => {
+ let clickEventMock;
+ beforeEach(() => {
+ clickEventMock = { ...clickEvent, preventDefault: jest.fn() };
+ });
+
+ it(`should ${isDefaultPrevented ? '' : 'not '}prevent default event`, () => {
+ const pagination = wrapper.findComponent(GlKeysetPagination);
+ pagination.vm.$emit('click', clickEventMock);
+ expect(clickEventMock.preventDefault).toHaveBeenCalledTimes(isDefaultPrevented ? 1 : 0);
+ });
+ },
+ );
+
+ it('should navigate to a correct previous page', () => {
+ const pagination = wrapper.findComponent(GlKeysetPagination);
+ pagination.vm.$emit('prev', pageInfo.startCursor);
+ expect(mockRouter.push).toHaveBeenCalledWith({ query: { before: pageInfo.startCursor } });
+ });
+
+ it('should navigate to a correct next page', () => {
+ const pagination = wrapper.findComponent(GlKeysetPagination);
+ pagination.vm.$emit('next', pageInfo.endCursor);
+ expect(mockRouter.push).toHaveBeenCalledWith({ query: { after: pageInfo.endCursor } });
+ });
+ });
+});
diff --git a/spec/frontend/environments/environment_details_page_spec.js b/spec/frontend/environments/environment_details_page_spec.js
deleted file mode 100644
index 5a02b34250f..00000000000
--- a/spec/frontend/environments/environment_details_page_spec.js
+++ /dev/null
@@ -1,50 +0,0 @@
-import Vue from 'vue';
-import VueApollo from 'vue-apollo';
-import { GlLoadingIcon, GlTableLite } from '@gitlab/ui';
-import resolvedEnvironmentDetails from 'test_fixtures/graphql/environments/graphql/queries/environment_details.query.graphql.json';
-import { mountExtended } from 'helpers/vue_test_utils_helper';
-import createMockApollo from '../__helpers__/mock_apollo_helper';
-import waitForPromises from '../__helpers__/wait_for_promises';
-import EnvironmentsDetailPage from '../../../app/assets/javascripts/environments/environment_details/index.vue';
-import getEnvironmentDetails from '../../../app/assets/javascripts/environments/graphql/queries/environment_details.query.graphql';
-
-describe('~/environments/environment_details/page.vue', () => {
- Vue.use(VueApollo);
-
- let wrapper;
-
- const createWrapper = () => {
- const mockApollo = createMockApollo([
- [getEnvironmentDetails, jest.fn().mockResolvedValue(resolvedEnvironmentDetails)],
- ]);
-
- return mountExtended(EnvironmentsDetailPage, {
- apolloProvider: mockApollo,
- propsData: {
- projectFullPath: resolvedEnvironmentDetails.data.project.fullPath,
- environmentName: resolvedEnvironmentDetails.data.project.environment.name,
- },
- });
- };
-
- describe('when fetching data', () => {
- it('should show a loading indicator', () => {
- wrapper = createWrapper();
-
- expect(wrapper.findComponent(GlLoadingIcon).exists()).toBe(true);
- expect(wrapper.findComponent(GlTableLite).exists()).not.toBe(true);
- });
- });
-
- describe('when data is fetched', () => {
- beforeEach(async () => {
- wrapper = createWrapper();
- await waitForPromises();
- });
-
- it('should render a table when query is loaded', async () => {
- expect(wrapper.findComponent(GlLoadingIcon).exists()).not.toBe(true);
- expect(wrapper.findComponent(GlTableLite).exists()).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/error_tracking/components/error_tracking_list_spec.js b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
index adb2eaaf04e..31473899145 100644
--- a/spec/frontend/error_tracking/components/error_tracking_list_spec.js
+++ b/spec/frontend/error_tracking/components/error_tracking_list_spec.js
@@ -364,7 +364,23 @@ describe('ErrorTrackingList', () => {
});
it('shows empty state', () => {
- expect(wrapper.findComponent(GlEmptyState).isVisible()).toBe(true);
+ const emptyStateComponent = wrapper.findComponent(GlEmptyState);
+ const emptyStatePrimaryDescription = emptyStateComponent.find('span', {
+ exactText: 'Monitor your errors directly in GitLab.',
+ });
+ const emptyStateSecondaryDescription = emptyStateComponent.find('span', {
+ exactText: 'Error tracking is currently in',
+ });
+ const emptyStateLinks = emptyStateComponent.findAll('a');
+ expect(emptyStateComponent.isVisible()).toBe(true);
+ expect(emptyStatePrimaryDescription.exists()).toBe(true);
+ expect(emptyStateSecondaryDescription.exists()).toBe(true);
+ expect(emptyStateLinks.at(0).attributes('href')).toBe(
+ '/help/operations/error_tracking.html#integrated-error-tracking',
+ );
+ expect(emptyStateLinks.at(1).attributes('href')).toBe(
+ 'https://about.gitlab.com/handbook/product/gitlab-the-product/#open-beta',
+ );
});
});
diff --git a/spec/frontend/error_tracking/store/list/actions_spec.js b/spec/frontend/error_tracking/store/list/actions_spec.js
index 2809bbe834e..590983bd93d 100644
--- a/spec/frontend/error_tracking/store/list/actions_spec.js
+++ b/spec/frontend/error_tracking/store/list/actions_spec.js
@@ -4,7 +4,7 @@ import * as actions from '~/error_tracking/store/list/actions';
import * as types from '~/error_tracking/store/list/mutation_types';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import httpStatusCodes from '~/lib/utils/http_status';
+import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_OK } from '~/lib/utils/http_status';
jest.mock('~/flash.js');
@@ -23,7 +23,7 @@ describe('error tracking actions', () => {
it('should start polling for data', () => {
const payload = { errors: [{ id: 1 }, { id: 2 }] };
- mock.onGet().reply(httpStatusCodes.OK, payload);
+ mock.onGet().reply(HTTP_STATUS_OK, payload);
return testAction(
actions.startPolling,
{},
@@ -39,7 +39,7 @@ describe('error tracking actions', () => {
});
it('should show flash on API error', async () => {
- mock.onGet().reply(httpStatusCodes.BAD_REQUEST);
+ mock.onGet().reply(HTTP_STATUS_BAD_REQUEST);
await testAction(
actions.startPolling,
diff --git a/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js b/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
index c9095441d41..8653ebac20d 100644
--- a/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
+++ b/spec/frontend/error_tracking_settings/components/project_dropdown_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { GlCollapsibleListbox } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import { pick, clone } from 'lodash';
@@ -42,7 +42,7 @@ describe('error tracking settings project dropdown', () => {
describe('empty project list', () => {
it('renders the dropdown', () => {
expect(wrapper.find('#project-dropdown').exists()).toBe(true);
- expect(wrapper.findComponent(GlDropdown).exists()).toBe(true);
+ expect(wrapper.findComponent(GlCollapsibleListbox).exists()).toBe(true);
});
it('shows helper text', () => {
@@ -57,8 +57,10 @@ describe('error tracking settings project dropdown', () => {
});
it('does not contain any dropdown items', () => {
- expect(wrapper.findComponent(GlDropdownItem).exists()).toBe(false);
- expect(wrapper.findComponent(GlDropdown).props('text')).toBe('No projects available');
+ expect(wrapper.findComponent(GlCollapsibleListbox).props('items')).toEqual([]);
+ expect(wrapper.findComponent(GlCollapsibleListbox).props('toggleText')).toBe(
+ 'No projects available',
+ );
});
});
@@ -71,12 +73,12 @@ describe('error tracking settings project dropdown', () => {
it('renders the dropdown', () => {
expect(wrapper.find('#project-dropdown').exists()).toBe(true);
- expect(wrapper.findComponent(GlDropdown).exists()).toBe(true);
+ expect(wrapper.findComponent(GlCollapsibleListbox).exists()).toBe(true);
});
it('contains a number of dropdown items', () => {
- expect(wrapper.findComponent(GlDropdownItem).exists()).toBe(true);
- expect(wrapper.findAllComponents(GlDropdownItem).length).toBe(2);
+ expect(wrapper.findComponent(GlCollapsibleListbox).exists()).toBe(true);
+ expect(wrapper.findComponent(GlCollapsibleListbox).props('items').length).toBe(2);
});
});
diff --git a/spec/frontend/error_tracking_settings/mock.js b/spec/frontend/error_tracking_settings/mock.js
index b2d7a912518..96d93540ba5 100644
--- a/spec/frontend/error_tracking_settings/mock.js
+++ b/spec/frontend/error_tracking_settings/mock.js
@@ -5,12 +5,14 @@ const defaultStore = createStore();
export const projectList = [
{
+ id: '1',
name: 'name',
slug: 'slug',
organizationName: 'organizationName',
organizationSlug: 'organizationSlug',
},
{
+ id: '2',
name: 'name2',
slug: 'slug2',
organizationName: 'organizationName2',
@@ -19,6 +21,7 @@ export const projectList = [
];
export const staleProject = {
+ id: '3',
name: 'staleName',
slug: 'staleSlug',
organizationName: 'staleOrganizationName',
@@ -26,6 +29,7 @@ export const staleProject = {
};
export const normalizedProject = {
+ id: '5',
name: 'name',
slug: 'slug',
organizationName: 'organization_name',
@@ -33,6 +37,7 @@ export const normalizedProject = {
};
export const sampleBackendProject = {
+ id: '5',
name: normalizedProject.name,
slug: normalizedProject.slug,
organization_name: normalizedProject.organizationName,
@@ -45,6 +50,7 @@ export const sampleFrontendSettings = {
integrated: false,
token: 'token',
selectedProject: {
+ id: '5',
slug: normalizedProject.slug,
name: normalizedProject.name,
organizationName: normalizedProject.organizationName,
@@ -58,6 +64,7 @@ export const transformedSettings = {
integrated: false,
token: 'token',
project: {
+ sentry_project_id: '5',
slug: normalizedProject.slug,
name: normalizedProject.name,
organization_name: normalizedProject.organizationName,
diff --git a/spec/frontend/feature_flags/components/environments_dropdown_spec.js b/spec/frontend/feature_flags/components/environments_dropdown_spec.js
index 2b9710c9085..a4738fed37e 100644
--- a/spec/frontend/feature_flags/components/environments_dropdown_spec.js
+++ b/spec/frontend/feature_flags/components/environments_dropdown_spec.js
@@ -6,7 +6,7 @@ import waitForPromises from 'helpers/wait_for_promises';
import { TEST_HOST } from 'spec/test_constants';
import EnvironmentsDropdown from '~/feature_flags/components/environments_dropdown.vue';
import axios from '~/lib/utils/axios_utils';
-import httpStatusCodes from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
describe('Feature flags > Environments dropdown', () => {
let wrapper;
@@ -51,7 +51,7 @@ describe('Feature flags > Environments dropdown', () => {
describe('on focus', () => {
it('sets results with the received data', async () => {
- mock.onGet(`${TEST_HOST}/environments.json'`).replyOnce(httpStatusCodes.OK, results);
+ mock.onGet(`${TEST_HOST}/environments.json'`).replyOnce(HTTP_STATUS_OK, results);
factory();
findEnvironmentSearchInput().vm.$emit('focus');
await waitForPromises();
@@ -63,7 +63,7 @@ describe('Feature flags > Environments dropdown', () => {
describe('on keyup', () => {
it('sets results with the received data', async () => {
- mock.onGet(`${TEST_HOST}/environments.json'`).replyOnce(httpStatusCodes.OK, results);
+ mock.onGet(`${TEST_HOST}/environments.json'`).replyOnce(HTTP_STATUS_OK, results);
factory();
findEnvironmentSearchInput().vm.$emit('keyup');
await waitForPromises();
@@ -76,7 +76,7 @@ describe('Feature flags > Environments dropdown', () => {
describe('on input change', () => {
describe('on success', () => {
beforeEach(async () => {
- mock.onGet(`${TEST_HOST}/environments.json'`).replyOnce(httpStatusCodes.OK, results);
+ mock.onGet(`${TEST_HOST}/environments.json'`).replyOnce(HTTP_STATUS_OK, results);
factory();
findEnvironmentSearchInput().vm.$emit('focus');
findEnvironmentSearchInput().vm.$emit('input', 'production');
@@ -128,7 +128,7 @@ describe('Feature flags > Environments dropdown', () => {
describe('on click create button', () => {
beforeEach(async () => {
- mock.onGet(`${TEST_HOST}/environments.json'`).replyOnce(httpStatusCodes.OK, []);
+ mock.onGet(`${TEST_HOST}/environments.json'`).replyOnce(HTTP_STATUS_OK, []);
factory();
findEnvironmentSearchInput().vm.$emit('focus');
findEnvironmentSearchInput().vm.$emit('input', 'production');
diff --git a/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js b/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
index 1c0c444c296..b71cdf78207 100644
--- a/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
+++ b/spec/frontend/feature_flags/components/new_environments_dropdown_spec.js
@@ -4,7 +4,7 @@ import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import NewEnvironmentsDropdown from '~/feature_flags/components/new_environments_dropdown.vue';
import axios from '~/lib/utils/axios_utils';
-import httpStatusCodes from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
const TEST_HOST = '/test';
const TEST_SEARCH = 'production';
@@ -74,7 +74,7 @@ describe('New Environments Dropdown', () => {
describe('with results', () => {
let items;
beforeEach(() => {
- axiosMock.onGet(TEST_HOST).reply(httpStatusCodes.OK, ['prod', 'production']);
+ axiosMock.onGet(TEST_HOST).reply(HTTP_STATUS_OK, ['prod', 'production']);
wrapper.findComponent(GlSearchBoxByType).vm.$emit('focus');
wrapper.findComponent(GlSearchBoxByType).vm.$emit('input', 'prod');
return axios.waitForAll().then(() => {
diff --git a/spec/frontend/feature_highlight/feature_highlight_helper_spec.js b/spec/frontend/feature_highlight/feature_highlight_helper_spec.js
index d82081041d9..4d5cb26810e 100644
--- a/spec/frontend/feature_highlight/feature_highlight_helper_spec.js
+++ b/spec/frontend/feature_highlight/feature_highlight_helper_spec.js
@@ -2,7 +2,7 @@ import MockAdapter from 'axios-mock-adapter';
import { dismiss } from '~/feature_highlight/feature_highlight_helper';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
-import httpStatusCodes, { HTTP_STATUS_CREATED } from '~/lib/utils/http_status';
+import { HTTP_STATUS_CREATED, HTTP_STATUS_INTERNAL_SERVER_ERROR } from '~/lib/utils/http_status';
jest.mock('~/flash');
@@ -11,7 +11,6 @@ describe('feature highlight helper', () => {
let mockAxios;
const endpoint = '/-/callouts/dismiss';
const highlightId = '123';
- const { INTERNAL_SERVER_ERROR } = httpStatusCodes;
beforeEach(() => {
mockAxios = new MockAdapter(axios);
@@ -28,7 +27,9 @@ describe('feature highlight helper', () => {
});
it('triggers flash when dismiss request fails', async () => {
- mockAxios.onPost(endpoint, { feature_name: highlightId }).replyOnce(INTERNAL_SERVER_ERROR);
+ mockAxios
+ .onPost(endpoint, { feature_name: highlightId })
+ .replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
await dismiss(endpoint, highlightId);
diff --git a/spec/frontend/fixtures/environments.rb b/spec/frontend/fixtures/environments.rb
index 3ca5b50ac9c..77e2a96b328 100644
--- a/spec/frontend/fixtures/environments.rb
+++ b/spec/frontend/fixtures/environments.rb
@@ -18,36 +18,55 @@ RSpec.describe 'Environments (JavaScript fixtures)', feature_category: :environm
let(:user) { create(:user) }
let(:role) { :developer }
- let_it_be(:deployment) do
- create(:deployment, :success, environment: environment, deployable: nil)
- end
- let_it_be(:deployment_success) do
- create(:deployment, :success, environment: environment, deployable: build)
- end
+ describe GraphQL::Query, type: :request do
+ environment_details_query_path = 'environments/graphql/queries/environment_details.query.graphql'
- let_it_be(:deployment_failed) do
- create(:deployment, :failed, environment: environment, deployable: build)
- end
+ context 'with no deployments' do
+ it "graphql/#{environment_details_query_path}.empty.json" do
+ query = get_graphql_query_as_string(environment_details_query_path)
+ post_graphql(query, current_user: admin,
+ variables:
+ {
+ projectFullPath: project.full_path,
+ environmentName: environment.name,
+ pageSize: 10
+ })
+ expect_graphql_errors_to_be_empty
+ end
+ end
- let_it_be(:deployment_running) do
- create(:deployment, :running, environment: environment, deployable: build)
- end
+ context 'with deployments' do
+ let_it_be(:deployment) do
+ create(:deployment, :success, environment: environment, deployable: nil)
+ end
- describe GraphQL::Query, type: :request do
- environment_details_query_path = 'environments/graphql/queries/environment_details.query.graphql'
+ let_it_be(:deployment_success) do
+ create(:deployment, :success, environment: environment, deployable: build)
+ end
+
+ let_it_be(:deployment_failed) do
+ create(:deployment, :failed, environment: environment, deployable: build)
+ end
+
+ let_it_be(:deployment_running) do
+ create(:deployment, :running, environment: environment, deployable: build)
+ end
+
+ it "graphql/#{environment_details_query_path}.json" do
+ query = get_graphql_query_as_string(environment_details_query_path)
- it "graphql/#{environment_details_query_path}.json" do
- query = get_graphql_query_as_string(environment_details_query_path)
-
- post_graphql(query, current_user: admin,
- variables:
- {
- projectFullPath: project.full_path,
- environmentName: environment.name,
- pageSize: 10
- })
- expect_graphql_errors_to_be_empty
+ post_graphql(query, current_user: admin,
+ variables:
+ {
+ projectFullPath: project.full_path,
+ environmentName: environment.name,
+ pageSize: 10
+ })
+ expect_graphql_errors_to_be_empty
+ end
end
end
end
diff --git a/spec/frontend/fixtures/issues.rb b/spec/frontend/fixtures/issues.rb
index bc5ece20032..1e6baf30a76 100644
--- a/spec/frontend/fixtures/issues.rb
+++ b/spec/frontend/fixtures/issues.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::IssuesController, '(JavaScript fixtures)', type: :controller do
+RSpec.describe Projects::IssuesController, '(JavaScript fixtures)', :with_license, type: :controller do
include JavaScriptFixturesHelpers
let(:user) { create(:user, feed_token: 'feedtoken:coldfeed') }
diff --git a/spec/frontend/fixtures/projects.rb b/spec/frontend/fixtures/projects.rb
index 101ba203a57..2ccf2c0392f 100644
--- a/spec/frontend/fixtures/projects.rb
+++ b/spec/frontend/fixtures/projects.rb
@@ -66,4 +66,36 @@ RSpec.describe 'Projects (JavaScript fixtures)', type: :controller do
end
end
end
+
+ describe 'Storage', feature_category: :subscription_cost_management do
+ describe GraphQL::Query, type: :request do
+ include GraphqlHelpers
+ context 'project storage statistics query' do
+ before do
+ project.statistics.update!(
+ repository_size: 3_900_000,
+ lfs_objects_size: 4_800_000,
+ build_artifacts_size: 400_000,
+ pipeline_artifacts_size: 400_000,
+ container_registry_size: 3_900_000,
+ wiki_size: 300_000,
+ packages_size: 3_800_000,
+ uploads_size: 900_000
+ )
+ end
+
+ base_input_path = 'usage_quotas/storage/queries/'
+ base_output_path = 'graphql/usage_quotas/storage/'
+ query_name = 'project_storage.query.graphql'
+
+ it "#{base_output_path}#{query_name}.json" do
+ query = get_graphql_query_as_string("#{base_input_path}#{query_name}")
+
+ post_graphql(query, current_user: user, variables: { fullPath: project.full_path })
+
+ expect_graphql_errors_to_be_empty
+ end
+ end
+ end
+ end
end
diff --git a/spec/frontend/fixtures/runner_instructions.rb b/spec/frontend/fixtures/runner_instructions.rb
index 90a01c37479..5659b8023e9 100644
--- a/spec/frontend/fixtures/runner_instructions.rb
+++ b/spec/frontend/fixtures/runner_instructions.rb
@@ -7,7 +7,7 @@ RSpec.describe 'Runner Instructions (JavaScript fixtures)', feature_category: :r
include JavaScriptFixturesHelpers
include GraphqlHelpers
- query_path = 'vue_shared/components/runner_instructions/graphql/queries'
+ query_path = 'vue_shared/components/runner_instructions/graphql'
describe GraphQL::Query do
describe 'get_runner_platforms.query.graphql', type: :request do
diff --git a/spec/frontend/flash_spec.js b/spec/frontend/flash_spec.js
index ade36cd1637..2f0a52a9884 100644
--- a/spec/frontend/flash_spec.js
+++ b/spec/frontend/flash_spec.js
@@ -1,9 +1,8 @@
import * as Sentry from '@sentry/browser';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
-import createFlash, {
+import {
hideFlash,
addDismissFlashClickListener,
- FLASH_TYPES,
FLASH_CLOSED_EVENT,
createAlert,
VARIANT_WARNING,
@@ -340,207 +339,6 @@ describe('Flash', () => {
});
});
- describe('createFlash', () => {
- const message = 'test';
- const fadeTransition = false;
- const addBodyClass = true;
- const defaultParams = {
- message,
- actionConfig: null,
- fadeTransition,
- addBodyClass,
- };
-
- describe('no flash-container', () => {
- it('does not add to the DOM', () => {
- const flashEl = createFlash({ message });
-
- expect(flashEl).toBeNull();
-
- expect(document.querySelector('.flash-alert')).toBeNull();
- });
- });
-
- describe('with flash-container', () => {
- beforeEach(() => {
- setHTMLFixture(
- '<div class="content-wrapper js-content-wrapper"><div class="flash-container"></div></div>',
- );
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- it('adds flash alert element into the document by default', () => {
- createFlash({ ...defaultParams });
-
- expect(document.querySelector('.flash-container .flash-alert')).not.toBeNull();
- expect(document.body.className).toContain('flash-shown');
- });
-
- it('adds flash of a warning type', () => {
- createFlash({ ...defaultParams, type: FLASH_TYPES.WARNING });
-
- expect(document.querySelector('.flash-container .flash-warning')).not.toBeNull();
- expect(document.body.className).toContain('flash-shown');
- });
-
- it('escapes text', () => {
- createFlash({ ...defaultParams, message: '<script>alert("a")</script>' });
-
- const html = document.querySelector('.flash-text').innerHTML;
-
- expect(html).toContain('&lt;script&gt;alert("a")&lt;/script&gt;');
- expect(html).not.toContain('<script>alert("a")</script>');
- });
-
- it('adds flash into specified parent', () => {
- createFlash({ ...defaultParams, parent: document.querySelector('.content-wrapper') });
-
- expect(document.querySelector('.content-wrapper .flash-alert')).not.toBeNull();
- expect(document.querySelector('.content-wrapper').innerText.trim()).toEqual(message);
- });
-
- it('adds container classes when inside content-wrapper', () => {
- createFlash(defaultParams);
-
- expect(document.querySelector('.flash-text').className).toBe('flash-text');
- expect(document.querySelector('.content-wrapper').innerText.trim()).toEqual(message);
- });
-
- it('does not add container when outside of content-wrapper', () => {
- document.querySelector('.content-wrapper').className = 'js-content-wrapper';
- createFlash(defaultParams);
-
- expect(document.querySelector('.flash-text').className.trim()).toContain('flash-text');
- });
-
- it('removes element after clicking', () => {
- createFlash({ ...defaultParams });
-
- document.querySelector('.flash-alert .js-close-icon').click();
-
- expect(document.querySelector('.flash-alert')).toBeNull();
-
- expect(document.body.className).not.toContain('flash-shown');
- });
-
- it('does not capture error using Sentry', () => {
- createFlash({ ...defaultParams, captureError: false, error: new Error('Error!') });
-
- expect(Sentry.captureException).not.toHaveBeenCalled();
- });
-
- it('captures error using Sentry', () => {
- createFlash({ ...defaultParams, captureError: true, error: new Error('Error!') });
-
- expect(Sentry.captureException).toHaveBeenCalledWith(expect.any(Error));
- expect(Sentry.captureException).toHaveBeenCalledWith(
- expect.objectContaining({
- message: 'Error!',
- }),
- );
- });
-
- describe('with actionConfig', () => {
- const findFlashAction = () => document.querySelector('.flash-container .flash-action');
-
- it('adds action link', () => {
- createFlash({
- ...defaultParams,
- actionConfig: {
- title: 'test',
- },
- });
-
- expect(findFlashAction()).not.toBeNull();
- });
-
- it('creates link with href', () => {
- createFlash({
- ...defaultParams,
- actionConfig: {
- href: 'testing',
- title: 'test',
- },
- });
-
- const action = findFlashAction();
-
- expect(action.href).toBe(`${window.location}testing`);
- expect(action.textContent.trim()).toBe('test');
- });
-
- it('uses hash as href when no href is present', () => {
- createFlash({
- ...defaultParams,
- actionConfig: {
- title: 'test',
- },
- });
-
- expect(findFlashAction().href).toBe(`${window.location}#`);
- });
-
- it('adds role when no href is present', () => {
- createFlash({
- ...defaultParams,
- actionConfig: {
- title: 'test',
- },
- });
-
- expect(findFlashAction().getAttribute('role')).toBe('button');
- });
-
- it('escapes the title text', () => {
- createFlash({
- ...defaultParams,
- actionConfig: {
- title: '<script>alert("a")</script>',
- },
- });
-
- const html = findFlashAction().innerHTML;
-
- expect(html).toContain('&lt;script&gt;alert("a")&lt;/script&gt;');
- expect(html).not.toContain('<script>alert("a")</script>');
- });
-
- it('calls actionConfig clickHandler on click', () => {
- const clickHandler = jest.fn();
-
- createFlash({
- ...defaultParams,
- actionConfig: {
- title: 'test',
- clickHandler,
- },
- });
-
- findFlashAction().click();
-
- expect(clickHandler).toHaveBeenCalled();
- });
- });
-
- describe('additional behavior', () => {
- describe('close', () => {
- it('clicks the close icon', () => {
- const flash = createFlash({ ...defaultParams });
- const close = document.querySelector('.flash-alert .js-close-icon');
-
- jest.spyOn(close, 'click');
- flash.close();
-
- expect(close.click.mock.calls.length).toBe(1);
- });
- });
- });
- });
- });
-
describe('addDismissFlashClickListener', () => {
let el;
diff --git a/spec/frontend/frequent_items/components/app_spec.js b/spec/frontend/frequent_items/components/app_spec.js
index c201bbf4af2..b1e87aca63d 100644
--- a/spec/frontend/frequent_items/components/app_spec.js
+++ b/spec/frontend/frequent_items/components/app_spec.js
@@ -1,3 +1,4 @@
+import { GlButton, GlIcon } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
@@ -103,6 +104,7 @@ describe('Frequent Items App Component', () => {
expect(loading.exists()).toBe(true);
expect(loading.find('[aria-label="Loading projects"]').exists()).toBe(true);
+ expect(findSectionHeader().exists()).toBe(false);
});
it('should render frequent projects list header', () => {
@@ -112,25 +114,6 @@ describe('Frequent Items App Component', () => {
expect(sectionHeader.text()).toBe('Frequently visited');
});
- it('should render frequent projects list', async () => {
- const expectedResult = getTopFrequentItems(mockFrequentProjects);
- localStorage.setItem(TEST_STORAGE_KEY, JSON.stringify(mockFrequentProjects));
-
- expect(findFrequentItems().length).toBe(1);
-
- triggerDropdownOpen();
- await nextTick();
-
- expect(findFrequentItems().length).toBe(expectedResult.length);
- expect(findFrequentItemsList().props()).toEqual({
- items: expectedResult,
- namespace: TEST_NAMESPACE,
- hasSearchQuery: false,
- isFetchFailed: false,
- matcher: '',
- });
- });
-
it('should render searched projects list', async () => {
mock.onGet(/\/api\/v4\/projects.json(.*)$/).replyOnce(200, mockSearchedProjects.data);
@@ -164,6 +147,47 @@ describe('Frequent Items App Component', () => {
}),
);
});
+
+ describe('with frequent items list', () => {
+ const expectedResult = getTopFrequentItems(mockFrequentProjects);
+
+ beforeEach(async () => {
+ localStorage.setItem(TEST_STORAGE_KEY, JSON.stringify(mockFrequentProjects));
+ triggerDropdownOpen();
+ await nextTick();
+ });
+
+ it('should render edit button within header', () => {
+ const itemEditButton = findSectionHeader().findComponent(GlButton);
+
+ expect(itemEditButton.exists()).toBe(true);
+ expect(itemEditButton.attributes('title')).toBe('Toggle edit mode');
+ expect(itemEditButton.findComponent(GlIcon).props('name')).toBe('pencil');
+ });
+
+ it('should render frequent projects list', () => {
+ expect(findFrequentItems().length).toBe(expectedResult.length);
+ expect(findFrequentItemsList().props()).toEqual({
+ items: expectedResult,
+ namespace: TEST_NAMESPACE,
+ hasSearchQuery: false,
+ isFetchFailed: false,
+ isItemRemovalFailed: false,
+ matcher: '',
+ });
+ });
+
+ it('dispatches action `toggleItemsListEditablity` when edit button is clicked', async () => {
+ const itemEditButton = findSectionHeader().findComponent(GlButton);
+ itemEditButton.vm.$emit('click');
+
+ await nextTick();
+
+ expect(store.dispatch).toHaveBeenCalledWith(
+ `${TEST_VUEX_MODULE}/toggleItemsListEditablity`,
+ );
+ });
+ });
});
describe('with searchClass', () => {
diff --git a/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js b/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
index e6673fa78ec..4f2badf869d 100644
--- a/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
+++ b/spec/frontend/frequent_items/components/frequent_items_list_item_spec.js
@@ -1,5 +1,5 @@
-import { GlButton } from '@gitlab/ui';
-import Vue from 'vue';
+import { GlIcon } from '@gitlab/ui';
+import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import { trimText } from 'helpers/text_helper';
@@ -12,6 +12,7 @@ import { mockProject } from '../mock_data';
Vue.use(Vuex);
describe('FrequentItemsListItemComponent', () => {
+ const TEST_VUEX_MODULE = 'frequentProjects';
let wrapper;
let trackingSpy;
let store;
@@ -20,11 +21,18 @@ describe('FrequentItemsListItemComponent', () => {
const findAvatar = () => wrapper.findComponent(ProjectAvatar);
const findAllTitles = () => wrapper.findAllByTestId('frequent-items-item-title');
const findNamespace = () => wrapper.findByTestId('frequent-items-item-namespace');
- const findAllButtons = () => wrapper.findAllComponents(GlButton);
+ const findAllFrequentItems = () => wrapper.findAllByTestId('frequent-item-link');
const findAllNamespace = () => wrapper.findAllByTestId('frequent-items-item-namespace');
const findAllAvatars = () => wrapper.findAllComponents(ProjectAvatar);
const findAllMetadataContainers = () =>
wrapper.findAllByTestId('frequent-items-item-metadata-container');
+ const findRemoveButton = () => wrapper.findByTestId('item-remove');
+
+ const toggleItemsListEditablity = async () => {
+ store.dispatch(`${TEST_VUEX_MODULE}/toggleItemsListEditablity`);
+
+ await nextTick();
+ };
const createComponent = (props = {}) => {
wrapper = shallowMountExtended(frequentItemsListItemComponent, {
@@ -38,7 +46,7 @@ describe('FrequentItemsListItemComponent', () => {
...props,
},
provide: {
- vuexModule: 'frequentProjects',
+ vuexModule: TEST_VUEX_MODULE,
},
});
};
@@ -102,7 +110,7 @@ describe('FrequentItemsListItemComponent', () => {
it.each`
name | selector | expected
- ${'button'} | ${findAllButtons} | ${1}
+ ${'list item'} | ${findAllFrequentItems} | ${1}
${'avatar container'} | ${findAllAvatars} | ${1}
${'metadata container'} | ${findAllMetadataContainers} | ${1}
${'title'} | ${findAllTitles} | ${1}
@@ -111,8 +119,37 @@ describe('FrequentItemsListItemComponent', () => {
expect(selector()).toHaveLength(expected);
});
+ it('renders remove button within item when `isItemsListEditable` is true', async () => {
+ await toggleItemsListEditablity();
+
+ const removeButton = findRemoveButton();
+ expect(removeButton.exists()).toBe(true);
+ expect(removeButton.attributes('title')).toBe('Remove');
+ expect(removeButton.findComponent(GlIcon).props('name')).toBe('close');
+ });
+
+ it('dispatches action `removeFrequentItem` when remove button is clicked', async () => {
+ await toggleItemsListEditablity();
+
+ jest.spyOn(store, 'dispatch');
+
+ const removeButton = findRemoveButton();
+ removeButton.vm.$emit(
+ 'click',
+ { stopPropagation: jest.fn(), preventDefault: jest.fn() },
+ mockProject.id,
+ );
+
+ await nextTick();
+
+ expect(store.dispatch).toHaveBeenCalledWith(
+ `${TEST_VUEX_MODULE}/removeFrequentItem`,
+ mockProject.id,
+ );
+ });
+
it('tracks when item link is clicked', () => {
- const link = wrapper.findComponent(GlButton);
+ const link = wrapper.findByTestId('frequent-item-link');
link.vm.$emit('click');
diff --git a/spec/frontend/frequent_items/components/frequent_items_list_spec.js b/spec/frontend/frequent_items/components/frequent_items_list_spec.js
index 9f08a432a3d..d024925f62b 100644
--- a/spec/frontend/frequent_items/components/frequent_items_list_spec.js
+++ b/spec/frontend/frequent_items/components/frequent_items_list_spec.js
@@ -18,6 +18,7 @@ describe('FrequentItemsListComponent', () => {
namespace: 'projects',
items: mockFrequentProjects,
isFetchFailed: false,
+ isItemRemovalFailed: false,
hasSearchQuery: false,
matcher: 'lab',
...props,
@@ -51,22 +52,34 @@ describe('FrequentItemsListComponent', () => {
});
describe('fetched item messages', () => {
- it('should return appropriate empty list message based on value of `localStorageFailed` prop with projects', async () => {
+ it('should show default empty list message', async () => {
createComponent({
- isFetchFailed: true,
+ items: [],
});
- expect(wrapper.vm.listEmptyMessage).toBe(
- 'This feature requires browser localStorage support',
+ expect(wrapper.findByTestId('frequent-items-list-empty').text()).toContain(
+ 'Projects you visit often will appear here',
);
-
- wrapper.setProps({
- isFetchFailed: false,
- });
- await nextTick();
-
- expect(wrapper.vm.listEmptyMessage).toBe('Projects you visit often will appear here');
});
+
+ it.each`
+ isFetchFailed | isItemRemovalFailed
+ ${true} | ${false}
+ ${false} | ${true}
+ `(
+ 'should show failure message when `isFetchFailed` is $isFetchFailed or `isItemRemovalFailed` is $isItemRemovalFailed',
+ ({ isFetchFailed, isItemRemovalFailed }) => {
+ createComponent({
+ items: [],
+ isFetchFailed,
+ isItemRemovalFailed,
+ });
+
+ expect(wrapper.findByTestId('frequent-items-list-empty').text()).toContain(
+ 'This feature requires browser localStorage support',
+ );
+ },
+ );
});
describe('searched item messages', () => {
diff --git a/spec/frontend/frequent_items/store/actions_spec.js b/spec/frontend/frequent_items/store/actions_spec.js
index 3fc3eaf52a2..4f998cc26da 100644
--- a/spec/frontend/frequent_items/store/actions_spec.js
+++ b/spec/frontend/frequent_items/store/actions_spec.js
@@ -5,6 +5,7 @@ import * as types from '~/frequent_items/store/mutation_types';
import state from '~/frequent_items/store/state';
import AccessorUtilities from '~/lib/utils/accessor';
import axios from '~/lib/utils/axios_utils';
+import { useLocalStorageSpy } from 'helpers/local_storage_helper';
import {
mockNamespace,
mockStorageKey,
@@ -13,6 +14,7 @@ import {
} from '../mock_data';
describe('Frequent Items Dropdown Store Actions', () => {
+ useLocalStorageSpy();
let mockedState;
let mock;
@@ -52,6 +54,18 @@ describe('Frequent Items Dropdown Store Actions', () => {
});
});
+ describe('toggleItemsListEditablity', () => {
+ it('should toggle items list editability', () => {
+ return testAction(
+ actions.toggleItemsListEditablity,
+ null,
+ mockedState,
+ [{ type: types.TOGGLE_ITEMS_LIST_EDITABILITY }],
+ [],
+ );
+ });
+ });
+
describe('requestFrequentItems', () => {
it('should request frequent items', () => {
return testAction(
@@ -211,4 +225,77 @@ describe('Frequent Items Dropdown Store Actions', () => {
);
});
});
+
+ describe('removeFrequentItemSuccess', () => {
+ it('should remove frequent item on success', () => {
+ return testAction(
+ actions.removeFrequentItemSuccess,
+ { itemId: 1 },
+ mockedState,
+ [
+ {
+ type: types.RECEIVE_REMOVE_FREQUENT_ITEM_SUCCESS,
+ payload: { itemId: 1 },
+ },
+ ],
+ [],
+ );
+ });
+ });
+
+ describe('removeFrequentItemError', () => {
+ it('should not remove frequent item on failure', () => {
+ return testAction(
+ actions.removeFrequentItemError,
+ null,
+ mockedState,
+ [{ type: types.RECEIVE_REMOVE_FREQUENT_ITEM_ERROR }],
+ [],
+ );
+ });
+ });
+
+ describe('removeFrequentItem', () => {
+ beforeEach(() => {
+ mockedState.items = [...mockFrequentProjects];
+ window.localStorage.setItem(mockStorageKey, JSON.stringify(mockFrequentProjects));
+ });
+
+ it('should remove provided itemId from localStorage', () => {
+ jest.spyOn(AccessorUtilities, 'canUseLocalStorage').mockReturnValue(true);
+
+ actions.removeFrequentItem(
+ { commit: jest.fn(), dispatch: jest.fn(), state: mockedState },
+ mockFrequentProjects[0].id,
+ );
+
+ expect(window.localStorage.getItem(mockStorageKey)).toBe(
+ JSON.stringify(mockFrequentProjects.slice(1)), // First item was removed
+ );
+ });
+
+ it('should dispatch `removeFrequentItemSuccess` on localStorage update success', () => {
+ jest.spyOn(AccessorUtilities, 'canUseLocalStorage').mockReturnValue(true);
+
+ return testAction(
+ actions.removeFrequentItem,
+ mockFrequentProjects[0].id,
+ mockedState,
+ [],
+ [{ type: 'removeFrequentItemSuccess', payload: mockFrequentProjects[0].id }],
+ );
+ });
+
+ it('should dispatch `removeFrequentItemError` on localStorage update failure', () => {
+ jest.spyOn(AccessorUtilities, 'canUseLocalStorage').mockReturnValue(false);
+
+ return testAction(
+ actions.removeFrequentItem,
+ mockFrequentProjects[0].id,
+ mockedState,
+ [],
+ [{ type: 'removeFrequentItemError' }],
+ );
+ });
+ });
});
diff --git a/spec/frontend/frequent_items/store/mutations_spec.js b/spec/frontend/frequent_items/store/mutations_spec.js
index e593c9fae58..1e1878c3377 100644
--- a/spec/frontend/frequent_items/store/mutations_spec.js
+++ b/spec/frontend/frequent_items/store/mutations_spec.js
@@ -44,6 +44,18 @@ describe('Frequent Items dropdown mutations', () => {
});
});
+ describe('TOGGLE_ITEMS_LIST_EDITABILITY', () => {
+ it('should toggle items list editability', () => {
+ mutations[types.TOGGLE_ITEMS_LIST_EDITABILITY](stateCopy);
+
+ expect(stateCopy.isItemsListEditable).toEqual(true);
+
+ mutations[types.TOGGLE_ITEMS_LIST_EDITABILITY](stateCopy);
+
+ expect(stateCopy.isItemsListEditable).toEqual(false);
+ });
+ });
+
describe('REQUEST_FREQUENT_ITEMS', () => {
it('should set view states when requesting frequent items', () => {
mutations[types.REQUEST_FREQUENT_ITEMS](stateCopy);
@@ -114,4 +126,27 @@ describe('Frequent Items dropdown mutations', () => {
expect(stateCopy.isFetchFailed).toEqual(true);
});
});
+
+ describe('RECEIVE_REMOVE_FREQUENT_ITEM_SUCCESS', () => {
+ it('should remove item with provided itemId from the items', () => {
+ stateCopy.isItemRemovalFailed = true;
+ stateCopy.items = mockFrequentProjects;
+
+ mutations[types.RECEIVE_REMOVE_FREQUENT_ITEM_SUCCESS](stateCopy, mockFrequentProjects[0].id);
+
+ expect(stateCopy.items).toHaveLength(mockFrequentProjects.length - 1);
+ expect(stateCopy.items).toEqual([...mockFrequentProjects.slice(1)]);
+ expect(stateCopy.isItemRemovalFailed).toBe(false);
+ });
+ });
+
+ describe('RECEIVE_REMOVE_FREQUENT_ITEM_ERROR', () => {
+ it('should set isItemRemovalFailed to true', () => {
+ stateCopy.isItemRemovalFailed = false;
+
+ mutations[types.RECEIVE_REMOVE_FREQUENT_ITEM_ERROR](stateCopy);
+
+ expect(stateCopy.isItemRemovalFailed).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/gfm_auto_complete/mock_data.js b/spec/frontend/gfm_auto_complete/mock_data.js
index 9c5a9d7ef3d..d58ccaf0f39 100644
--- a/spec/frontend/gfm_auto_complete/mock_data.js
+++ b/spec/frontend/gfm_auto_complete/mock_data.js
@@ -37,8 +37,8 @@ export const crmContactsMock = [
{
id: 1,
email: 'contact.1@email.com',
- firstName: 'Contact',
- lastName: 'One',
+ first_name: 'Contact',
+ last_name: 'One',
search: 'contact.1@email.com',
state: 'active',
set: false,
@@ -46,8 +46,8 @@ export const crmContactsMock = [
{
id: 2,
email: 'contact.2@email.com',
- firstName: 'Contact',
- lastName: 'Two',
+ first_name: 'Contact',
+ last_name: 'Two',
search: 'contact.2@email.com',
state: 'active',
set: false,
@@ -55,8 +55,8 @@ export const crmContactsMock = [
{
id: 3,
email: 'contact.3@email.com',
- firstName: 'Contact',
- lastName: 'Three',
+ first_name: 'Contact',
+ last_name: 'Three',
search: 'contact.3@email.com',
state: 'inactive',
set: false,
@@ -64,8 +64,8 @@ export const crmContactsMock = [
{
id: 4,
email: 'contact.4@email.com',
- firstName: 'Contact',
- lastName: 'Four',
+ first_name: 'Contact',
+ last_name: 'Four',
search: 'contact.4@email.com',
state: 'inactive',
set: true,
@@ -73,8 +73,8 @@ export const crmContactsMock = [
{
id: 5,
email: 'contact.5@email.com',
- firstName: 'Contact',
- lastName: 'Five',
+ first_name: 'Contact',
+ last_name: 'Five',
search: 'contact.5@email.com',
state: 'active',
set: true,
@@ -82,8 +82,8 @@ export const crmContactsMock = [
{
id: 5,
email: 'contact.6@email.com',
- firstName: 'Contact',
- lastName: 'Six',
+ first_name: 'Contact',
+ last_name: 'Six',
search: 'contact.6@email.com',
state: 'active',
set: undefined, // On purpose
diff --git a/spec/frontend/gfm_auto_complete_spec.js b/spec/frontend/gfm_auto_complete_spec.js
index eeef92d4183..cc2dc084e47 100644
--- a/spec/frontend/gfm_auto_complete_spec.js
+++ b/spec/frontend/gfm_auto_complete_spec.js
@@ -4,6 +4,7 @@ import $ from 'jquery';
import labelsFixture from 'test_fixtures/autocomplete_sources/labels.json';
import { setHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
import GfmAutoComplete, {
+ escape,
membersBeforeSave,
highlighter,
CONTACT_STATE_ACTIVE,
@@ -21,6 +22,20 @@ import {
crmContactsMock,
} from 'ee_else_ce_jest/gfm_auto_complete/mock_data';
+describe('escape', () => {
+ it.each`
+ xssPayload | escapedPayload
+ ${'<script>alert(1)</script>'} | ${'&lt;script&gt;alert(1)&lt;/script&gt;'}
+ ${'%3Cscript%3E alert(1) %3C%2Fscript%3E'} | ${'&lt;script&gt; alert(1) &lt;/script&gt;'}
+ ${'%253Cscript%253E alert(1) %253C%252Fscript%253E'} | ${'&lt;script&gt; alert(1) &lt;/script&gt;'}
+ `(
+ 'escapes the input string correctly accounting for multiple encoding',
+ ({ xssPayload, escapedPayload }) => {
+ expect(escape(xssPayload)).toBe(escapedPayload);
+ },
+ );
+});
+
describe('GfmAutoComplete', () => {
const fetchDataMock = { fetchData: jest.fn() };
let gfmAutoCompleteCallbacks = GfmAutoComplete.prototype.getDefaultCallbacks.call(fetchDataMock);
@@ -590,7 +605,7 @@ describe('GfmAutoComplete', () => {
id: 5,
title: '${search}<script>oh no $', // eslint-disable-line no-template-curly-in-string
}),
- ).toBe('<li><small>5</small> &dollar;{search}&lt;script&gt;oh no &dollar;</li>');
+ ).toBe('<li><small>5</small> &amp;dollar;{search}&lt;script&gt;oh no &amp;dollar;</li>');
});
});
@@ -636,7 +651,7 @@ describe('GfmAutoComplete', () => {
availabilityStatus: '',
}),
).toBe(
- '<li>IMG my-group <small>&dollar;{search}&lt;script&gt;oh no &dollar;</small> <i class="icon"/></li>',
+ '<li>IMG my-group <small>&amp;dollar;{search}&lt;script&gt;oh no &amp;dollar;</small> <i class="icon"/></li>',
);
});
@@ -813,7 +828,7 @@ describe('GfmAutoComplete', () => {
const title = '${search}<script>oh no $'; // eslint-disable-line no-template-curly-in-string
expect(GfmAutoComplete.Labels.templateFunction(color, title)).toBe(
- '<li><span class="dropdown-label-box" style="background: #123456"></span> &dollar;{search}&lt;script&gt;oh no &dollar;</li>',
+ '<li><span class="dropdown-label-box" style="background: #123456"></span> &amp;dollar;{search}&lt;script&gt;oh no &amp;dollar;</li>',
);
});
});
@@ -868,7 +883,7 @@ describe('GfmAutoComplete', () => {
const title = '${search}<script>oh no $'; // eslint-disable-line no-template-curly-in-string
expect(GfmAutoComplete.Milestones.templateFunction(title, expired)).toBe(
- '<li>&dollar;{search}&lt;script&gt;oh no &dollar;</li>',
+ '<li>&amp;dollar;{search}&lt;script&gt;oh no &amp;dollar;</li>',
);
});
});
@@ -925,7 +940,9 @@ describe('GfmAutoComplete', () => {
const expectContacts = ({ input, output }) => {
triggerDropdown(input);
- expect(getDropdownItems()).toEqual(output.map((contact) => contact.email));
+ expect(getDropdownItems()).toEqual(
+ output.map((contact) => `${contact.first_name} ${contact.last_name} ${contact.email}`),
+ );
};
describe('with no contacts assigned', () => {
diff --git a/spec/frontend/group_settings/components/shared_runners_form_spec.js b/spec/frontend/group_settings/components/shared_runners_form_spec.js
index 5282c0ed839..85475c749b0 100644
--- a/spec/frontend/group_settings/components/shared_runners_form_spec.js
+++ b/spec/frontend/group_settings/components/shared_runners_form_spec.js
@@ -10,7 +10,7 @@ jest.mock('~/api/groups_api');
const GROUP_ID = '99';
const RUNNER_ENABLED_VALUE = 'enabled';
const RUNNER_DISABLED_VALUE = 'disabled_and_unoverridable';
-const RUNNER_ALLOW_OVERRIDE_VALUE = 'disabled_with_override';
+const RUNNER_ALLOW_OVERRIDE_VALUE = 'disabled_and_overridable';
describe('group_settings/components/shared_runners_form', () => {
let wrapper;
diff --git a/spec/frontend/ide/components/repo_editor_spec.js b/spec/frontend/ide/components/repo_editor_spec.js
index 211fee31a9c..9092d73571b 100644
--- a/spec/frontend/ide/components/repo_editor_spec.js
+++ b/spec/frontend/ide/components/repo_editor_spec.js
@@ -122,7 +122,7 @@ describe('RepoEditor', () => {
vm.$once('editorSetup', resolve);
});
- const createComponent = async ({ state = {}, activeFile = dummyFile.text, flags = {} } = {}) => {
+ const createComponent = async ({ state = {}, activeFile = dummyFile.text } = {}) => {
const store = prepareStore(state, activeFile);
wrapper = shallowMount(RepoEditor, {
store,
@@ -132,9 +132,6 @@ describe('RepoEditor', () => {
mocks: {
ContentViewer,
},
- provide: {
- glFeatures: flags,
- },
});
await waitForPromises();
vm = wrapper.vm;
@@ -196,12 +193,8 @@ describe('RepoEditor', () => {
});
describe('schema registration for .gitlab-ci.yml', () => {
- const setup = async (activeFile, flagIsOn = true) => {
- await createComponent({
- flags: {
- schemaLinting: flagIsOn,
- },
- });
+ const setup = async (activeFile) => {
+ await createComponent();
vm.editor.registerCiSchema = jest.fn();
if (activeFile) {
wrapper.setProps({ file: activeFile });
@@ -210,15 +203,13 @@ describe('RepoEditor', () => {
await nextTick();
};
it.each`
- flagIsOn | activeFile | shouldUseExtension | desc
- ${false} | ${dummyFile.markdown} | ${false} | ${`file is not CI config; should NOT`}
- ${true} | ${dummyFile.markdown} | ${false} | ${`file is not CI config; should NOT`}
- ${false} | ${dummyFile.ciConfig} | ${false} | ${`file is CI config; should NOT`}
- ${true} | ${dummyFile.ciConfig} | ${true} | ${`file is CI config; should`}
+ activeFile | shouldUseExtension | desc
+ ${dummyFile.markdown} | ${false} | ${`file is not CI config; should NOT`}
+ ${dummyFile.ciConfig} | ${true} | ${`file is CI config; should`}
`(
- 'when the flag is "$flagIsOn", $desc use extension',
- async ({ flagIsOn, activeFile, shouldUseExtension }) => {
- await setup(activeFile, flagIsOn);
+ 'when the activeFile is "$activeFile", $desc use extension',
+ async ({ activeFile, shouldUseExtension }) => {
+ await setup(activeFile);
if (shouldUseExtension) {
expect(applyExtensionSpy).toHaveBeenCalledWith({
diff --git a/spec/frontend/ide/lib/gitlab_web_ide/get_base_config_spec.js b/spec/frontend/ide/lib/gitlab_web_ide/get_base_config_spec.js
index 4b4e96f3b41..ed67a0948e4 100644
--- a/spec/frontend/ide/lib/gitlab_web_ide/get_base_config_spec.js
+++ b/spec/frontend/ide/lib/gitlab_web_ide/get_base_config_spec.js
@@ -3,20 +3,32 @@ import { TEST_HOST } from 'helpers/test_constants';
const TEST_GITLAB_WEB_IDE_PUBLIC_PATH = 'test/gitlab-web-ide/public/path';
const TEST_GITLAB_URL = 'https://gdk.test/';
+const TEST_RELATIVE_URL_ROOT = '/gl_rel_root';
describe('~/ide/lib/gitlab_web_ide/get_base_config', () => {
- it('returns base properties for @gitlab/web-ide config', () => {
+ beforeEach(() => {
// why: add trailing "/" to test that it gets removed
process.env.GITLAB_WEB_IDE_PUBLIC_PATH = `${TEST_GITLAB_WEB_IDE_PUBLIC_PATH}/`;
window.gon.gitlab_url = TEST_GITLAB_URL;
+ window.gon.relative_url_root = '';
+ });
- // act
+ it('with default, returns base properties for @gitlab/web-ide config', () => {
const actual = getBaseConfig();
- // asset
expect(actual).toEqual({
baseUrl: `${TEST_HOST}/${TEST_GITLAB_WEB_IDE_PUBLIC_PATH}`,
gitlabUrl: TEST_GITLAB_URL,
});
});
+
+ it('with relative_url_root, returns baseUrl with relative url root', () => {
+ window.gon.relative_url_root = TEST_RELATIVE_URL_ROOT;
+
+ const actual = getBaseConfig();
+
+ expect(actual).toMatchObject({
+ baseUrl: `${TEST_HOST}${TEST_RELATIVE_URL_ROOT}/${TEST_GITLAB_WEB_IDE_PUBLIC_PATH}`,
+ });
+ });
});
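The relative_url_root case above pins down how the Web IDE base URL is assembled: the window origin, then gon.relative_url_root, then the public path with its trailing slash stripped. A minimal sketch that satisfies both expectations (illustrative only, not the module's actual source):

// Sketch assuming only the globals exercised by the spec above.
export const getBaseConfig = () => ({
  // the beforeEach appends a trailing "/" to the env var precisely so its removal can be asserted
  baseUrl: `${window.location.origin}${window.gon.relative_url_root}/${process.env.GITLAB_WEB_IDE_PUBLIC_PATH.replace(/\/$/, '')}`,
  gitlabUrl: window.gon.gitlab_url,
});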
diff --git a/spec/frontend/ide/stores/modules/commit/actions_spec.js b/spec/frontend/ide/stores/modules/commit/actions_spec.js
index 4e8467de759..8601e13f7ca 100644
--- a/spec/frontend/ide/stores/modules/commit/actions_spec.js
+++ b/spec/frontend/ide/stores/modules/commit/actions_spec.js
@@ -366,17 +366,38 @@ describe('IDE commit module actions', () => {
});
describe('merge request', () => {
- it('redirects to new merge request page', async () => {
- jest.spyOn(eventHub, '$on').mockImplementation();
+ it.each`
+ branchName | targetBranchName | branchNameInURL | targetBranchInURL
+ ${'foo'} | ${'main'} | ${'foo'} | ${'main'}
+ ${'foo#bar'} | ${'main'} | ${'foo%23bar'} | ${'main'}
+ ${'foo#bar'} | ${'not#so#main'} | ${'foo%23bar'} | ${'not%23so%23main'}
+ `(
+ 'redirects to the correct new MR page when new branch is "$branchName" and target branch is "$targetBranchName"',
+ async ({ branchName, targetBranchName, branchNameInURL, targetBranchInURL }) => {
+ Object.assign(store.state.projects.abcproject, {
+ branches: {
+ [targetBranchName]: {
+ name: targetBranchName,
+ workingReference: '1',
+ commit: {
+ id: TEST_COMMIT_SHA,
+ },
+ can_push: true,
+ },
+ },
+ });
+ store.state.currentBranchId = targetBranchName;
+ store.state.commit.newBranchName = branchName;
- store.state.commit.commitAction = COMMIT_TO_NEW_BRANCH;
- store.state.commit.shouldCreateMR = true;
+ store.state.commit.commitAction = COMMIT_TO_NEW_BRANCH;
+ store.state.commit.shouldCreateMR = true;
- await store.dispatch('commit/commitChanges');
- expect(visitUrl).toHaveBeenCalledWith(
- `webUrl/-/merge_requests/new?merge_request[source_branch]=${store.getters['commit/placeholderBranchName']}&merge_request[target_branch]=main&nav_source=webide`,
- );
- });
+ await store.dispatch('commit/commitChanges');
+ expect(visitUrl).toHaveBeenCalledWith(
+ `webUrl/-/merge_requests/new?merge_request[source_branch]=${branchNameInURL}&merge_request[target_branch]=${targetBranchInURL}&nav_source=webide`,
+ );
+ },
+ );
it('does not redirect to new merge request page when shouldCreateMR is not checked', async () => {
jest.spyOn(eventHub, '$on').mockImplementation();
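The new table cases ("foo#bar" asserted as "foo%23bar") only hold if both branch names are URL-encoded when the new-MR URL is built. A hedged sketch of that encoding step; the helper name and signature are illustrative, not the store action's real code:

// Reproduces the URLs the spec expects; webUrl is the project web URL used in the assertion.
const newMergeRequestUrl = (webUrl, sourceBranch, targetBranch) =>
  `${webUrl}/-/merge_requests/new` +
  `?merge_request[source_branch]=${encodeURIComponent(sourceBranch)}` +
  `&merge_request[target_branch]=${encodeURIComponent(targetBranch)}` +
  '&nav_source=webide';
// newMergeRequestUrl('webUrl', 'foo#bar', 'not#so#main') matches the third table row.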
diff --git a/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js b/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js
index 8d21088bcaf..09be1e333b3 100644
--- a/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal/actions/checks_spec.js
@@ -10,7 +10,11 @@ import {
import * as messages from '~/ide/stores/modules/terminal/messages';
import * as mutationTypes from '~/ide/stores/modules/terminal/mutation_types';
import axios from '~/lib/utils/axios_utils';
-import httpStatus, { HTTP_STATUS_UNPROCESSABLE_ENTITY } from '~/lib/utils/http_status';
+import {
+ HTTP_STATUS_FORBIDDEN,
+ HTTP_STATUS_NOT_FOUND,
+ HTTP_STATUS_UNPROCESSABLE_ENTITY,
+} from '~/lib/utils/http_status';
const TEST_PROJECT_PATH = 'lorem/root';
const TEST_BRANCH_ID = 'main';
@@ -102,7 +106,7 @@ describe('IDE store terminal check actions', () => {
);
});
- [httpStatus.FORBIDDEN, httpStatus.NOT_FOUND].forEach((status) => {
+ [HTTP_STATUS_FORBIDDEN, HTTP_STATUS_NOT_FOUND].forEach((status) => {
it(`hides tab, when status is ${status}`, () => {
const payload = { response: { status } };
diff --git a/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js b/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js
index df365442c67..9fd5f1a38d7 100644
--- a/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal/actions/session_controls_spec.js
@@ -6,7 +6,7 @@ import { STARTING, PENDING, STOPPING, STOPPED } from '~/ide/stores/modules/termi
import * as messages from '~/ide/stores/modules/terminal/messages';
import * as mutationTypes from '~/ide/stores/modules/terminal/mutation_types';
import axios from '~/lib/utils/axios_utils';
-import httpStatus, { HTTP_STATUS_UNPROCESSABLE_ENTITY } from '~/lib/utils/http_status';
+import { HTTP_STATUS_NOT_FOUND, HTTP_STATUS_UNPROCESSABLE_ENTITY } from '~/lib/utils/http_status';
jest.mock('~/flash');
@@ -285,7 +285,7 @@ describe('IDE store terminal session controls actions', () => {
);
});
- [httpStatus.NOT_FOUND, HTTP_STATUS_UNPROCESSABLE_ENTITY].forEach((status) => {
+ [HTTP_STATUS_NOT_FOUND, HTTP_STATUS_UNPROCESSABLE_ENTITY].forEach((status) => {
it(`dispatches request and startSession on ${status}`, () => {
mock
.onPost(state.session.retryPath, { branch: rootState.currentBranchId, format: 'json' })
diff --git a/spec/frontend/ide/stores/modules/terminal/messages_spec.js b/spec/frontend/ide/stores/modules/terminal/messages_spec.js
index 2a802d6b4af..f99496a4b98 100644
--- a/spec/frontend/ide/stores/modules/terminal/messages_spec.js
+++ b/spec/frontend/ide/stores/modules/terminal/messages_spec.js
@@ -1,7 +1,11 @@
import { escape } from 'lodash';
import { TEST_HOST } from 'spec/test_constants';
import * as messages from '~/ide/stores/modules/terminal/messages';
-import httpStatus, { HTTP_STATUS_UNPROCESSABLE_ENTITY } from '~/lib/utils/http_status';
+import {
+ HTTP_STATUS_FORBIDDEN,
+ HTTP_STATUS_NOT_FOUND,
+ HTTP_STATUS_UNPROCESSABLE_ENTITY,
+} from '~/lib/utils/http_status';
import { sprintf } from '~/locale';
const TEST_HELP_URL = `${TEST_HOST}/help`;
@@ -26,13 +30,13 @@ describe('IDE store terminal messages', () => {
});
it('returns permission error, with status FORBIDDEN', () => {
- const result = messages.configCheckError(httpStatus.FORBIDDEN, TEST_HELP_URL);
+ const result = messages.configCheckError(HTTP_STATUS_FORBIDDEN, TEST_HELP_URL);
expect(result).toBe(messages.ERROR_PERMISSION);
});
it('returns unexpected error, with unexpected status', () => {
- const result = messages.configCheckError(httpStatus.NOT_FOUND, TEST_HELP_URL);
+ const result = messages.configCheckError(HTTP_STATUS_NOT_FOUND, TEST_HELP_URL);
expect(result).toBe(messages.UNEXPECTED_ERROR_CONFIG);
});
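The http_status edits here and in the neighbouring terminal specs are one mechanical migration: drop the default httpStatus export and import each code as a named HTTP_STATUS_* constant. The before/after shape, with mock and path as placeholders:

// Before:
// import httpStatus from '~/lib/utils/http_status';
// mock.onGet(path).reply(httpStatus.NOT_FOUND);

// After: each status code is a named, individually imported constant.
import { HTTP_STATUS_NOT_FOUND } from '~/lib/utils/http_status';
mock.onGet(path).reply(HTTP_STATUS_NOT_FOUND);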
diff --git a/spec/frontend/import_entities/components/import_status_spec.js b/spec/frontend/import_entities/components/import_status_spec.js
index 686a21e3923..56c4ed827d7 100644
--- a/spec/frontend/import_entities/components/import_status_spec.js
+++ b/spec/frontend/import_entities/components/import_status_spec.js
@@ -18,6 +18,7 @@ describe('Import entities status component', () => {
describe('success status', () => {
const getStatusText = () => wrapper.findComponent(GlBadge).text();
+ const getStatusIcon = () => wrapper.findComponent(GlBadge).props('icon');
it('displays finished status as complete when no stats are provided', () => {
createComponent({
@@ -38,6 +39,7 @@ describe('Import entities status component', () => {
});
expect(getStatusText()).toBe('Complete');
+ expect(getStatusIcon()).toBe('status-success');
});
it('displays finished status as partial when all stats items were processed', () => {
@@ -52,6 +54,7 @@ describe('Import entities status component', () => {
});
expect(getStatusText()).toBe('Partial import');
+ expect(getStatusIcon()).toBe('status-alert');
});
});
@@ -105,9 +108,9 @@ describe('Import entities status component', () => {
const getStatusIcon = () =>
wrapper.findComponent(GlAccordionItem).findComponent(GlIcon).props().name;
- const createComponentWithStats = ({ fetched, imported }) => {
+ const createComponentWithStats = ({ fetched, imported, status = 'created' }) => {
createComponent({
- status: 'created',
+ status,
stats: {
fetched: { label: fetched },
imported: { label: imported },
@@ -124,7 +127,7 @@ describe('Import entities status component', () => {
expect(getStatusIcon()).toBe('status-scheduled');
});
- it('displays running status when imported is not equal to fetched', () => {
+ it('displays running status when imported is not equal to fetched and import is not finished', () => {
createComponentWithStats({
fetched: 100,
imported: 10,
@@ -133,6 +136,16 @@ describe('Import entities status component', () => {
expect(getStatusIcon()).toBe('status-running');
});
+ it('displays alert status when imported is not equal to fetched and import is finished', () => {
+ createComponentWithStats({
+ fetched: 100,
+ imported: 10,
+ status: STATUSES.FINISHED,
+ });
+
+ expect(getStatusIcon()).toBe('status-alert');
+ });
+
it('displays success status when imported is equal to fetched', () => {
createComponentWithStats({
fetched: 100,
diff --git a/spec/frontend/import_entities/import_groups/components/import_actions_cell_spec.js b/spec/frontend/import_entities/import_groups/components/import_actions_cell_spec.js
index cd56f573011..da7fb4e060d 100644
--- a/spec/frontend/import_entities/import_groups/components/import_actions_cell_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_actions_cell_spec.js
@@ -1,4 +1,4 @@
-import { GlButton, GlIcon } from '@gitlab/ui';
+import { GlButton, GlIcon, GlDropdown, GlDropdownItem } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import ImportActionsCell from '~/import_entities/import_groups/components/import_actions_cell.vue';
@@ -8,6 +8,7 @@ describe('import actions cell', () => {
const createComponent = (props) => {
wrapper = shallowMount(ImportActionsCell, {
propsData: {
+ isProjectsImportEnabled: false,
isFinished: false,
isAvailableForImport: false,
isInvalid: false,
@@ -78,4 +79,39 @@ describe('import actions cell', () => {
expect(wrapper.emitted('import-group')).toHaveLength(1);
});
+
+ describe.each`
+ isFinished | expectedAction
+ ${false} | ${'Import'}
+ ${true} | ${'Re-import'}
+ `(
+ 'when import projects is enabled, group is available for import and finish status is $isFinished',
+ ({ isFinished, expectedAction }) => {
+ beforeEach(() => {
+ createComponent({ isProjectsImportEnabled: true, isAvailableForImport: true, isFinished });
+ });
+
+ it('renders import dropdown', () => {
+ const dropdown = wrapper.findComponent(GlDropdown);
+ expect(dropdown.props('text')).toBe(`${expectedAction} with projects`);
+ expect(dropdown.findComponent(GlDropdownItem).text()).toBe(
+ `${expectedAction} without projects`,
+ );
+ });
+
+ it('requests to migrate projects by default', async () => {
+ const dropdown = wrapper.findComponent(GlDropdown);
+ dropdown.vm.$emit('click');
+
+ expect(wrapper.emitted('import-group')[0]).toStrictEqual([{ migrateProjects: true }]);
+ });
+
+ it('requests not to migrate projects via dropdown option', async () => {
+ const dropdown = wrapper.findComponent(GlDropdown);
+ dropdown.findComponent(GlDropdownItem).vm.$emit('click');
+
+ expect(wrapper.emitted('import-group')[0]).toStrictEqual([{ migrateProjects: false }]);
+ });
+ },
+ );
});
diff --git a/spec/frontend/import_entities/import_groups/components/import_table_spec.js b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
index f7a97f22d44..bd79e20e698 100644
--- a/spec/frontend/import_entities/import_groups/components/import_table_spec.js
+++ b/spec/frontend/import_entities/import_groups/components/import_table_spec.js
@@ -7,7 +7,7 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import { createAlert } from '~/flash';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_BAD_REQUEST } from '~/lib/utils/http_status';
import axios from '~/lib/utils/axios_utils';
import { STATUSES } from '~/import_entities/constants';
import { i18n, ROOT_NAMESPACE } from '~/import_entities/import_groups/constants';
@@ -49,6 +49,8 @@ describe('import table', () => {
const findImportSelectedButton = () =>
wrapper.findAll('button').wrappers.find((w) => w.text() === 'Import selected');
+ const findImportSelectedDropdown = () =>
+ wrapper.findAll('.gl-dropdown').wrappers.find((w) => w.text().includes('Import with projects'));
const findImportButtons = () =>
wrapper.findAll('button').wrappers.filter((w) => w.text() === 'Import');
const findPaginationDropdown = () => wrapper.find('[data-testid="page-size"]');
@@ -64,7 +66,12 @@ describe('import table', () => {
const selectRow = (idx) =>
wrapper.findAll('tbody td input[type=checkbox]').at(idx).setChecked(true);
- const createComponent = ({ bulkImportSourceGroups, importGroups, defaultTargetNamespace }) => {
+ const createComponent = ({
+ bulkImportSourceGroups,
+ importGroups,
+ defaultTargetNamespace,
+ glFeatures = {},
+ }) => {
apolloProvider = createMockApollo(
[
[
@@ -93,6 +100,9 @@ describe('import table', () => {
directives: {
GlTooltip: createMockDirective(),
},
+ provide: {
+ glFeatures,
+ },
apolloProvider,
});
};
@@ -258,7 +268,7 @@ describe('import table', () => {
},
});
- axiosMock.onPost('/import/bulk_imports.json').reply(httpStatus.BAD_REQUEST);
+ axiosMock.onPost('/import/bulk_imports.json').reply(HTTP_STATUS_BAD_REQUEST);
await waitForPromises();
await findImportButtons()[0].trigger('click');
@@ -530,16 +540,16 @@ describe('import table', () => {
mutation: importGroupsMutation,
variables: {
importRequests: [
- {
+ expect.objectContaining({
targetNamespace: AVAILABLE_NAMESPACES[0].fullPath,
newName: NEW_GROUPS[0].lastImportTarget.newName,
sourceGroupId: NEW_GROUPS[0].id,
- },
- {
+ }),
+ expect.objectContaining({
targetNamespace: AVAILABLE_NAMESPACES[0].fullPath,
newName: NEW_GROUPS[1].lastImportTarget.newName,
sourceGroupId: NEW_GROUPS[1].id,
- },
+ }),
],
},
});
@@ -610,4 +620,83 @@ describe('import table', () => {
expect(wrapper.findComponent(GlAlert).exists()).toBe(false);
});
});
+
+ describe('when import projects is enabled', () => {
+ const NEW_GROUPS = [
+ generateFakeEntry({ id: 1, status: STATUSES.NONE }),
+ generateFakeEntry({ id: 2, status: STATUSES.NONE }),
+ generateFakeEntry({ id: 3, status: STATUSES.FINISHED }),
+ ];
+
+ beforeEach(() => {
+ createComponent({
+ bulkImportSourceGroups: () => ({
+ nodes: NEW_GROUPS,
+ pageInfo: FAKE_PAGE_INFO,
+ versionValidation: FAKE_VERSION_VALIDATION,
+ }),
+ glFeatures: {
+ bulkImportProjects: true,
+ },
+ });
+ jest.spyOn(apolloProvider.defaultClient, 'mutate');
+ return waitForPromises();
+ });
+
+ it('renders import all dropdown', async () => {
+ expect(findImportSelectedDropdown().exists()).toBe(true);
+ });
+
+ it('includes migrateProjects: true when dropdown is clicked', async () => {
+ await selectRow(0);
+ await selectRow(1);
+ await nextTick();
+ await findImportSelectedDropdown().find('button').trigger('click');
+ expect(apolloProvider.defaultClient.mutate).toHaveBeenCalledWith({
+ mutation: importGroupsMutation,
+ variables: {
+ importRequests: [
+ expect.objectContaining({
+ targetNamespace: AVAILABLE_NAMESPACES[0].fullPath,
+ newName: NEW_GROUPS[0].lastImportTarget.newName,
+ sourceGroupId: NEW_GROUPS[0].id,
+ migrateProjects: true,
+ }),
+ expect.objectContaining({
+ targetNamespace: AVAILABLE_NAMESPACES[0].fullPath,
+ newName: NEW_GROUPS[1].lastImportTarget.newName,
+ sourceGroupId: NEW_GROUPS[1].id,
+ migrateProjects: true,
+ }),
+ ],
+ },
+ });
+ });
+
+ it('includes migrateProjects: false when dropdown item is clicked', async () => {
+ await selectRow(0);
+ await selectRow(1);
+ await nextTick();
+ await findImportSelectedDropdown().find('.gl-dropdown-item button').trigger('click');
+ expect(apolloProvider.defaultClient.mutate).toHaveBeenCalledWith({
+ mutation: importGroupsMutation,
+ variables: {
+ importRequests: [
+ expect.objectContaining({
+ targetNamespace: AVAILABLE_NAMESPACES[0].fullPath,
+ newName: NEW_GROUPS[0].lastImportTarget.newName,
+ sourceGroupId: NEW_GROUPS[0].id,
+ migrateProjects: false,
+ }),
+ expect.objectContaining({
+ targetNamespace: AVAILABLE_NAMESPACES[0].fullPath,
+ newName: NEW_GROUPS[1].lastImportTarget.newName,
+ sourceGroupId: NEW_GROUPS[1].id,
+ migrateProjects: false,
+ }),
+ ],
+ },
+ });
+ });
+ });
});
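Swapping the literal payload objects for expect.objectContaining keeps these assertions stable as importRequests gains fields such as migrateProjects, because the matcher checks only the listed keys. A standalone illustration with made-up values:

// expect.objectContaining ignores keys that are not listed in the expected object.
const request = { sourceGroupId: 1, newName: 'group', targetNamespace: 'root', migrateProjects: true };
expect([request]).toEqual([
  expect.objectContaining({ sourceGroupId: 1, newName: 'group' }), // still passes with the extra keys
]);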
diff --git a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
index adc4ebcffb8..ce111a0c10c 100644
--- a/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
+++ b/spec/frontend/import_entities/import_groups/graphql/client_factory_spec.js
@@ -13,7 +13,7 @@ import updateImportStatusMutation from '~/import_entities/import_groups/graphql/
import bulkImportSourceGroupsQuery from '~/import_entities/import_groups/graphql/queries/bulk_import_source_groups.query.graphql';
import axios from '~/lib/utils/axios_utils';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { statusEndpointFixture } from './fixtures';
jest.mock('~/flash');
@@ -52,7 +52,7 @@ describe('Bulk import resolvers', () => {
axiosMockAdapter = new MockAdapter(axios);
client = createClient();
- axiosMockAdapter.onGet(FAKE_ENDPOINTS.status).reply(httpStatus.OK, statusEndpointFixture);
+ axiosMockAdapter.onGet(FAKE_ENDPOINTS.status).reply(HTTP_STATUS_OK, statusEndpointFixture);
client.watchQuery({ query: bulkImportSourceGroupsQuery }).subscribe(({ data }) => {
results = data.bulkImportSourceGroups.nodes;
});
@@ -143,7 +143,7 @@ describe('Bulk import resolvers', () => {
it('sets import status to CREATED for successful groups when request completes', async () => {
axiosMockAdapter
.onPost(FAKE_ENDPOINTS.createBulkImport)
- .reply(httpStatus.OK, [{ success: true, id: 1 }]);
+ .reply(HTTP_STATUS_OK, [{ success: true, id: 1 }]);
await client.mutate({
mutation: importGroupsMutation,
@@ -163,7 +163,7 @@ describe('Bulk import resolvers', () => {
});
it('sets import status to CREATED for successful groups when request completes with legacy response', async () => {
- axiosMockAdapter.onPost(FAKE_ENDPOINTS.createBulkImport).reply(httpStatus.OK, { id: 1 });
+ axiosMockAdapter.onPost(FAKE_ENDPOINTS.createBulkImport).reply(HTTP_STATUS_OK, { id: 1 });
await client.mutate({
mutation: importGroupsMutation,
@@ -186,7 +186,7 @@ describe('Bulk import resolvers', () => {
const FAKE_ERROR_MESSAGE = 'foo';
axiosMockAdapter
.onPost(FAKE_ENDPOINTS.createBulkImport)
- .reply(httpStatus.OK, [{ success: false, id: 1, message: FAKE_ERROR_MESSAGE }]);
+ .reply(HTTP_STATUS_OK, [{ success: false, id: 1, message: FAKE_ERROR_MESSAGE }]);
await client.mutate({
mutation: importGroupsMutation,
@@ -210,7 +210,7 @@ describe('Bulk import resolvers', () => {
it('updateImportStatus updates status', async () => {
axiosMockAdapter
.onPost(FAKE_ENDPOINTS.createBulkImport)
- .reply(httpStatus.OK, [{ success: true, id: 1 }]);
+ .reply(HTTP_STATUS_OK, [{ success: true, id: 1 }]);
const NEW_STATUS = 'dummy';
await client.mutate({
diff --git a/spec/frontend/incidents_settings/components/incidents_settings_service_spec.js b/spec/frontend/incidents_settings/components/incidents_settings_service_spec.js
index 08c407cc4b4..1d1b285c1b6 100644
--- a/spec/frontend/incidents_settings/components/incidents_settings_service_spec.js
+++ b/spec/frontend/incidents_settings/components/incidents_settings_service_spec.js
@@ -3,7 +3,7 @@ import { createAlert } from '~/flash';
import { ERROR_MSG } from '~/incidents_settings/constants';
import IncidentsSettingsService from '~/incidents_settings/incidents_settings_service';
import axios from '~/lib/utils/axios_utils';
-import httpStatusCodes from '~/lib/utils/http_status';
+import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { refreshCurrentPage } from '~/lib/utils/url_utility';
jest.mock('~/flash');
@@ -26,7 +26,7 @@ describe('IncidentsSettingsService', () => {
describe('updateSettings', () => {
it('should refresh the page on successful update', () => {
- mock.onPatch().reply(httpStatusCodes.OK);
+ mock.onPatch().reply(HTTP_STATUS_OK);
return service.updateSettings({}).then(() => {
expect(refreshCurrentPage).toHaveBeenCalled();
@@ -34,7 +34,7 @@ describe('IncidentsSettingsService', () => {
});
it('should display a flash message on update error', () => {
- mock.onPatch().reply(httpStatusCodes.BAD_REQUEST);
+ mock.onPatch().reply(HTTP_STATUS_BAD_REQUEST);
return service.updateSettings({}).then(() => {
expect(createAlert).toHaveBeenCalledWith({
@@ -47,7 +47,7 @@ describe('IncidentsSettingsService', () => {
describe('resetWebhookUrl', () => {
it('should make a call for webhook update', () => {
jest.spyOn(axios, 'post');
- mock.onPost().reply(httpStatusCodes.OK);
+ mock.onPost().reply(HTTP_STATUS_OK);
return service.resetWebhookUrl().then(() => {
expect(axios.post).toHaveBeenCalledWith(webhookUpdateEndpoint);
diff --git a/spec/frontend/integrations/edit/components/integration_form_spec.js b/spec/frontend/integrations/edit/components/integration_form_spec.js
index 4b49e492880..383dfb36aa5 100644
--- a/spec/frontend/integrations/edit/components/integration_form_spec.js
+++ b/spec/frontend/integrations/edit/components/integration_form_spec.js
@@ -1,4 +1,4 @@
-import { GlAlert, GlBadge, GlForm } from '@gitlab/ui';
+import { GlAlert, GlForm } from '@gitlab/ui';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
@@ -11,18 +11,16 @@ import DynamicField from '~/integrations/edit/components/dynamic_field.vue';
import IntegrationForm from '~/integrations/edit/components/integration_form.vue';
import OverrideDropdown from '~/integrations/edit/components/override_dropdown.vue';
import TriggerFields from '~/integrations/edit/components/trigger_fields.vue';
-import IntegrationSectionConnection from '~/integrations/edit/components/sections/connection.vue';
import IntegrationFormActions from '~/integrations/edit/components/integration_form_actions.vue';
+import IntegrationFormSection from '~/integrations/edit/components/integration_forms/section.vue';
import {
I18N_SUCCESSFUL_CONNECTION_MESSAGE,
I18N_DEFAULT_ERROR_MESSAGE,
INTEGRATION_FORM_TYPE_SLACK,
- billingPlans,
- billingPlanNames,
} from '~/integrations/constants';
import { createStore } from '~/integrations/edit/store';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { refreshCurrentPage } from '~/lib/utils/url_utility';
import {
mockIntegrationProps,
@@ -73,15 +71,11 @@ describe('IntegrationForm', () => {
const findActiveCheckbox = () => wrapper.findComponent(ActiveCheckbox);
const findTriggerFields = () => wrapper.findComponent(TriggerFields);
const findAlert = () => wrapper.findComponent(GlAlert);
- const findGlBadge = () => wrapper.findComponent(GlBadge);
const findGlForm = () => wrapper.findComponent(GlForm);
const findRedirectToField = () => wrapper.findByTestId('redirect-to-field');
const findDynamicField = () => wrapper.findComponent(DynamicField);
const findAllDynamicFields = () => wrapper.findAllComponents(DynamicField);
- const findAllSections = () => wrapper.findAllByTestId('integration-section');
- const findConnectionSection = () => findAllSections().at(0);
- const findConnectionSectionComponent = () =>
- findConnectionSection().findComponent(IntegrationSectionConnection);
+ const findAllSections = () => wrapper.findAllComponents(IntegrationFormSection);
const findHelpHtml = () => wrapper.findByTestId('help-html');
const findFormActions = () => wrapper.findComponent(IntegrationFormActions);
@@ -215,54 +209,13 @@ describe('IntegrationForm', () => {
beforeEach(() => {
createComponent({
customStateProps: {
- sections: [mockSectionConnection],
- },
- });
- });
-
- it('renders the expected number of sections', () => {
- expect(findAllSections().length).toBe(1);
- });
-
- it('renders title, description and the correct dynamic component', () => {
- const connectionSection = findConnectionSection();
-
- expect(connectionSection.find('h4').text()).toBe(mockSectionConnection.title);
- expect(connectionSection.find('p').text()).toBe(mockSectionConnection.description);
- expect(findGlBadge().exists()).toBe(false);
- expect(findConnectionSectionComponent().exists()).toBe(true);
- });
-
- it('renders GlBadge when `plan` is present', () => {
- createComponent({
- customStateProps: {
sections: [mockSectionConnection, mockSectionJiraIssues],
},
});
-
- expect(findGlBadge().exists()).toBe(true);
- expect(findGlBadge().text()).toMatchInterpolatedText(billingPlanNames[billingPlans.PREMIUM]);
});
- it('passes only fields with section type', () => {
- const sectionFields = [
- { name: 'username', type: 'text', section: mockSectionConnection.type },
- { name: 'API token', type: 'password', section: mockSectionConnection.type },
- ];
-
- const nonSectionFields = [
- { name: 'branch', type: 'text' },
- { name: 'labels', type: 'select' },
- ];
-
- createComponent({
- customStateProps: {
- sections: [mockSectionConnection],
- fields: [...sectionFields, ...nonSectionFields],
- },
- });
-
- expect(findConnectionSectionComponent().props('fields')).toEqual(sectionFields);
+ it('renders the expected number of sections', () => {
+ expect(findAllSections()).toHaveLength(2);
});
describe.each`
@@ -281,7 +234,8 @@ describe('IntegrationForm', () => {
},
});
- findConnectionSectionComponent().vm.$emit('toggle-integration-active', formActive);
+ const section = findAllSections().at(0);
+ section.vm.$emit('toggle-integration-active', formActive);
});
it(`sets noValidate to ${novalidate}`, () => {
@@ -290,7 +244,7 @@ describe('IntegrationForm', () => {
},
);
- describe('when IntegrationSectionConnection emits `request-jira-issue-types` event', () => {
+ describe('when section emits `request-jira-issue-types` event', () => {
beforeEach(() => {
jest.spyOn(document, 'querySelector').mockReturnValue(document.createElement('form'));
@@ -302,7 +256,8 @@ describe('IntegrationForm', () => {
mountFn: mountExtended,
});
- findConnectionSectionComponent().vm.$emit('request-jira-issue-types');
+ const section = findAllSections().at(0);
+ section.vm.$emit('request-jira-issue-types');
});
it('dispatches `requestJiraIssueTypes` action', () => {
@@ -456,11 +411,11 @@ describe('IntegrationForm', () => {
});
describe.each`
- scenario | replyStatus | errorMessage | serviceResponse | expectToast | expectSentry
- ${'when "test settings" request fails'} | ${httpStatus.INTERNAL_SERVER_ERROR} | ${undefined} | ${undefined} | ${I18N_DEFAULT_ERROR_MESSAGE} | ${true}
- ${'when "test settings" returns an error'} | ${httpStatus.OK} | ${'an error'} | ${undefined} | ${'an error'} | ${false}
- ${'when "test settings" returns an error with details'} | ${httpStatus.OK} | ${'an error.'} | ${'extra info'} | ${'an error. extra info'} | ${false}
- ${'when "test settings" succeeds'} | ${httpStatus.OK} | ${undefined} | ${undefined} | ${I18N_SUCCESSFUL_CONNECTION_MESSAGE} | ${false}
+ scenario | replyStatus | errorMessage | serviceResponse | expectToast | expectSentry
+ ${'when "test settings" request fails'} | ${HTTP_STATUS_INTERNAL_SERVER_ERROR} | ${undefined} | ${undefined} | ${I18N_DEFAULT_ERROR_MESSAGE} | ${true}
+ ${'when "test settings" returns an error'} | ${HTTP_STATUS_OK} | ${'an error'} | ${undefined} | ${'an error'} | ${false}
+ ${'when "test settings" returns an error with details'} | ${HTTP_STATUS_OK} | ${'an error.'} | ${'extra info'} | ${'an error. extra info'} | ${false}
+ ${'when "test settings" succeeds'} | ${HTTP_STATUS_OK} | ${undefined} | ${undefined} | ${I18N_SUCCESSFUL_CONNECTION_MESSAGE} | ${false}
`(
'$scenario',
({ replyStatus, errorMessage, serviceResponse, expectToast, expectSentry }) => {
@@ -491,7 +446,7 @@ describe('IntegrationForm', () => {
const mockResetPath = '/reset';
beforeEach(async () => {
- mockAxios.onPost(mockResetPath).replyOnce(httpStatus.INTERNAL_SERVER_ERROR);
+ mockAxios.onPost(mockResetPath).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
createComponent({
customStateProps: {
resetPath: mockResetPath,
@@ -526,7 +481,7 @@ describe('IntegrationForm', () => {
describe('when "reset settings" succeeds', () => {
beforeEach(async () => {
- mockAxios.onPost(mockResetPath).replyOnce(httpStatus.OK);
+ mockAxios.onPost(mockResetPath).replyOnce(HTTP_STATUS_OK);
createComponent({
customStateProps: {
resetPath: mockResetPath,
diff --git a/spec/frontend/integrations/edit/components/integration_forms/section_spec.js b/spec/frontend/integrations/edit/components/integration_forms/section_spec.js
new file mode 100644
index 00000000000..5f82941778e
--- /dev/null
+++ b/spec/frontend/integrations/edit/components/integration_forms/section_spec.js
@@ -0,0 +1,109 @@
+import { GlBadge } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { billingPlans, billingPlanNames } from '~/integrations/constants';
+import DynamicField from '~/integrations/edit/components/dynamic_field.vue';
+import IntegrationFormSection from '~/integrations/edit/components/integration_forms/section.vue';
+import IntegrationSectionConnection from '~/integrations/edit/components/sections/connection.vue';
+import { createStore } from '~/integrations/edit/store';
+import {
+ mockIntegrationProps,
+ mockSectionConnection,
+ mockSectionJiraIssues,
+} from '../../mock_data';
+
+describe('Integration Form Section', () => {
+ let wrapper;
+
+ const defaultProps = {
+ section: mockSectionConnection,
+ isValidated: false,
+ };
+
+ const createComponent = ({
+ customStateProps = {},
+ props = {},
+ mountFn = shallowMountExtended,
+ } = {}) => {
+ const store = createStore({
+ customState: {
+ ...mockIntegrationProps,
+ ...customStateProps,
+ },
+ });
+
+ wrapper = mountFn(IntegrationFormSection, {
+ store,
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ stubs: {
+ IntegrationSectionConnection,
+ },
+ });
+ };
+
+ const findGlBadge = () => wrapper.findComponent(GlBadge);
+ const findFieldsComponent = () => wrapper.findComponent(IntegrationSectionConnection);
+ const findAllDynamicFields = () => wrapper.findAllComponents(DynamicField);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders title, description and the correct dynamic component', () => {
+ expect(wrapper.findByText(mockSectionConnection.title).exists()).toBe(true);
+ expect(wrapper.findByText(mockSectionConnection.description).exists()).toBe(true);
+ expect(findGlBadge().exists()).toBe(false);
+ });
+
+ it('renders GlBadge when `plan` is present', () => {
+ createComponent({
+ props: {
+ section: mockSectionJiraIssues,
+ },
+ });
+
+ expect(findGlBadge().exists()).toBe(true);
+ expect(findGlBadge().text()).toMatchInterpolatedText(billingPlanNames[billingPlans.PREMIUM]);
+ });
+
+ it('renders only fields for this section type', () => {
+ const sectionFields = [
+ { name: 'username', type: 'text', section: mockSectionConnection.type },
+ { name: 'API token', type: 'password', section: mockSectionConnection.type },
+ ];
+
+ const nonSectionFields = [{ name: 'branch', type: 'text' }];
+
+ createComponent({
+ customStateProps: {
+ fields: [...sectionFields, ...nonSectionFields],
+ },
+ });
+
+ expect(findAllDynamicFields()).toHaveLength(2);
+ sectionFields.forEach((field, index) => {
+ expect(findAllDynamicFields().at(index).props('name')).toBe(field.name);
+ });
+ });
+
+ describe('events proxied from the section', () => {
+ let section;
+ const dummyPayload = 'foo';
+
+ beforeEach(() => {
+ section = findFieldsComponent();
+ });
+
+ it('toggle-integration-active', () => {
+ section.vm.$emit('toggle-integration-active', dummyPayload);
+ expect(wrapper.emitted('toggle-integration-active')).toEqual([[dummyPayload]]);
+ });
+
+ it('request-jira-issue-types', () => {
+ section.vm.$emit('request-jira-issue-types', dummyPayload);
+ expect(wrapper.emitted('request-jira-issue-types')).toEqual([[dummyPayload]]);
+ });
+ });
+});
diff --git a/spec/frontend/integrations/edit/components/trigger_field_spec.js b/spec/frontend/integrations/edit/components/trigger_field_spec.js
index 6a68337813e..ed0b3324708 100644
--- a/spec/frontend/integrations/edit/components/trigger_field_spec.js
+++ b/spec/frontend/integrations/edit/components/trigger_field_spec.js
@@ -1,6 +1,6 @@
import { nextTick } from 'vue';
import { shallowMount } from '@vue/test-utils';
-import { GlFormCheckbox } from '@gitlab/ui';
+import { GlFormCheckbox, GlFormInput } from '@gitlab/ui';
import TriggerField from '~/integrations/edit/components/trigger_field.vue';
import { integrationTriggerEventTitles } from '~/integrations/constants';
@@ -10,7 +10,9 @@ describe('TriggerField', () => {
const defaultProps = {
event: { name: 'push_events' },
+ type: 'gitlab_slack_application',
};
+ const mockField = { name: 'push_channel' };
const createComponent = ({ props = {}, isInheriting = false } = {}) => {
wrapper = shallowMount(TriggerField, {
@@ -26,6 +28,7 @@ describe('TriggerField', () => {
});
const findGlFormCheckbox = () => wrapper.findComponent(GlFormCheckbox);
+ const findGlFormInput = () => wrapper.findComponent(GlFormInput);
const findHiddenInput = () => wrapper.find('input[type="hidden"]');
describe('template', () => {
@@ -55,6 +58,32 @@ describe('TriggerField', () => {
expect(findHiddenInput().attributes('value')).toBe('false');
});
+ it('renders hidden GlFormInput', () => {
+ createComponent({
+ props: {
+ event: { name: 'push_events', field: mockField },
+ },
+ });
+
+ expect(findGlFormInput().exists()).toBe(true);
+ expect(findGlFormInput().isVisible()).toBe(false);
+ });
+
+ describe('checkbox is selected', () => {
+ it('renders visible GlFormInput', async () => {
+ createComponent({
+ props: {
+ event: { name: 'push_events', field: mockField },
+ },
+ });
+
+ await findGlFormCheckbox().vm.$emit('input', true);
+
+ expect(findGlFormInput().exists()).toBe(true);
+ expect(findGlFormInput().isVisible()).toBe(true);
+ });
+ });
+
it('toggles value of hidden input on checkbox input', async () => {
createComponent({
props: { event: { name: 'push_events', value: true } },
diff --git a/spec/frontend/integrations/overrides/components/integration_overrides_spec.js b/spec/frontend/integrations/overrides/components/integration_overrides_spec.js
index fd60d7f817f..fdb728281b5 100644
--- a/spec/frontend/integrations/overrides/components/integration_overrides_spec.js
+++ b/spec/frontend/integrations/overrides/components/integration_overrides_spec.js
@@ -8,7 +8,7 @@ import IntegrationOverrides from '~/integrations/overrides/components/integratio
import IntegrationTabs from '~/integrations/overrides/components/integration_tabs.vue';
import axios from '~/lib/utils/axios_utils';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import ProjectAvatar from '~/vue_shared/components/project_avatar.vue';
import UrlSync from '~/vue_shared/components/url_sync.vue';
@@ -39,7 +39,7 @@ describe('IntegrationOverrides', () => {
beforeEach(() => {
mockAxios = new MockAdapter(axios);
- mockAxios.onGet(defaultProps.overridesPath).reply(httpStatus.OK, mockOverrides, {
+ mockAxios.onGet(defaultProps.overridesPath).reply(HTTP_STATUS_OK, mockOverrides, {
'X-TOTAL': mockOverrides.length,
'X-PAGE': 1,
});
@@ -125,7 +125,7 @@ describe('IntegrationOverrides', () => {
describe('when request fails', () => {
beforeEach(async () => {
jest.spyOn(Sentry, 'captureException');
- mockAxios.onGet(defaultProps.overridesPath).reply(httpStatus.INTERNAL_SERVER_ERROR);
+ mockAxios.onGet(defaultProps.overridesPath).reply(HTTP_STATUS_INTERNAL_SERVER_ERROR);
createComponent();
await waitForPromises();
@@ -150,7 +150,7 @@ describe('IntegrationOverrides', () => {
describe('pagination', () => {
describe('when total items does not exceed the page limit', () => {
it('does not render', async () => {
- mockAxios.onGet(defaultProps.overridesPath).reply(httpStatus.OK, [mockOverrides[0]], {
+ mockAxios.onGet(defaultProps.overridesPath).reply(HTTP_STATUS_OK, [mockOverrides[0]], {
'X-TOTAL': DEFAULT_PER_PAGE - 1,
'X-PAGE': 1,
});
@@ -169,7 +169,7 @@ describe('IntegrationOverrides', () => {
beforeEach(async () => {
createComponent({ stubs: { UrlSync } });
- mockAxios.onGet(defaultProps.overridesPath).reply(httpStatus.OK, [mockOverrides[0]], {
+ mockAxios.onGet(defaultProps.overridesPath).reply(HTTP_STATUS_OK, [mockOverrides[0]], {
'X-TOTAL': DEFAULT_PER_PAGE * 2,
'X-PAGE': mockPage,
});
diff --git a/spec/frontend/invite_members/components/invite_members_modal_spec.js b/spec/frontend/invite_members/components/invite_members_modal_spec.js
index 22fcedb2eaf..b6b34e1063b 100644
--- a/spec/frontend/invite_members/components/invite_members_modal_spec.js
+++ b/spec/frontend/invite_members/components/invite_members_modal_spec.js
@@ -24,7 +24,11 @@ import {
import eventHub from '~/invite_members/event_hub';
import ContentTransition from '~/vue_shared/components/content_transition.vue';
import axios from '~/lib/utils/axios_utils';
-import httpStatus, { HTTP_STATUS_CREATED } from '~/lib/utils/http_status';
+import {
+ HTTP_STATUS_BAD_REQUEST,
+ HTTP_STATUS_CREATED,
+ HTTP_STATUS_INTERNAL_SERVER_ERROR,
+} from '~/lib/utils/http_status';
import { getParameterValues } from '~/lib/utils/url_utility';
import {
displaySuccessfulInvitationAlert,
@@ -361,7 +365,7 @@ describe('InviteMembersModal', () => {
describe('rendering the user limit notification', () => {
it('shows the user limit notification alert when reached limit', () => {
- const usersLimitDataset = { reachedLimit: true };
+ const usersLimitDataset = { alertVariant: 'reached' };
createInviteMembersToProjectWrapper(usersLimitDataset);
@@ -369,7 +373,15 @@ describe('InviteMembersModal', () => {
});
it('shows the user limit notification alert when close to dashboard limit', () => {
- const usersLimitDataset = { closeToDashboardLimit: true };
+ const usersLimitDataset = { alertVariant: 'close' };
+
+ createInviteMembersToProjectWrapper(usersLimitDataset);
+
+ expect(findUserLimitAlert().exists()).toBe(true);
+ });
+
+ it('shows the user limit notification alert when :preview_free_user_cap is enabled', () => {
+ const usersLimitDataset = { alertVariant: 'notification' };
createInviteMembersToProjectWrapper(usersLimitDataset);
@@ -549,7 +561,7 @@ describe('InviteMembersModal', () => {
it('displays the generic error for http server error', async () => {
mockInvitationsApi(
- httpStatus.INTERNAL_SERVER_ERROR,
+ HTTP_STATUS_INTERNAL_SERVER_ERROR,
'Request failed with status code 500',
);
@@ -648,7 +660,7 @@ describe('InviteMembersModal', () => {
});
it('displays the api error for invalid email syntax', async () => {
- mockInvitationsApi(httpStatus.BAD_REQUEST, invitationsApiResponse.EMAIL_INVALID);
+ mockInvitationsApi(HTTP_STATUS_BAD_REQUEST, invitationsApiResponse.EMAIL_INVALID);
clickInviteButton();
@@ -660,7 +672,7 @@ describe('InviteMembersModal', () => {
});
it('clears the error when the modal is hidden', async () => {
- mockInvitationsApi(httpStatus.BAD_REQUEST, invitationsApiResponse.EMAIL_INVALID);
+ mockInvitationsApi(HTTP_STATUS_BAD_REQUEST, invitationsApiResponse.EMAIL_INVALID);
clickInviteButton();
@@ -715,7 +727,7 @@ describe('InviteMembersModal', () => {
});
it('displays the invalid syntax error for bad request', async () => {
- mockInvitationsApi(httpStatus.BAD_REQUEST, invitationsApiResponse.ERROR_EMAIL_INVALID);
+ mockInvitationsApi(HTTP_STATUS_BAD_REQUEST, invitationsApiResponse.ERROR_EMAIL_INVALID);
clickInviteButton();
@@ -739,7 +751,7 @@ describe('InviteMembersModal', () => {
createInviteMembersToGroupWrapper();
await triggerMembersTokenSelect([user3, user4]);
- mockInvitationsApi(httpStatus.BAD_REQUEST, invitationsApiResponse.ERROR_EMAIL_INVALID);
+ mockInvitationsApi(HTTP_STATUS_BAD_REQUEST, invitationsApiResponse.ERROR_EMAIL_INVALID);
clickInviteButton();
diff --git a/spec/frontend/invite_members/components/user_limit_notification_spec.js b/spec/frontend/invite_members/components/user_limit_notification_spec.js
index 2a780490468..490b2e8bc7c 100644
--- a/spec/frontend/invite_members/components/user_limit_notification_spec.js
+++ b/spec/frontend/invite_members/components/user_limit_notification_spec.js
@@ -1,9 +1,14 @@
import { GlAlert, GlSprintf } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import UserLimitNotification from '~/invite_members/components/user_limit_notification.vue';
-import { REACHED_LIMIT_VARIANT, CLOSE_TO_LIMIT_VARIANT } from '~/invite_members/constants';
+import {
+ NOTIFICATION_LIMIT_VARIANT,
+ REACHED_LIMIT_VARIANT,
+ CLOSE_TO_LIMIT_VARIANT,
+} from '~/invite_members/constants';
import { freeUsersLimit, remainingSeats } from '../mock_data/member_modal';
+const INFO_ALERT_TITLE = 'Your top-level group name is over the 5 user limit.';
const WARNING_ALERT_TITLE = 'You only have space for 2 more members in name';
describe('UserLimitNotification', () => {
@@ -31,6 +36,17 @@ describe('UserLimitNotification', () => {
});
};
+ describe('when previewing free user cap', () => {
+ it("renders user's preview limit notification", () => {
+ createComponent(NOTIFICATION_LIMIT_VARIANT);
+
+ const alert = findAlert();
+
+ expect(alert.attributes('title')).toEqual(INFO_ALERT_TITLE);
+ expect(alert.text()).toContain('GitLab will enforce this limit in the future.');
+ });
+ });
+
describe('when close to limit within a group', () => {
it("renders user's limit notification", () => {
createComponent(CLOSE_TO_LIMIT_VARIANT);
@@ -51,7 +67,7 @@ describe('UserLimitNotification', () => {
expect(alert.attributes('title')).toEqual("You've reached your 5 members limit for name");
expect(alert.text()).toContain(
- 'To invite new users to this namespace, you must remove existing users.',
+ 'To invite new users to this top-level group, you must remove existing users.',
);
});
});
diff --git a/spec/frontend/issuable/components/issuable_by_email_spec.js b/spec/frontend/issuable/components/issuable_by_email_spec.js
index 01abf239e57..b04a6c0b8fd 100644
--- a/spec/frontend/issuable/components/issuable_by_email_spec.js
+++ b/spec/frontend/issuable/components/issuable_by_email_spec.js
@@ -5,7 +5,7 @@ import MockAdapter from 'axios-mock-adapter';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import IssuableByEmail from '~/issuable/components/issuable_by_email.vue';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_NOT_FOUND, HTTP_STATUS_OK } from '~/lib/utils/http_status';
const initialEmail = 'user@gitlab.com';
@@ -130,7 +130,7 @@ describe('IssuableByEmail', () => {
});
it('should update the email when the request succeeds', async () => {
- mockAxios.onPut(resetPath).reply(httpStatus.OK, { new_address: 'foo@bar.com' });
+ mockAxios.onPut(resetPath).reply(HTTP_STATUS_OK, { new_address: 'foo@bar.com' });
wrapper = createComponent({
issuableType: 'issue',
@@ -144,7 +144,7 @@ describe('IssuableByEmail', () => {
});
it('should show a toast message when the request fails', async () => {
- mockAxios.onPut(resetPath).reply(httpStatus.NOT_FOUND, {});
+ mockAxios.onPut(resetPath).reply(HTTP_STATUS_NOT_FOUND, {});
wrapper = createComponent({
issuableType: 'issue',
diff --git a/spec/frontend/issuable/components/issuable_header_warnings_spec.js b/spec/frontend/issuable/components/issuable_header_warnings_spec.js
index e3a36dc8820..99aa6778e1e 100644
--- a/spec/frontend/issuable/components/issuable_header_warnings_spec.js
+++ b/spec/frontend/issuable/components/issuable_header_warnings_spec.js
@@ -7,7 +7,7 @@ import createIssueStore from '~/notes/stores';
import IssuableHeaderWarnings from '~/issuable/components/issuable_header_warnings.vue';
const ISSUABLE_TYPE_ISSUE = 'issue';
-const ISSUABLE_TYPE_MR = 'merge request';
+const ISSUABLE_TYPE_MR = 'merge_request';
Vue.use(Vuex);
@@ -57,6 +57,7 @@ describe('IssuableHeaderWarnings', () => {
beforeEach(() => {
store.getters.getNoteableData.confidential = confidentialStatus;
store.getters.getNoteableData.discussion_locked = lockStatus;
+ store.getters.getNoteableData.targetType = issuableType;
createComponent({ store, provide: { hidden: hiddenStatus } });
});
@@ -84,7 +85,7 @@ describe('IssuableHeaderWarnings', () => {
if (hiddenStatus) {
expect(hiddenIcon.attributes('title')).toBe(
- 'This issue is hidden because its author has been banned',
+ `This ${issuableType.replace('_', ' ')} is hidden because its author has been banned`,
);
expect(getBinding(hiddenIcon.element, 'gl-tooltip')).not.toBeUndefined();
}
diff --git a/spec/frontend/issuable/issuable_form_spec.js b/spec/frontend/issuable/issuable_form_spec.js
index 5e67ea42b87..28ec0e22d8b 100644
--- a/spec/frontend/issuable/issuable_form_spec.js
+++ b/spec/frontend/issuable/issuable_form_spec.js
@@ -35,8 +35,8 @@ describe('IssuableForm', () => {
let $description;
beforeEach(() => {
- $title = $form.find('input[name*="[title]"]');
- $description = $form.find('textarea[name*="[description]"]');
+ $title = $form.find('input[name*="[title]"]').get(0);
+ $description = $form.find('textarea[name*="[description]"]').get(0);
});
afterEach(() => {
@@ -103,7 +103,11 @@ describe('IssuableForm', () => {
createIssuable($form);
expect(Autosave).toHaveBeenCalledTimes(totalAutosaveFormFields);
- expect(Autosave).toHaveBeenLastCalledWith($input, ['/', '', id], `autosave///=${id}`);
+ expect(Autosave).toHaveBeenLastCalledWith(
+ $input.get(0),
+ ['/', '', id],
+ `autosave///=${id}`,
+ );
});
});
diff --git a/spec/frontend/issues/dashboard/components/issues_dashboard_app_spec.js b/spec/frontend/issues/dashboard/components/issues_dashboard_app_spec.js
index 3f40772f7fc..841cea28ffc 100644
--- a/spec/frontend/issues/dashboard/components/issues_dashboard_app_spec.js
+++ b/spec/frontend/issues/dashboard/components/issues_dashboard_app_spec.js
@@ -27,6 +27,9 @@ import { scrollUp } from '~/lib/utils/scroll_utils';
import {
TOKEN_TYPE_ASSIGNEE,
TOKEN_TYPE_AUTHOR,
+ TOKEN_TYPE_LABEL,
+ TOKEN_TYPE_MILESTONE,
+ TOKEN_TYPE_MY_REACTION,
} from '~/vue_shared/components/filtered_search_bar/constants';
import IssuableList from '~/vue_shared/issuable/list/components/issuable_list_root.vue';
import { IssuableStates } from '~/vue_shared/issuable/list/constants';
@@ -42,8 +45,12 @@ describe('IssuesDashboardApp component', () => {
Vue.use(VueApollo);
const defaultProvide = {
+ autocompleteAwardEmojisPath: 'autocomplete/award/emojis/path',
calendarPath: 'calendar/path',
- emptyStateSvgPath: 'empty-state.svg',
+ dashboardLabelsPath: 'dashboard/labels/path',
+ dashboardMilestonesPath: 'dashboard/milestones/path',
+ emptyStateWithFilterSvgPath: 'empty/state/with/filter/svg/path.svg',
+ emptyStateWithoutFilterSvgPath: 'empty/state/without/filter/svg/path.svg',
hasBlockedIssuesFeature: true,
hasIssuableHealthStatusFeature: true,
hasIssueWeightsFeature: true,
@@ -97,74 +104,122 @@ describe('IssuesDashboardApp component', () => {
axiosMock.reset();
});
- it('renders IssuableList component', async () => {
- mountComponent();
- jest.runOnlyPendingTimers();
- await waitForPromises();
-
- expect(findIssuableList().props()).toMatchObject({
- currentTab: IssuableStates.Opened,
- hasNextPage: true,
- hasPreviousPage: false,
- hasScopedLabelsFeature: defaultProvide.hasScopedLabelsFeature,
- initialSortBy: CREATED_DESC,
- issuables: issuesQueryResponse.data.issues.nodes,
- issuablesLoading: false,
- namespace: 'dashboard',
- recentSearchesStorageKey: 'issues',
- searchInputPlaceholder: IssuesDashboardApp.i18n.searchInputPlaceholder,
- showPaginationControls: true,
- sortOptions: getSortOptions({
- hasBlockedIssuesFeature: defaultProvide.hasBlockedIssuesFeature,
- hasIssuableHealthStatusFeature: defaultProvide.hasIssuableHealthStatusFeature,
- hasIssueWeightsFeature: defaultProvide.hasIssueWeightsFeature,
- }),
- tabs: IssuesDashboardApp.IssuableListTabs,
- urlParams: {
- sort: urlSortParams[CREATED_DESC],
- state: IssuableStates.Opened,
- },
- useKeysetPagination: true,
+ describe('UI components', () => {
+ beforeEach(() => {
+ setWindowLocation(locationSearch);
+ mountComponent();
+ jest.runOnlyPendingTimers();
+ return waitForPromises();
});
- });
- it('renders RSS button link', () => {
- mountComponent();
+ it('renders IssuableList component', () => {
+ expect(findIssuableList().props()).toMatchObject({
+ currentTab: IssuableStates.Opened,
+ hasNextPage: true,
+ hasPreviousPage: false,
+ hasScopedLabelsFeature: defaultProvide.hasScopedLabelsFeature,
+ initialSortBy: CREATED_DESC,
+ issuables: issuesQueryResponse.data.issues.nodes,
+ issuablesLoading: false,
+ namespace: 'dashboard',
+ recentSearchesStorageKey: 'issues',
+ searchInputPlaceholder: IssuesDashboardApp.i18n.searchInputPlaceholder,
+ showPaginationControls: true,
+ sortOptions: getSortOptions({
+ hasBlockedIssuesFeature: defaultProvide.hasBlockedIssuesFeature,
+ hasIssuableHealthStatusFeature: defaultProvide.hasIssuableHealthStatusFeature,
+ hasIssueWeightsFeature: defaultProvide.hasIssueWeightsFeature,
+ }),
+ tabs: IssuesDashboardApp.IssuableListTabs,
+ urlParams: {
+ sort: urlSortParams[CREATED_DESC],
+ state: IssuableStates.Opened,
+ },
+ useKeysetPagination: true,
+ });
+ });
- expect(findRssButton().attributes('href')).toBe(defaultProvide.rssPath);
- expect(findRssButton().props('icon')).toBe('rss');
- });
+ it('renders RSS button link', () => {
+ expect(findRssButton().attributes('href')).toBe(defaultProvide.rssPath);
+ });
- it('renders calendar button link', () => {
- mountComponent();
+ it('renders calendar button link', () => {
+ expect(findCalendarButton().attributes('href')).toBe(defaultProvide.calendarPath);
+ });
+
+ it('renders issue time information', () => {
+ expect(findIssueCardTimeInfo().exists()).toBe(true);
+ });
- expect(findCalendarButton().attributes('href')).toBe(defaultProvide.calendarPath);
- expect(findCalendarButton().props('icon')).toBe('calendar');
+ it('renders issue statistics', () => {
+ expect(findIssueCardStatistics().exists()).toBe(true);
+ });
});
- it('renders issue time information', async () => {
- mountComponent();
- jest.runOnlyPendingTimers();
- await waitForPromises();
+ describe('fetching issues', () => {
+ describe('with a search query', () => {
+ describe('when there are issues returned', () => {
+ beforeEach(() => {
+ setWindowLocation(locationSearch);
+ mountComponent();
+ jest.runOnlyPendingTimers();
+ return waitForPromises();
+ });
- expect(findIssueCardTimeInfo().exists()).toBe(true);
- });
+ it('renders the issues', () => {
+ expect(findIssuableList().props('issuables')).toEqual(
+ defaultQueryResponse.data.issues.nodes,
+ );
+ });
- it('renders issue statistics', async () => {
- mountComponent();
- jest.runOnlyPendingTimers();
- await waitForPromises();
+ it('does not render empty state', () => {
+ expect(findEmptyState().exists()).toBe(false);
+ });
+ });
- expect(findIssueCardStatistics().exists()).toBe(true);
- });
+ describe('when there are no issues returned', () => {
+ beforeEach(() => {
+ setWindowLocation(locationSearch);
+ mountComponent({
+ issuesQueryHandler: jest.fn().mockResolvedValue(emptyIssuesQueryResponse),
+ });
+ return waitForPromises();
+ });
+
+ it('renders no issues', () => {
+ expect(findIssuableList().props('issuables')).toEqual([]);
+ });
+
+ it('renders empty state', () => {
+ expect(findEmptyState().props()).toMatchObject({
+ description: IssuesDashboardApp.i18n.emptyStateWithFilterDescription,
+ svgPath: defaultProvide.emptyStateWithFilterSvgPath,
+ title: IssuesDashboardApp.i18n.emptyStateWithFilterTitle,
+ });
+ });
+ });
+ });
+
+ describe('with no search query', () => {
+ let issuesQueryHandler;
+
+ beforeEach(() => {
+ issuesQueryHandler = jest.fn().mockResolvedValue(defaultQueryResponse);
+ mountComponent({ issuesQueryHandler });
+ return waitForPromises();
+ });
- it('renders empty state', async () => {
- mountComponent({ issuesQueryHandler: jest.fn().mockResolvedValue(emptyIssuesQueryResponse) });
- await waitForPromises();
+ it('does not call issues query', () => {
+ expect(issuesQueryHandler).not.toHaveBeenCalled();
+ });
- expect(findEmptyState().props()).toMatchObject({
- svgPath: defaultProvide.emptyStateSvgPath,
- title: IssuesDashboardApp.i18n.emptyStateTitle,
+ it('renders empty state', () => {
+ expect(findEmptyState().props()).toMatchObject({
+ description: null,
+ svgPath: defaultProvide.emptyStateWithoutFilterSvgPath,
+ title: IssuesDashboardApp.i18n.emptyStateWithoutFilterTitle,
+ });
+ });
});
});
@@ -233,6 +288,7 @@ describe('IssuesDashboardApp component', () => {
describe('when there is an error fetching issues', () => {
beforeEach(() => {
+ setWindowLocation(locationSearch);
mountComponent({ issuesQueryHandler: jest.fn().mockRejectedValue(new Error('ERROR')) });
jest.runOnlyPendingTimers();
return waitForPromises();
@@ -281,6 +337,9 @@ describe('IssuesDashboardApp component', () => {
expect(findIssuableList().props('searchTokens')).toMatchObject([
{ type: TOKEN_TYPE_ASSIGNEE, preloadedUsers },
{ type: TOKEN_TYPE_AUTHOR, preloadedUsers },
+ { type: TOKEN_TYPE_LABEL },
+ { type: TOKEN_TYPE_MILESTONE },
+ { type: TOKEN_TYPE_MY_REACTION },
]);
});
});
diff --git a/spec/frontend/issues/dashboard/utils_spec.js b/spec/frontend/issues/dashboard/utils_spec.js
new file mode 100644
index 00000000000..08d00eee3e3
--- /dev/null
+++ b/spec/frontend/issues/dashboard/utils_spec.js
@@ -0,0 +1,88 @@
+import AxiosMockAdapter from 'axios-mock-adapter';
+import fuzzaldrinPlus from 'fuzzaldrin-plus';
+import { AutocompleteCache } from '~/issues/dashboard/utils';
+import { MAX_LIST_SIZE } from '~/issues/list/constants';
+import axios from '~/lib/utils/axios_utils';
+
+describe('AutocompleteCache', () => {
+ let autocompleteCache;
+ let axiosMock;
+ const cacheName = 'name';
+ const searchProperty = 'property';
+ const url = 'url';
+
+ const data = [
+ { [searchProperty]: 'one' },
+ { [searchProperty]: 'two' },
+ { [searchProperty]: 'three' },
+ { [searchProperty]: 'four' },
+ { [searchProperty]: 'five' },
+ { [searchProperty]: 'six' },
+ { [searchProperty]: 'seven' },
+ { [searchProperty]: 'eight' },
+ { [searchProperty]: 'nine' },
+ { [searchProperty]: 'ten' },
+ { [searchProperty]: 'eleven' },
+ { [searchProperty]: 'twelve' },
+ { [searchProperty]: 'thirteen' },
+ { [searchProperty]: 'fourteen' },
+ { [searchProperty]: 'fifteen' },
+ ];
+
+ beforeEach(() => {
+ autocompleteCache = new AutocompleteCache();
+ axiosMock = new AxiosMockAdapter(axios);
+ });
+
+ afterEach(() => {
+ axiosMock.reset();
+ });
+
+ describe('when there is no cached data', () => {
+ let response;
+
+ beforeEach(async () => {
+ axiosMock.onGet(url).replyOnce(200, data);
+ response = await autocompleteCache.fetch({ url, cacheName, searchProperty });
+ });
+
+ it('fetches items via the API', () => {
+ expect(axiosMock.history.get[0].url).toBe(url);
+ });
+
+ it('returns a maximum of 10 items', () => {
+ expect(response).toHaveLength(MAX_LIST_SIZE);
+ });
+ });
+
+ describe('when there is cached data', () => {
+ let response;
+
+ beforeEach(async () => {
+ axiosMock.onGet(url).replyOnce(200, data);
+ jest.spyOn(fuzzaldrinPlus, 'filter');
+ // Populate cache
+ await autocompleteCache.fetch({ url, cacheName, searchProperty });
+ // Execute filtering on cache data
+ response = await autocompleteCache.fetch({ url, cacheName, searchProperty, search: 'een' });
+ });
+
+ it('returns filtered items based on search characters', () => {
+ expect(response).toEqual([
+ { [searchProperty]: 'fifteen' },
+ { [searchProperty]: 'thirteen' },
+ { [searchProperty]: 'fourteen' },
+ { [searchProperty]: 'eleven' },
+ { [searchProperty]: 'seven' },
+ ]);
+ });
+
+ it('filters using fuzzaldrinPlus', () => {
+ expect(fuzzaldrinPlus.filter).toHaveBeenCalled();
+ });
+
+ it('does not call the API', () => {
+ expect(axiosMock.history.get[1]).toBeUndefined();
+ });
+ });
+});
diff --git a/spec/frontend/issues/list/mock_data.js b/spec/frontend/issues/list/mock_data.js
index 0690501dee9..70b1521ff70 100644
--- a/spec/frontend/issues/list/mock_data.js
+++ b/spec/frontend/issues/list/mock_data.js
@@ -16,6 +16,7 @@ import {
TOKEN_TYPE_RELEASE,
TOKEN_TYPE_TYPE,
TOKEN_TYPE_WEIGHT,
+ TOKEN_TYPE_HEALTH,
} from '~/vue_shared/components/filtered_search_bar/constants';
export const getIssuesQueryResponse = {
@@ -149,6 +150,8 @@ export const locationSearch = [
'label_name[]=tv',
'not[label_name][]=live action',
'not[label_name][]=drama',
+ 'or[label_name][]=comedy',
+ 'or[label_name][]=sitcom',
'release_tag=v3',
'release_tag=v4',
'not[release_tag]=v20',
@@ -170,6 +173,8 @@ export const locationSearch = [
'not[weight]=3',
'crm_contact_id=123',
'crm_organization_id=456',
+ 'health_status=atRisk',
+ 'not[health_status]=onTrack',
].join('&');
export const locationSearchWithSpecialValues = [
@@ -182,6 +187,7 @@ export const locationSearchWithSpecialValues = [
'milestone_title=Upcoming',
'epic_id=None',
'weight=None',
+ 'health_status=None',
].join('&');
export const filteredTokens = [
@@ -204,6 +210,8 @@ export const filteredTokens = [
{ type: TOKEN_TYPE_LABEL, value: { data: 'tv', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_LABEL, value: { data: 'live action', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_LABEL, value: { data: 'drama', operator: OPERATOR_NOT } },
+ { type: TOKEN_TYPE_LABEL, value: { data: 'comedy', operator: OPERATOR_OR } },
+ { type: TOKEN_TYPE_LABEL, value: { data: 'sitcom', operator: OPERATOR_OR } },
{ type: TOKEN_TYPE_RELEASE, value: { data: 'v3', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_RELEASE, value: { data: 'v4', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_RELEASE, value: { data: 'v20', operator: OPERATOR_NOT } },
@@ -225,6 +233,8 @@ export const filteredTokens = [
{ type: TOKEN_TYPE_WEIGHT, value: { data: '3', operator: OPERATOR_NOT } },
{ type: TOKEN_TYPE_CONTACT, value: { data: '123', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_ORGANIZATION, value: { data: '456', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_HEALTH, value: { data: 'atRisk', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_HEALTH, value: { data: 'onTrack', operator: OPERATOR_NOT } },
{ type: FILTERED_SEARCH_TERM, value: { data: 'find' } },
{ type: FILTERED_SEARCH_TERM, value: { data: 'issues' } },
];
@@ -239,6 +249,7 @@ export const filteredTokensWithSpecialValues = [
{ type: TOKEN_TYPE_MILESTONE, value: { data: 'Upcoming', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_EPIC, value: { data: 'None', operator: OPERATOR_IS } },
{ type: TOKEN_TYPE_WEIGHT, value: { data: 'None', operator: OPERATOR_IS } },
+ { type: TOKEN_TYPE_HEALTH, value: { data: 'None', operator: OPERATOR_IS } },
];
export const apiParams = {
@@ -255,6 +266,7 @@ export const apiParams = {
weight: '1',
crmContactId: '123',
crmOrganizationId: '456',
+ healthStatusFilter: 'atRisk',
not: {
authorUsername: 'marge',
assigneeUsernames: ['patty', 'selma'],
@@ -266,10 +278,12 @@ export const apiParams = {
iterationId: ['20', '42'],
epicId: '34',
weight: '3',
+ healthStatusFilter: 'onTrack',
},
or: {
authorUsernames: ['burns', 'smithers'],
assigneeUsernames: ['carl', 'lenny'],
+ labelNames: ['comedy', 'sitcom'],
},
};
@@ -283,6 +297,7 @@ export const apiParamsWithSpecialValues = {
milestoneWildcardId: 'UPCOMING',
epicId: 'None',
weight: 'None',
+ healthStatusFilter: 'NONE',
};
export const urlParams = {
@@ -296,6 +311,7 @@ export const urlParams = {
'not[milestone_title]': ['season 20', 'season 30'],
'label_name[]': ['cartoon', 'tv'],
'not[label_name][]': ['live action', 'drama'],
+ 'or[label_name][]': ['comedy', 'sitcom'],
release_tag: ['v3', 'v4'],
'not[release_tag]': ['v20', 'v30'],
'type[]': ['issue', 'feature'],
@@ -311,6 +327,8 @@ export const urlParams = {
'not[weight]': '3',
crm_contact_id: '123',
crm_organization_id: '456',
+ health_status: 'atRisk',
+ 'not[health_status]': 'onTrack',
};
export const urlParamsWithSpecialValues = {
@@ -323,6 +341,7 @@ export const urlParamsWithSpecialValues = {
milestone_title: 'Upcoming',
epic_id: 'None',
weight: 'None',
+ health_status: 'None',
};
export const project1 = {
diff --git a/spec/frontend/issues/related_merge_requests/components/related_merge_requests_spec.js b/spec/frontend/issues/related_merge_requests/components/related_merge_requests_spec.js
index d30a8c081cc..8413b8463c1 100644
--- a/spec/frontend/issues/related_merge_requests/components/related_merge_requests_spec.js
+++ b/spec/frontend/issues/related_merge_requests/components/related_merge_requests_spec.js
@@ -1,4 +1,4 @@
-import { mount } from '@vue/test-utils';
+import { shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import mockData from 'test_fixtures/issues/related_merge_requests.json';
import axios from '~/lib/utils/axios_utils';
@@ -20,7 +20,7 @@ describe('RelatedMergeRequests', () => {
mock = new MockAdapter(axios);
mock.onGet(`${API_ENDPOINT}?per_page=100`).reply(200, mockData, { 'x-total': 2 });
- wrapper = mount(RelatedMergeRequests, {
+ wrapper = shallowMount(RelatedMergeRequests, {
store: createStore(),
propsData: {
endpoint: API_ENDPOINT,
@@ -49,7 +49,7 @@ describe('RelatedMergeRequests', () => {
});
});
- it('should return an array with single assingee', () => {
+ it('should return an array with single assignee', () => {
const mr = { assignee: assignees[0] };
expect(wrapper.vm.getAssignees(mr)).toEqual([assignees[0]]);
diff --git a/spec/frontend/issues/show/components/header_actions_spec.js b/spec/frontend/issues/show/components/header_actions_spec.js
index 7d6ca44e679..aaf228ae181 100644
--- a/spec/frontend/issues/show/components/header_actions_spec.js
+++ b/spec/frontend/issues/show/components/header_actions_spec.js
@@ -6,6 +6,7 @@ import { mockTracking } from 'helpers/tracking_helper';
import { createAlert, VARIANT_SUCCESS } from '~/flash';
import { IssuableStatus, IssueType } from '~/issues/constants';
import DeleteIssueModal from '~/issues/show/components/delete_issue_modal.vue';
+import AbuseCategorySelector from '~/abuse_reports/components/abuse_category_selector.vue';
import HeaderActions from '~/issues/show/components/header_actions.vue';
import { ISSUE_STATE_EVENT_CLOSE, ISSUE_STATE_EVENT_REOPEN } from '~/issues/show/constants';
import promoteToEpicMutation from '~/issues/show/queries/promote_to_epic.mutation.graphql';
@@ -38,8 +39,9 @@ describe('HeaderActions component', () => {
issueType: IssueType.Issue,
newIssuePath: 'gitlab-org/gitlab-test/-/issues/new',
projectPath: 'gitlab-org/gitlab-test',
- reportAbusePath:
- '-/abuse_reports/new?ref_url=http%3A%2F%2Flocalhost%2Fgitlab-org%2Fgitlab-test%2F-%2Fissues%2F32&user_id=1',
+ reportAbusePath: '-/abuse_reports/add_category',
+ reportedUserId: '1',
+ reportedFromUrl: 'http://localhost:/gitlab-org/-/issues/32',
submitAsSpamPath: 'gitlab-org/gitlab-test/-/issues/32/submit_as_spam',
};
@@ -401,4 +403,31 @@ describe('HeaderActions component', () => {
});
});
});
+
+ describe('abuse category selector', () => {
+ const findAbuseCategorySelector = () => wrapper.findComponent(AbuseCategorySelector);
+
+ beforeEach(() => {
+ wrapper = mountComponent({ props: { isIssueAuthor: false } });
+ });
+
+ it('renders', () => {
+ expect(findAbuseCategorySelector().exists()).toBe(true);
+ expect(findAbuseCategorySelector().props('showDrawer')).toEqual(false);
+ });
+
+ it('opens the drawer', async () => {
+ findDesktopDropdownItems().at(2).vm.$emit('click');
+
+ await nextTick();
+
+ expect(findAbuseCategorySelector().props('showDrawer')).toEqual(true);
+ });
+
+ it('closes the drawer', async () => {
+ await findAbuseCategorySelector().vm.$emit('close-drawer');
+
+ expect(findAbuseCategorySelector().props('showDrawer')).toEqual(false);
+ });
+ });
});
diff --git a/spec/frontend/issues/show/components/incidents/create_timeline_events_form_spec.js b/spec/frontend/issues/show/components/incidents/create_timeline_events_form_spec.js
index 1286617d64a..6c923cae0cc 100644
--- a/spec/frontend/issues/show/components/incidents/create_timeline_events_form_spec.js
+++ b/spec/frontend/issues/show/components/incidents/create_timeline_events_form_spec.js
@@ -1,6 +1,6 @@
import VueApollo from 'vue-apollo';
import Vue from 'vue';
-import { GlDatepicker } from '@gitlab/ui';
+import { GlDatepicker, GlListboxItem } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import CreateTimelineEvent from '~/issues/show/components/incidents/create_timeline_event.vue';
@@ -27,6 +27,7 @@ const mockInputData = {
incidentId: 'gid://gitlab/Issue/1',
note: 'test',
occurredAt: '2020-07-08T00:00:00.000Z',
+ timelineEventTagNames: ['Start time'],
};
describe('Create Timeline events', () => {
@@ -51,9 +52,14 @@ describe('Create Timeline events', () => {
findHourInput().setValue(inputDate.getHours());
findMinuteInput().setValue(inputDate.getMinutes());
};
+ const findListboxItems = () => wrapper.findAllComponents(GlListboxItem);
+ const setEventTags = () => {
+ findListboxItems().at(0).vm.$emit('select', true);
+ };
const fillForm = () => {
setDatetime();
setNoteInput();
+ setEventTags();
};
function createMockApolloProvider() {
@@ -80,6 +86,7 @@ describe('Create Timeline events', () => {
provide: {
fullPath: 'group/project',
issuableId: '1',
+ glFeatures: { incidentEventTags: true },
},
apolloProvider,
});
diff --git a/spec/frontend/issues/show/components/incidents/mock_data.js b/spec/frontend/issues/show/components/incidents/mock_data.js
index 9accfcea791..6606bed1567 100644
--- a/spec/frontend/issues/show/components/incidents/mock_data.js
+++ b/spec/frontend/issues/show/components/incidents/mock_data.js
@@ -74,6 +74,7 @@ const mockUpdatedEvent = {
action: 'comment',
occurredAt: '2022-07-01T12:47:00Z',
createdAt: '2022-07-20T12:47:40Z',
+ timelineEventTags: [],
};
export const timelineEventsQueryListResponse = {
diff --git a/spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js b/spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js
index d5b199cc790..f06d968a4c5 100644
--- a/spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js
+++ b/spec/frontend/issues/show/components/incidents/timeline_events_form_spec.js
@@ -1,11 +1,15 @@
import VueApollo from 'vue-apollo';
import Vue, { nextTick } from 'vue';
-import { GlDatepicker } from '@gitlab/ui';
-import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
+import { GlDatepicker, GlListbox } from '@gitlab/ui';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
import TimelineEventsForm from '~/issues/show/components/incidents/timeline_events_form.vue';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
-import { timelineFormI18n } from '~/issues/show/components/incidents/constants';
+import {
+ timelineFormI18n,
+ TIMELINE_EVENT_TAGS,
+ timelineEventTagsI18n,
+} from '~/issues/show/components/incidents/constants';
import { createAlert } from '~/flash';
import { useFakeDate } from 'helpers/fake_date';
@@ -17,17 +21,23 @@ const fakeDate = '2020-07-08T00:00:00.000Z';
const mockInputDate = new Date('2021-08-12');
+const mockTags = TIMELINE_EVENT_TAGS;
+
describe('Timeline events form', () => {
// July 8 2020
useFakeDate(fakeDate);
let wrapper;
- const mountComponent = ({ mountMethod = shallowMountExtended } = {}, props = {}) => {
+ const mountComponent = ({ mountMethod = mountExtended } = {}, props = {}, glFeatures = {}) => {
wrapper = mountMethod(TimelineEventsForm, {
+ provide: {
+ glFeatures,
+ },
propsData: {
showSaveAndAdd: true,
isEventProcessed: false,
...props,
+ tags: mockTags,
},
stubs: {
GlButton: true,
@@ -35,6 +45,10 @@ describe('Timeline events form', () => {
});
};
+ beforeEach(() => {
+ mountComponent();
+ });
+
afterEach(() => {
createAlert.mockReset();
wrapper.destroy();
@@ -48,16 +62,26 @@ describe('Timeline events form', () => {
const findDatePicker = () => wrapper.findComponent(GlDatepicker);
const findHourInput = () => wrapper.findByTestId('input-hours');
const findMinuteInput = () => wrapper.findByTestId('input-minutes');
- const setDatetime = () => {
- findDatePicker().vm.$emit('input', mockInputDate);
- findHourInput().setValue(5);
- findMinuteInput().setValue(45);
- };
+ const findTagDropdown = () => wrapper.findComponent(GlListbox);
const findTextarea = () => wrapper.findByTestId('input-note');
+ const findTextareaValue = () => findTextarea().element.value;
const findCountNumeric = (count) => wrapper.findByText(count);
const findCountVerbose = (count) => wrapper.findByText(`${count} characters remaining`);
const findCountHint = () => wrapper.findByText(timelineFormI18n.hint);
+ const setDatetime = () => {
+ findDatePicker().vm.$emit('input', mockInputDate);
+ findHourInput().setValue(5);
+ findMinuteInput().setValue(45);
+ };
+ const selectTags = async (tags) => {
+ findTagDropdown().vm.$emit(
+ 'select',
+ tags.map((x) => x.value),
+ );
+ await nextTick();
+ };
+ const selectOneTag = () => selectTags([mockTags[0]]);
const submitForm = async () => {
findSubmitButton().vm.$emit('click');
await waitForPromises();
@@ -90,23 +114,97 @@ describe('Timeline events form', () => {
]);
});
- describe('form button behaviour', () => {
+ describe('with incident_event_tag feature flag enabled', () => {
beforeEach(() => {
- mountComponent({ mountMethod: mountExtended });
+ mountComponent(
+ {},
+ {},
+ {
+ incidentEventTags: true,
+ },
+ );
+ });
+
+ describe('event tag dropdown', () => {
+ it('should render option list from provided array', () => {
+ expect(findTagDropdown().props('items')).toEqual(mockTags);
+ });
+
+ it('should allow choosing multiple tags', async () => {
+ await selectTags(mockTags);
+
+ expect(findTagDropdown().props('selected')).toEqual(mockTags.map((x) => x.value));
+ });
+
+ it('should show the default option when none is chosen', () => {
+ expect(findTagDropdown().props('toggleText')).toBe(timelineFormI18n.selectTags);
+ });
+
+ it('should show the tag, when one is selected', async () => {
+ await selectOneTag();
+
+ expect(findTagDropdown().props('toggleText')).toBe(timelineEventTagsI18n.startTime);
+ });
+
+ it('should show the number of selected tags when more than one is selected', async () => {
+ await selectTags(mockTags);
+
+ expect(findTagDropdown().props('toggleText')).toBe('2 tags');
+ });
+
+ it('should be cleared when clear is triggered', async () => {
+ await selectTags(mockTags);
+
+ // This component expects the parent to call `clear`, so calling it directly is the only way to trigger it here
+ wrapper.vm.clear();
+ await nextTick();
+
+ expect(findTagDropdown().props('toggleText')).toBe(timelineFormI18n.selectTags);
+ expect(findTagDropdown().props('selected')).toEqual([]);
+ });
+
+ it('should populate the incident note with tags if the note was empty', async () => {
+ await selectTags(mockTags);
+
+ expect(findTextareaValue()).toBe(
+ `${timelineFormI18n.areaDefaultMessage} ${mockTags
+ .map((x) => x.value.toLowerCase())
+ .join(', ')}`,
+ );
+ });
+
+ it('should populate the incident note with a tag but allow customising it', async () => {
+ await selectOneTag();
+
+ await findTextarea().setValue('my customised event note');
+
+ await nextTick();
+
+ expect(findTextareaValue()).toBe('my customised event note');
+ });
+
+ it('should not populate the incident note with a tag if it already had a note', async () => {
+ await findTextarea().setValue('hello');
+ await selectOneTag();
+
+ expect(findTextareaValue()).toBe('hello');
+ });
});
+ });
+ describe('form button behaviour', () => {
it('should save event on submit', async () => {
await submitForm();
expect(wrapper.emitted()).toEqual({
- 'save-event': [[{ note: '', occurredAt: fakeDate }, false]],
+ 'save-event': [[{ note: '', occurredAt: fakeDate, timelineEventTags: [] }, false]],
});
});
it('should save event on "submit and add another"', async () => {
await submitFormAndAddAnother();
expect(wrapper.emitted()).toEqual({
- 'save-event': [[{ note: '', occurredAt: fakeDate }, true]],
+ 'save-event': [[{ note: '', occurredAt: fakeDate, timelineEventTags: [] }, true]],
});
});
@@ -145,10 +243,6 @@ describe('Timeline events form', () => {
});
describe('form character limit', () => {
- beforeEach(() => {
- mountComponent({ mountMethod: mountExtended });
- });
-
it('sets a character limit hint', () => {
expect(findCountHint().exists()).toBe(true);
});
@@ -172,32 +266,32 @@ describe('Timeline events form', () => {
});
describe('Delete button', () => {
- it('does not show the delete button if showDelete prop is false', () => {
- mountComponent({ mountMethod: mountExtended }, { showDelete: false });
+ it('does not show the delete button if isEditing prop is false', () => {
+ mountComponent({ mountMethod: mountExtended }, { isEditing: false });
expect(findDeleteButton().exists()).toBe(false);
});
- it('shows the delete button if showDelete prop is true', () => {
- mountComponent({ mountMethod: mountExtended }, { showDelete: true });
+ it('shows the delete button if isEditing prop is true', () => {
+ mountComponent({ mountMethod: mountExtended }, { isEditing: true });
expect(findDeleteButton().exists()).toBe(true);
});
it('disables the delete button if isEventProcessed prop is true', () => {
- mountComponent({ mountMethod: mountExtended }, { showDelete: true, isEventProcessed: true });
+ mountComponent({ mountMethod: mountExtended }, { isEditing: true, isEventProcessed: true });
expect(findDeleteButton().props('disabled')).toBe(true);
});
it('does not disable the delete button if isEventProcessed prop is false', () => {
- mountComponent({ mountMethod: mountExtended }, { showDelete: true, isEventProcessed: false });
+ mountComponent({ mountMethod: mountExtended }, { isEditing: true, isEventProcessed: false });
expect(findDeleteButton().props('disabled')).toBe(false);
});
it('emits delete event on click', () => {
- mountComponent({ mountMethod: mountExtended }, { showDelete: true, isEventProcessed: true });
+ mountComponent({ mountMethod: mountExtended }, { isEditing: true, isEventProcessed: true });
deleteForm();
diff --git a/spec/frontend/jira_connect/branches/components/project_dropdown_spec.js b/spec/frontend/jira_connect/branches/components/project_dropdown_spec.js
index b0218a9df12..944854faab3 100644
--- a/spec/frontend/jira_connect/branches/components/project_dropdown_spec.js
+++ b/spec/frontend/jira_connect/branches/components/project_dropdown_spec.js
@@ -1,10 +1,4 @@
-import {
- GlAvatarLabeled,
- GlDropdown,
- GlDropdownItem,
- GlLoadingIcon,
- GlSearchBoxByType,
-} from '@gitlab/ui';
+import { GlAvatarLabeled, GlCollapsibleListbox, GlListboxItem } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
@@ -36,12 +30,8 @@ const mockQueryLoading = jest.fn().mockReturnValue(new Promise(() => {}));
describe('ProjectDropdown', () => {
let wrapper;
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
- const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
- const findDropdownItemByProjectId = (projectId) =>
- wrapper.find(`[data-testid="test-project-${projectId}"]`);
- const findSearchBox = () => wrapper.findComponent(GlSearchBoxByType);
+ const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findAllGlListboxItems = () => wrapper.findAllComponents(GlListboxItem);
function createMockApolloProvider({ mockGetProjectsQuery = mockGetProjectsQuerySuccess } = {}) {
Vue.use(VueApollo);
@@ -55,6 +45,7 @@ describe('ProjectDropdown', () => {
wrapper = mountFn(ProjectDropdown, {
apolloProvider: mockApollo || createMockApolloProvider(),
propsData: props,
+ stubs: { GlCollapsibleListbox },
});
}
@@ -72,16 +63,11 @@ describe('ProjectDropdown', () => {
it('sets dropdown `loading` prop to `true`', () => {
expect(findDropdown().props('loading')).toBe(true);
});
-
- it('renders loading icon in dropdown', () => {
- expect(findLoadingIcon().isVisible()).toBe(true);
- });
});
describe('when projects query succeeds', () => {
beforeEach(async () => {
createComponent();
- await waitForPromises();
await nextTick();
});
@@ -90,12 +76,19 @@ describe('ProjectDropdown', () => {
});
it('renders dropdown items with correct props', () => {
- const dropdownItems = findAllDropdownItems();
- const avatars = dropdownItems.wrappers.map((item) => item.findComponent(GlAvatarLabeled));
+ const dropdownItems = findDropdown().props('items');
+ expect(dropdownItems).toHaveLength(mockProjects.length);
+ expect(dropdownItems).toMatchObject(mockProjects);
+ });
+
+ it('renders dropdown items with correct template', () => {
+ expect(findAllGlListboxItems()).toHaveLength(mockProjects.length);
+ const avatars = findAllGlListboxItems().wrappers.map((item) =>
+ item.findComponent(GlAvatarLabeled),
+ );
const avatarAttributes = avatars.map((avatar) => avatar.attributes());
const avatarProps = avatars.map((avatar) => avatar.props());
- expect(dropdownItems.wrappers).toHaveLength(mockProjects.length);
expect(avatarProps).toMatchObject(
mockProjects.map((project) => ({
label: project.name,
@@ -113,8 +106,7 @@ describe('ProjectDropdown', () => {
describe('when selecting a dropdown item', () => {
it('emits `change` event with the selected project', async () => {
const mockProject = mockProjects[0];
- const itemToSelect = findDropdownItemByProjectId(mockProject.id);
- await itemToSelect.vm.$emit('click');
+ await findDropdown().vm.$emit('select', mockProject.id);
expect(wrapper.emitted('change')[0]).toEqual([mockProject]);
});
@@ -124,17 +116,11 @@ describe('ProjectDropdown', () => {
const mockProject = mockProjects[0];
beforeEach(() => {
- wrapper.setProps({
- selectedProject: mockProject,
- });
- });
-
- it('sets `isChecked` prop of the corresponding dropdown item to `true`', () => {
- expect(findDropdownItemByProjectId(mockProject.id).props('isChecked')).toBe(true);
+ createComponent({ props: { selectedProject: mockProject } });
});
- it('sets dropdown text to `selectedBranchName` value', () => {
- expect(findDropdown().props('text')).toBe(mockProject.nameWithNamespace);
+ it('selects the specified item', () => {
+ expect(findDropdown().props('selected')).toBe(mockProject.id);
});
});
});
@@ -155,11 +141,10 @@ describe('ProjectDropdown', () => {
describe('when searching branches', () => {
it('triggers a refetch', async () => {
createComponent({ mountFn: mount });
- await waitForPromises();
jest.clearAllMocks();
const mockSearchTerm = 'gitl';
- await findSearchBox().vm.$emit('input', mockSearchTerm);
+ await findDropdown().vm.$emit('search', mockSearchTerm);
expect(mockGetProjectsQuerySuccess).toHaveBeenCalledWith({
after: '',
diff --git a/spec/frontend/jira_connect/subscriptions/api_spec.js b/spec/frontend/jira_connect/subscriptions/api_spec.js
index cf496d5836a..21636017f10 100644
--- a/spec/frontend/jira_connect/subscriptions/api_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/api_spec.js
@@ -9,7 +9,7 @@ import {
updateInstallation,
} from '~/jira_connect/subscriptions/api';
import { getJwt } from '~/jira_connect/subscriptions/utils';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
jest.mock('~/jira_connect/subscriptions/utils', () => ({
getJwt: jest.fn().mockResolvedValue('jwt'),
@@ -49,7 +49,7 @@ describe('JiraConnect API', () => {
jwt: mockJwt,
namespace_path: mockNamespace,
})
- .replyOnce(httpStatus.OK, mockResponse);
+ .replyOnce(HTTP_STATUS_OK, mockResponse);
response = await makeRequest();
@@ -67,7 +67,7 @@ describe('JiraConnect API', () => {
it('returns success response', async () => {
jest.spyOn(axiosInstance, 'delete');
- axiosMock.onDelete(mockRemovePath).replyOnce(httpStatus.OK, mockResponse);
+ axiosMock.onDelete(mockRemovePath).replyOnce(HTTP_STATUS_OK, mockResponse);
response = await makeRequest();
@@ -99,7 +99,7 @@ describe('JiraConnect API', () => {
page: mockPage,
per_page: mockPerPage,
})
- .replyOnce(httpStatus.OK, mockResponse);
+ .replyOnce(HTTP_STATUS_OK, mockResponse);
response = await makeRequest();
@@ -121,7 +121,7 @@ describe('JiraConnect API', () => {
jest.spyOn(axiosInstance, 'get');
- axiosMock.onGet(expectedUrl).replyOnce(httpStatus.OK, mockResponse);
+ axiosMock.onGet(expectedUrl).replyOnce(HTTP_STATUS_OK, mockResponse);
response = await makeRequest();
@@ -139,7 +139,7 @@ describe('JiraConnect API', () => {
jest.spyOn(axiosInstance, 'post');
- axiosMock.onPost(expectedUrl).replyOnce(httpStatus.OK, mockResponse);
+ axiosMock.onPost(expectedUrl).replyOnce(HTTP_STATUS_OK, mockResponse);
response = await makeRequest();
@@ -175,7 +175,7 @@ describe('JiraConnect API', () => {
instance_url: expectedInstanceUrl,
},
})
- .replyOnce(httpStatus.OK, mockResponse);
+ .replyOnce(HTTP_STATUS_OK, mockResponse);
response = await makeRequest();
diff --git a/spec/frontend/jira_connect/subscriptions/components/compatibility_alert_spec.js b/spec/frontend/jira_connect/subscriptions/components/compatibility_alert_spec.js
deleted file mode 100644
index 5f38a0acb9d..00000000000
--- a/spec/frontend/jira_connect/subscriptions/components/compatibility_alert_spec.js
+++ /dev/null
@@ -1,56 +0,0 @@
-import { GlAlert, GlLink } from '@gitlab/ui';
-import { shallowMount, mount } from '@vue/test-utils';
-import CompatibilityAlert from '~/jira_connect/subscriptions/components/compatibility_alert.vue';
-
-import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
-
-describe('CompatibilityAlert', () => {
- let wrapper;
-
- const createComponent = ({ mountFn = shallowMount } = {}) => {
- wrapper = mountFn(CompatibilityAlert);
- };
-
- const findAlert = () => wrapper.findComponent(GlAlert);
- const findLink = () => wrapper.findComponent(GlLink);
- const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('displays an alert', () => {
- createComponent();
-
- expect(findAlert().exists()).toBe(true);
- });
-
- it('renders help link with target="_blank" and rel="noopener noreferrer"', () => {
- createComponent({ mountFn: mount });
- expect(findLink().attributes()).toMatchObject({
- target: '_blank',
- rel: 'noopener',
- });
- });
-
- it('`local-storage-sync` value prop is initially false', () => {
- createComponent();
-
- expect(findLocalStorageSync().props('value')).toBe(false);
- });
-
- describe('when dismissed', () => {
- beforeEach(async () => {
- createComponent();
- await findAlert().vm.$emit('dismiss');
- });
-
- it('hides alert', () => {
- expect(findAlert().exists()).toBe(false);
- });
-
- it('updates value prop of `local-storage-sync`', () => {
- expect(findLocalStorageSync().props('value')).toBe(true);
- });
- });
-});
diff --git a/spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js b/spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js
index 01317eb5dba..e20c4b62e77 100644
--- a/spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js
+++ b/spec/frontend/jira_connect/subscriptions/components/sign_in_oauth_button_spec.js
@@ -4,6 +4,7 @@ import { nextTick } from 'vue';
import SignInOauthButton from '~/jira_connect/subscriptions/components/sign_in_oauth_button.vue';
import {
+ GITLAB_COM_BASE_PATH,
I18N_DEFAULT_SIGN_IN_BUTTON_TEXT,
OAUTH_WINDOW_OPTIONS,
} from '~/jira_connect/subscriptions/constants';
@@ -36,6 +37,9 @@ describe('SignInOauthButton', () => {
},
state: 'good-state',
};
+ const defaultProps = {
+ gitlabBasePath: GITLAB_COM_BASE_PATH,
+ };
const createComponent = ({ slots, props } = {}) => {
store = createStore();
@@ -48,7 +52,7 @@ describe('SignInOauthButton', () => {
provide: {
oauthMetadata: mockOauthMetadata,
},
- propsData: props,
+ propsData: { ...defaultProps, ...props },
});
};
@@ -57,16 +61,17 @@ describe('SignInOauthButton', () => {
});
const findButton = () => wrapper.findComponent(GlButton);
+ describe('when `gitlabBasePath` is GitLab.com', () => {
+ it('displays a button', () => {
+ createComponent();
- it('displays a button', () => {
- createComponent();
-
- expect(findButton().exists()).toBe(true);
- expect(findButton().text()).toBe(I18N_DEFAULT_SIGN_IN_BUTTON_TEXT);
- expect(findButton().props('category')).toBe('primary');
+ expect(findButton().exists()).toBe(true);
+ expect(findButton().text()).toBe(I18N_DEFAULT_SIGN_IN_BUTTON_TEXT);
+ expect(findButton().props('category')).toBe('primary');
+ });
});
- describe('when `gitlabBasePath` is passed', () => {
+ describe('when `gitlabBasePath` is self-managed', () => {
const mockBasePath = 'https://gitlab.mycompany.com';
it('uses custom text for button', () => {
diff --git a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
index 748e151f31b..40e627262db 100644
--- a/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
+++ b/spec/frontend/jira_import/components/__snapshots__/jira_import_form_spec.js.snap
@@ -150,18 +150,12 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
<input
aria-label="Search"
- class="gl-form-input gl-search-box-by-type-input form-control"
+ class="gl-form-input form-control gl-search-box-by-type-input"
placeholder="Search"
type="search"
/>
- <div
- class="gl-search-box-by-type-right-icons"
- >
- <!---->
-
- <!---->
- </div>
+ <!---->
</div>
<li
@@ -281,18 +275,12 @@ exports[`JiraImportForm table body shows correct information in each cell 1`] =
<input
aria-label="Search"
- class="gl-form-input gl-search-box-by-type-input form-control"
+ class="gl-form-input form-control gl-search-box-by-type-input"
placeholder="Search"
type="search"
/>
- <div
- class="gl-search-box-by-type-right-icons"
- >
- <!---->
-
- <!---->
- </div>
+ <!---->
</div>
<li
diff --git a/spec/frontend/jobs/components/job/manual_variables_form_spec.js b/spec/frontend/jobs/components/job/manual_variables_form_spec.js
index 45a1e9dca76..3040570df19 100644
--- a/spec/frontend/jobs/components/job/manual_variables_form_spec.js
+++ b/spec/frontend/jobs/components/job/manual_variables_form_spec.js
@@ -212,9 +212,30 @@ describe('Manual Variables Form', () => {
expect(findDeleteVarBtn().exists()).toBe(true);
});
+ });
+
+ describe('variable delete button placeholder', () => {
+ beforeEach(async () => {
+ getJobQueryResponse.mockResolvedValue(mockJobResponse);
+ await createComponentWithApollo();
+ });
it('delete variable button placeholder should only exist when a user cannot remove', async () => {
expect(findDeleteVarBtnPlaceholder().exists()).toBe(true);
});
+
+ it('does not show the placeholder button', () => {
+ expect(findDeleteVarBtnPlaceholder().classes('gl-opacity-0')).toBe(true);
+ });
+
+ it('placeholder button will not delete the row on click', async () => {
+ expect(findAllCiVariableKeys()).toHaveLength(1);
+ expect(findDeleteVarBtnPlaceholder().exists()).toBe(true);
+
+ await findDeleteVarBtnPlaceholder().trigger('click');
+
+ expect(findAllCiVariableKeys()).toHaveLength(1);
+ expect(findDeleteVarBtnPlaceholder().exists()).toBe(true);
+ });
});
});
diff --git a/spec/frontend/jobs/components/job/sidebar_spec.js b/spec/frontend/jobs/components/job/sidebar_spec.js
index 27911eb76eb..aa9ca932023 100644
--- a/spec/frontend/jobs/components/job/sidebar_spec.js
+++ b/spec/frontend/jobs/components/job/sidebar_spec.js
@@ -3,7 +3,7 @@ import { nextTick } from 'vue';
import MockAdapter from 'axios-mock-adapter';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import axios from '~/lib/utils/axios_utils';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import ArtifactsBlock from '~/jobs/components/job/sidebar/artifacts_block.vue';
import JobRetryForwardDeploymentModal from '~/jobs/components/job/sidebar/job_retry_forward_deployment_modal.vue';
import JobsContainer from '~/jobs/components/job/sidebar/jobs_container.vue';
@@ -43,7 +43,7 @@ describe('Sidebar details block', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet().reply(httpStatus.OK, {
+ mock.onGet().reply(HTTP_STATUS_OK, {
name: job.stage,
});
});
diff --git a/spec/frontend/jobs/components/table/jobs_table_spec.js b/spec/frontend/jobs/components/table/jobs_table_spec.js
index 803df3df37f..3c4f2d624fe 100644
--- a/spec/frontend/jobs/components/table/jobs_table_spec.js
+++ b/spec/frontend/jobs/components/table/jobs_table_spec.js
@@ -2,14 +2,14 @@ import { GlTable } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import JobsTable from '~/jobs/components/table/jobs_table.vue';
-import CiBadge from '~/vue_shared/components/ci_badge_link.vue';
+import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
import { mockJobsNodes } from '../../mock_data';
describe('Jobs Table', () => {
let wrapper;
const findTable = () => wrapper.findComponent(GlTable);
- const findStatusBadge = () => wrapper.findComponent(CiBadge);
+ const findCiBadgeLink = () => wrapper.findComponent(CiBadgeLink);
const findTableRows = () => wrapper.findAllByTestId('jobs-table-row');
const findJobStage = () => wrapper.findByTestId('job-stage-name');
const findJobName = () => wrapper.findByTestId('job-name');
@@ -43,7 +43,7 @@ describe('Jobs Table', () => {
});
it('displays job status', () => {
- expect(findStatusBadge().exists()).toBe(true);
+ expect(findCiBadgeLink().exists()).toBe(true);
});
it('displays the job stage and name', () => {
diff --git a/spec/frontend/language_switcher/components/app_spec.js b/spec/frontend/language_switcher/components/app_spec.js
index 6a1b94cd813..effb71c2775 100644
--- a/spec/frontend/language_switcher/components/app_spec.js
+++ b/spec/frontend/language_switcher/components/app_spec.js
@@ -1,3 +1,4 @@
+import { GlLink } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import LanguageSwitcherApp from '~/language_switcher/components/app.vue';
import { PREFERRED_LANGUAGE_COOKIE_KEY } from '~/language_switcher/constants';
@@ -29,6 +30,7 @@ describe('<LanguageSwitcher />', () => {
const getPreferredLanguage = () => wrapper.find('.gl-dropdown-button-text').text();
const findLanguageDropdownItem = (code) => wrapper.findByTestId(`language_switcher_lang_${code}`);
+ const findFooter = () => wrapper.findByTestId('footer');
it('preferred language', () => {
expect(getPreferredLanguage()).toBe(EN.text);
@@ -59,4 +61,12 @@ describe('<LanguageSwitcher />', () => {
expect(utils.setCookie).toHaveBeenCalledWith(PREFERRED_LANGUAGE_COOKIE_KEY, ES.value);
window.location = originalLocation;
});
+
+ it('renders footer link', () => {
+ const link = findFooter().findComponent(GlLink);
+
+ // Assert against actual value so we can implicitly test `helpPagePath` call
+ expect(link.attributes('href')).toBe('/help/development/i18n/translation.md');
+ expect(link.text()).toBe(LanguageSwitcherApp.HELP_TRANSLATE_MSG);
+ });
});
diff --git a/spec/frontend/lib/utils/datetime/date_calculation_utility_spec.js b/spec/frontend/lib/utils/datetime/date_calculation_utility_spec.js
index 055d57d6ada..8d6ace165ab 100644
--- a/spec/frontend/lib/utils/datetime/date_calculation_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime/date_calculation_utility_spec.js
@@ -3,7 +3,9 @@ import {
newDateAsLocaleTime,
nSecondsAfter,
nSecondsBefore,
+ isToday,
} from '~/lib/utils/datetime/date_calculation_utility';
+import { useFakeDate } from 'helpers/fake_date';
describe('newDateAsLocaleTime', () => {
it.each`
@@ -66,3 +68,19 @@ describe('nSecondsBefore', () => {
expect(nSecondsBefore(date, seconds)).toEqual(expected);
});
});
+
+describe('isToday', () => {
+ useFakeDate(2022, 11, 5);
+
+ describe('when date is today', () => {
+ it('returns `true`', () => {
+ expect(isToday(new Date(2022, 11, 5))).toBe(true);
+ });
+ });
+
+ describe('when date is not today', () => {
+ it('returns `false`', () => {
+ expect(isToday(new Date(2022, 11, 6))).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/lib/utils/datetime/date_format_utility_spec.js b/spec/frontend/lib/utils/datetime/date_format_utility_spec.js
index 2e0bb6a8dcd..a83b0ed9fbe 100644
--- a/spec/frontend/lib/utils/datetime/date_format_utility_spec.js
+++ b/spec/frontend/lib/utils/datetime/date_format_utility_spec.js
@@ -149,17 +149,17 @@ describe('durationTimeFormatted', () => {
describe('formatUtcOffset', () => {
it.each`
offset | expected
- ${-32400} | ${'- 9'}
- ${'-12600'} | ${'- 3.5'}
- ${0} | ${'0'}
- ${'10800'} | ${'+ 3'}
- ${19800} | ${'+ 5.5'}
- ${0} | ${'0'}
- ${[]} | ${'0'}
- ${{}} | ${'0'}
- ${true} | ${'0'}
- ${null} | ${'0'}
- ${undefined} | ${'0'}
+ ${-32400} | ${'-9'}
+ ${'-12600'} | ${'-3.5'}
+ ${0} | ${' 0'}
+ ${'10800'} | ${'+3'}
+ ${19800} | ${'+5.5'}
+ ${0} | ${' 0'}
+ ${[]} | ${' 0'}
+ ${{}} | ${' 0'}
+ ${true} | ${' 0'}
+ ${null} | ${' 0'}
+ ${undefined} | ${' 0'}
`('returns $expected given $offset', ({ offset, expected }) => {
expect(utils.formatUtcOffset(offset)).toEqual(expected);
});
diff --git a/spec/frontend/lib/utils/poll_until_complete_spec.js b/spec/frontend/lib/utils/poll_until_complete_spec.js
index 3ce17ecfc8c..309e0cc540b 100644
--- a/spec/frontend/lib/utils/poll_until_complete_spec.js
+++ b/spec/frontend/lib/utils/poll_until_complete_spec.js
@@ -1,7 +1,11 @@
import AxiosMockAdapter from 'axios-mock-adapter';
import { TEST_HOST } from 'helpers/test_constants';
import axios from '~/lib/utils/axios_utils';
-import httpStatusCodes, { HTTP_STATUS_NO_CONTENT } from '~/lib/utils/http_status';
+import {
+ HTTP_STATUS_NO_CONTENT,
+ HTTP_STATUS_NOT_FOUND,
+ HTTP_STATUS_OK,
+} from '~/lib/utils/http_status';
import pollUntilComplete from '~/lib/utils/poll_until_complete';
const endpoint = `${TEST_HOST}/foo`;
@@ -24,7 +28,7 @@ describe('pollUntilComplete', () => {
describe('given an immediate success response', () => {
beforeEach(() => {
- mock.onGet(endpoint).replyOnce(httpStatusCodes.OK, mockData);
+ mock.onGet(endpoint).replyOnce(HTTP_STATUS_OK, mockData);
});
it('resolves with the response', () =>
@@ -39,7 +43,7 @@ describe('pollUntilComplete', () => {
.onGet(endpoint)
.replyOnce(HTTP_STATUS_NO_CONTENT, undefined, pollIntervalHeader)
.onGet(endpoint)
- .replyOnce(httpStatusCodes.OK, mockData);
+ .replyOnce(HTTP_STATUS_OK, mockData);
});
it('calls the endpoint until it succeeds, and resolves with the response', () =>
@@ -66,7 +70,7 @@ describe('pollUntilComplete', () => {
const errorMessage = 'error message';
beforeEach(() => {
- mock.onGet(endpoint).replyOnce(httpStatusCodes.NOT_FOUND, errorMessage);
+ mock.onGet(endpoint).replyOnce(HTTP_STATUS_NOT_FOUND, errorMessage);
});
it('rejects with the error response', () =>
@@ -78,7 +82,7 @@ describe('pollUntilComplete', () => {
describe('given params', () => {
const params = { foo: 'bar' };
beforeEach(() => {
- mock.onGet(endpoint, { params }).replyOnce(httpStatusCodes.OK, mockData);
+ mock.onGet(endpoint, { params }).replyOnce(HTTP_STATUS_OK, mockData);
});
it('requests the expected URL', () =>
diff --git a/spec/frontend/locale/ensure_single_line_spec.js b/spec/frontend/locale/ensure_single_line_spec.js
index 20b04cab9c8..ca3d57015af 100644
--- a/spec/frontend/locale/ensure_single_line_spec.js
+++ b/spec/frontend/locale/ensure_single_line_spec.js
@@ -1,4 +1,4 @@
-import ensureSingleLine from '~/locale/ensure_single_line';
+import ensureSingleLine from '~/locale/ensure_single_line.cjs';
describe('locale', () => {
describe('ensureSingleLine', () => {
diff --git a/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js
index df5c884f42e..b94964dc482 100644
--- a/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js
+++ b/spec/frontend/members/components/action_buttons/access_request_action_buttons_spec.js
@@ -38,7 +38,6 @@ describe('AccessRequestActionButtons', () => {
title: 'Deny access',
isAccessRequest: true,
isInvite: false,
- icon: 'close',
});
});
diff --git a/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js
index ea819b4fb83..68009708c99 100644
--- a/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js
+++ b/spec/frontend/members/components/action_buttons/invite_action_buttons_spec.js
@@ -39,12 +39,10 @@ describe('InviteActionButtons', () => {
it('sets props correctly', () => {
expect(findRemoveMemberButton().props()).toMatchObject({
memberId: member.id,
- memberType: null,
message: `Are you sure you want to revoke the invitation for ${member.invite.email} to join "${member.source.fullName}"`,
title: 'Revoke invite',
isAccessRequest: false,
isInvite: true,
- icon: 'remove',
});
});
});
diff --git a/spec/frontend/members/components/action_buttons/leave_button_spec.js b/spec/frontend/members/components/action_buttons/leave_button_spec.js
deleted file mode 100644
index ecfbf4460a6..00000000000
--- a/spec/frontend/members/components/action_buttons/leave_button_spec.js
+++ /dev/null
@@ -1,59 +0,0 @@
-import { GlButton } from '@gitlab/ui';
-import { shallowMount } from '@vue/test-utils';
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
-import LeaveButton from '~/members/components/action_buttons/leave_button.vue';
-import LeaveModal from '~/members/components/modals/leave_modal.vue';
-import { LEAVE_MODAL_ID } from '~/members/constants';
-import { member } from '../../mock_data';
-
-describe('LeaveButton', () => {
- let wrapper;
-
- const createComponent = (propsData = {}) => {
- wrapper = shallowMount(LeaveButton, {
- propsData: {
- member,
- ...propsData,
- },
- directives: {
- GlTooltip: createMockDirective(),
- GlModal: createMockDirective(),
- },
- });
- };
-
- const findButton = () => wrapper.findComponent(GlButton);
-
- beforeEach(() => {
- createComponent();
- });
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- it('displays a tooltip', () => {
- const button = findButton();
-
- expect(getBinding(button.element, 'gl-tooltip')).not.toBeUndefined();
- expect(button.attributes('title')).toBe('Leave');
- });
-
- it('sets `aria-label` attribute', () => {
- expect(findButton().attributes('aria-label')).toBe('Leave');
- });
-
- it('renders leave modal', () => {
- const leaveModal = wrapper.findComponent(LeaveModal);
-
- expect(leaveModal.exists()).toBe(true);
- expect(leaveModal.props('member')).toEqual(member);
- });
-
- it('triggers leave modal', () => {
- const binding = getBinding(findButton().element, 'gl-modal');
-
- expect(binding).not.toBeUndefined();
- expect(binding.value).toBe(LEAVE_MODAL_ID);
- });
-});
diff --git a/spec/frontend/members/components/action_buttons/remove_member_button_spec.js b/spec/frontend/members/components/action_buttons/remove_member_button_spec.js
index 0e5b667eb9b..cca340169b7 100644
--- a/spec/frontend/members/components/action_buttons/remove_member_button_spec.js
+++ b/spec/frontend/members/components/action_buttons/remove_member_button_spec.js
@@ -39,7 +39,6 @@ describe('RemoveMemberButton', () => {
},
propsData: {
memberId: 1,
- memberType: 'GroupMember',
message: 'Are you sure you want to remove John Smith?',
title: 'Remove member',
isAccessRequest: true,
@@ -77,20 +76,9 @@ describe('RemoveMemberButton', () => {
it('calls Vuex action to show `remove member` modal when clicked', () => {
findButton().vm.$emit('click');
- expect(actions.showRemoveMemberModal).toHaveBeenCalledWith(expect.any(Object), modalData);
- });
-
- describe('button optional properties', () => {
- it('has default value for category and text', () => {
- createComponent();
- expect(findButton().props('category')).toBe('secondary');
- expect(findButton().text()).toBe('');
- });
-
- it('allow changing value of button category and text', () => {
- createComponent({ buttonCategory: 'primary', buttonText: 'Decline request' });
- expect(findButton().props('category')).toBe('primary');
- expect(findButton().text()).toBe('Decline request');
+ expect(actions.showRemoveMemberModal).toHaveBeenCalledWith(expect.any(Object), {
+ ...modalData,
+ memberModelType: undefined,
});
});
});
diff --git a/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js b/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
deleted file mode 100644
index 6ac46619bc9..00000000000
--- a/spec/frontend/members/components/action_buttons/user_action_buttons_spec.js
+++ /dev/null
@@ -1,161 +0,0 @@
-import { shallowMount } from '@vue/test-utils';
-import LeaveButton from '~/members/components/action_buttons/leave_button.vue';
-import RemoveMemberButton from '~/members/components/action_buttons/remove_member_button.vue';
-import UserActionButtons from '~/members/components/action_buttons/user_action_buttons.vue';
-import { parseUserDeletionObstacles } from '~/vue_shared/components/user_deletion_obstacles/utils';
-import { member, orphanedMember } from '../../mock_data';
-
-describe('UserActionButtons', () => {
- let wrapper;
-
- const createComponent = (propsData = {}) => {
- wrapper = shallowMount(UserActionButtons, {
- propsData: {
- member,
- isCurrentUser: false,
- isInvitedUser: false,
- ...propsData,
- },
- });
- };
-
- const findRemoveMemberButton = () => wrapper.findComponent(RemoveMemberButton);
-
- afterEach(() => {
- wrapper.destroy();
- });
-
- describe('when user has `canRemove` permissions', () => {
- beforeEach(() => {
- createComponent({
- permissions: {
- canRemove: true,
- },
- });
- });
-
- it('renders remove member button', () => {
- expect(findRemoveMemberButton().exists()).toBe(true);
- });
-
- it('sets props correctly', () => {
- expect(findRemoveMemberButton().props()).toEqual({
- memberId: member.id,
- memberType: 'GroupMember',
- message: `Are you sure you want to remove ${member.user.name} from "${member.source.fullName}"?`,
- title: null,
- isAccessRequest: false,
- isInvite: false,
- icon: '',
- buttonCategory: 'secondary',
- buttonText: 'Remove member',
- userDeletionObstacles: {
- name: member.user.name,
- obstacles: parseUserDeletionObstacles(member.user),
- },
- });
- });
-
- describe('when member is orphaned', () => {
- it('sets `message` prop correctly', () => {
- createComponent({
- member: orphanedMember,
- permissions: {
- canRemove: true,
- },
- });
-
- expect(findRemoveMemberButton().props('message')).toBe(
- `Are you sure you want to remove this orphaned member from "${orphanedMember.source.fullName}"?`,
- );
- });
- });
-
- describe('when member is the current user', () => {
- it('renders leave button', () => {
- createComponent({
- isCurrentUser: true,
- permissions: {
- canRemove: true,
- },
- });
-
- expect(wrapper.findComponent(LeaveButton).exists()).toBe(true);
- });
- });
- });
-
- describe('when user does not have `canRemove` permissions', () => {
- it('does not render remove member button', () => {
- createComponent({
- permissions: {
- canRemove: false,
- },
- });
-
- expect(findRemoveMemberButton().exists()).toBe(false);
- });
- });
-
- describe('when group member', () => {
- beforeEach(() => {
- createComponent({
- member: {
- ...member,
- type: 'GroupMember',
- },
- permissions: {
- canRemove: true,
- },
- });
- });
-
- it('sets member type correctly', () => {
- expect(findRemoveMemberButton().props().memberType).toBe('GroupMember');
- });
- });
-
- describe('when project member', () => {
- beforeEach(() => {
- createComponent({
- member: {
- ...member,
- type: 'ProjectMember',
- },
- permissions: {
- canRemove: true,
- },
- });
- });
-
- it('sets member type correctly', () => {
- expect(findRemoveMemberButton().props().memberType).toBe('ProjectMember');
- });
- });
-
- describe('isInvitedUser', () => {
- it.each`
- isInvitedUser | icon | buttonText | buttonCategory
- ${true} | ${'remove'} | ${null} | ${'primary'}
- ${false} | ${''} | ${'Remove member'} | ${'secondary'}
- `(
- 'passes the correct props to remove-member-button when isInvitedUser is $isInvitedUser',
- ({ isInvitedUser, icon, buttonText, buttonCategory }) => {
- createComponent({
- isInvitedUser,
- permissions: {
- canRemove: true,
- },
- });
-
- expect(findRemoveMemberButton().props()).toEqual(
- expect.objectContaining({
- icon,
- buttonText,
- buttonCategory,
- }),
- );
- },
- );
- });
-});
diff --git a/spec/frontend/members/components/action_dropdowns/leave_group_dropdown_item_spec.js b/spec/frontend/members/components/action_dropdowns/leave_group_dropdown_item_spec.js
new file mode 100644
index 00000000000..90f5b217007
--- /dev/null
+++ b/spec/frontend/members/components/action_dropdowns/leave_group_dropdown_item_spec.js
@@ -0,0 +1,54 @@
+import { GlDropdownItem } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import LeaveGroupDropdownItem from '~/members/components/action_dropdowns/leave_group_dropdown_item.vue';
+import LeaveModal from '~/members/components/modals/leave_modal.vue';
+import { LEAVE_MODAL_ID } from '~/members/constants';
+import { member, permissions } from '../../mock_data';
+
+describe('LeaveGroupDropdownItem', () => {
+ let wrapper;
+ const text = 'dummy';
+
+ const createComponent = (propsData = {}) => {
+ wrapper = shallowMount(LeaveGroupDropdownItem, {
+ propsData: {
+ member,
+ permissions,
+ ...propsData,
+ },
+ directives: {
+ GlModal: createMockDirective(),
+ },
+ slots: {
+ default: text,
+ },
+ });
+ };
+
+ const findDropdownItem = () => wrapper.findComponent(GlDropdownItem);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders a slot with red text', () => {
+ expect(findDropdownItem().html()).toContain(`<span class="gl-text-red-500">${text}</span>`);
+ });
+
+ it('contains LeaveModal component', () => {
+ const leaveModal = wrapper.findComponent(LeaveModal);
+
+ expect(leaveModal.props()).toEqual({ member, permissions });
+ });
+
+ it('binds to the LeaveModal component', () => {
+ const binding = getBinding(findDropdownItem().element, 'gl-modal');
+
+ expect(binding.value).toBe(LEAVE_MODAL_ID);
+ });
+});
diff --git a/spec/frontend/members/components/action_dropdowns/remove_member_dropdown_item_spec.js b/spec/frontend/members/components/action_dropdowns/remove_member_dropdown_item_spec.js
new file mode 100644
index 00000000000..e1c498249d7
--- /dev/null
+++ b/spec/frontend/members/components/action_dropdowns/remove_member_dropdown_item_spec.js
@@ -0,0 +1,77 @@
+import { GlDropdownItem } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import Vuex from 'vuex';
+import { modalData } from 'jest/members/mock_data';
+import RemoveMemberDropdownItem from '~/members/components/action_dropdowns/remove_member_dropdown_item.vue';
+import { MEMBER_TYPES, MEMBER_MODEL_TYPE_GROUP_MEMBER } from '~/members/constants';
+
+Vue.use(Vuex);
+
+describe('RemoveMemberDropdownItem', () => {
+ let wrapper;
+ const text = 'dummy';
+
+ const actions = {
+ showRemoveMemberModal: jest.fn(),
+ };
+
+ const createStore = (state = {}) => {
+ return new Vuex.Store({
+ modules: {
+ [MEMBER_TYPES.user]: {
+ namespaced: true,
+ state: {
+ memberPath: '/groups/foo-bar/-/group_members/:id',
+ ...state,
+ },
+ actions,
+ },
+ },
+ });
+ };
+
+ const createComponent = (propsData = {}, state) => {
+ wrapper = shallowMount(RemoveMemberDropdownItem, {
+ store: createStore(state),
+ provide: {
+ namespace: MEMBER_TYPES.user,
+ },
+ propsData: {
+ memberId: 1,
+ memberModelType: MEMBER_MODEL_TYPE_GROUP_MEMBER,
+ modalMessage: 'Are you sure you want to remove John Smith?',
+ isAccessRequest: true,
+ isInvite: true,
+ userDeletionObstacles: { name: 'user', obstacles: [] },
+ ...propsData,
+ },
+ slots: {
+ default: text,
+ },
+ });
+ };
+
+ const findDropdownItem = () => wrapper.findComponent(GlDropdownItem);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders a slot with red text', () => {
+ expect(findDropdownItem().html()).toContain(`<span class="gl-text-red-500">${text}</span>`);
+ });
+
+ it('calls Vuex action to show `remove member` modal when clicked', () => {
+ findDropdownItem().vm.$emit('click');
+
+ expect(actions.showRemoveMemberModal).toHaveBeenCalledWith(expect.any(Object), {
+ ...modalData,
+ preventRemoval: false,
+ });
+ });
+});
diff --git a/spec/frontend/members/components/action_dropdowns/user_action_dropdown_spec.js b/spec/frontend/members/components/action_dropdowns/user_action_dropdown_spec.js
new file mode 100644
index 00000000000..5a2de1cac80
--- /dev/null
+++ b/spec/frontend/members/components/action_dropdowns/user_action_dropdown_spec.js
@@ -0,0 +1,220 @@
+import { shallowMount } from '@vue/test-utils';
+import { sprintf } from '~/locale';
+import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import LeaveGroupDropdownItem from '~/members/components/action_dropdowns/leave_group_dropdown_item.vue';
+import RemoveMemberDropdownItem from '~/members/components/action_dropdowns/remove_member_dropdown_item.vue';
+import UserActionDropdown from '~/members/components/action_dropdowns/user_action_dropdown.vue';
+import { I18N } from '~/members/components/action_dropdowns/constants';
+import {
+ MEMBER_MODEL_TYPE_GROUP_MEMBER,
+ MEMBER_MODEL_TYPE_PROJECT_MEMBER,
+} from '~/members/constants';
+import { parseUserDeletionObstacles } from '~/vue_shared/components/user_deletion_obstacles/utils';
+import { member, orphanedMember } from '../../mock_data';
+
+describe('UserActionDropdown', () => {
+ let wrapper;
+
+ const createComponent = (propsData = {}) => {
+ wrapper = shallowMount(UserActionDropdown, {
+ propsData: {
+ member,
+ isCurrentUser: false,
+ isInvitedUser: false,
+ ...propsData,
+ },
+ directives: {
+ GlTooltip: createMockDirective(),
+ },
+ });
+ };
+
+ const findRemoveMemberDropdownItem = () => wrapper.findComponent(RemoveMemberDropdownItem);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when user has `canRemove` permissions', () => {
+ beforeEach(() => {
+ createComponent({
+ permissions: {
+ canRemove: true,
+ },
+ });
+ });
+
+ it('renders remove member dropdown with correct text', () => {
+ const removeMemberDropdownItem = findRemoveMemberDropdownItem();
+ expect(removeMemberDropdownItem.exists()).toBe(true);
+ expect(removeMemberDropdownItem.html()).toContain(I18N.removeMember);
+ });
+
+ it('displays a tooltip', () => {
+ const tooltip = getBinding(wrapper.element, 'gl-tooltip');
+ expect(tooltip).not.toBeUndefined();
+ expect(tooltip.value).toBe(I18N.actions);
+ });
+
+ it('sets props correctly', () => {
+ expect(findRemoveMemberDropdownItem().props()).toEqual({
+ memberId: member.id,
+ memberModelType: MEMBER_MODEL_TYPE_GROUP_MEMBER,
+ modalMessage: sprintf(
+ I18N.confirmNormalUserRemoval,
+ {
+ userName: member.user.name,
+ group: member.source.fullName,
+ },
+ false,
+ ),
+ isAccessRequest: false,
+ isInvite: false,
+ userDeletionObstacles: {
+ name: member.user.name,
+ obstacles: parseUserDeletionObstacles(member.user),
+ },
+ preventRemoval: false,
+ });
+ });
+
+ describe('when member is orphaned', () => {
+ it('sets `message` prop correctly', () => {
+ createComponent({
+ member: orphanedMember,
+ permissions: {
+ canRemove: true,
+ },
+ });
+
+ expect(findRemoveMemberDropdownItem().props('modalMessage')).toBe(
+ sprintf(I18N.confirmOrphanedUserRemoval, { group: orphanedMember.source.fullName }),
+ );
+ });
+ });
+
+ describe('when member is the current user', () => {
+ it('renders leave dropdown with correct text', () => {
+ createComponent({
+ isCurrentUser: true,
+ permissions: {
+ canRemove: true,
+ },
+ });
+
+ const leaveGroupDropdownItem = wrapper.findComponent(LeaveGroupDropdownItem);
+ expect(leaveGroupDropdownItem.exists()).toBe(true);
+ expect(leaveGroupDropdownItem.html()).toContain(I18N.leaveGroup);
+ });
+ });
+ });
+
+ describe('when user does not have `canRemove` permissions', () => {
+ it('does not render remove member dropdown', () => {
+ createComponent({
+ permissions: {
+ canRemove: false,
+ },
+ });
+
+ expect(findRemoveMemberDropdownItem().exists()).toBe(false);
+ });
+ });
+
+ describe('when removal is blocked by last owner', () => {
+ const permissions = {
+ canRemove: false,
+ canRemoveBlockedByLastOwner: true,
+ };
+
+ it('renders remove member dropdown', () => {
+ createComponent({
+ permissions,
+ });
+
+ expect(findRemoveMemberDropdownItem().exists()).toBe(true);
+ });
+
+ describe('when member model type is `GroupMember`', () => {
+ it('passes correct message to the modal', () => {
+ createComponent({
+ permissions,
+ });
+
+ expect(findRemoveMemberDropdownItem().props('modalMessage')).toBe(
+ I18N.lastGroupOwnerCannotBeRemoved,
+ );
+ });
+ });
+
+ describe('when member model type is `ProjectMember`', () => {
+ it('passes correct message to the modal', () => {
+ createComponent({
+ member: {
+ ...member,
+ type: MEMBER_MODEL_TYPE_PROJECT_MEMBER,
+ },
+ permissions,
+ });
+
+ expect(findRemoveMemberDropdownItem().props('modalMessage')).toBe(
+ I18N.personalProjectOwnerCannotBeRemoved,
+ );
+ });
+ });
+
+ describe('when member is the current user', () => {
+ it('renders leave dropdown with correct props', () => {
+ createComponent({
+ isCurrentUser: true,
+ permissions,
+ });
+
+ expect(wrapper.findComponent(LeaveGroupDropdownItem).props()).toEqual({
+ member,
+ permissions,
+ });
+ });
+ });
+ });
+
+ describe('when group member', () => {
+ beforeEach(() => {
+ createComponent({
+ member: {
+ ...member,
+ type: MEMBER_MODEL_TYPE_GROUP_MEMBER,
+ },
+ permissions: {
+ canRemove: true,
+ },
+ });
+ });
+
+ it('sets member model type correctly', () => {
+ expect(findRemoveMemberDropdownItem().props().memberModelType).toBe(
+ MEMBER_MODEL_TYPE_GROUP_MEMBER,
+ );
+ });
+ });
+
+ describe('when project member', () => {
+ beforeEach(() => {
+ createComponent({
+ member: {
+ ...member,
+ type: MEMBER_MODEL_TYPE_PROJECT_MEMBER,
+ },
+ permissions: {
+ canRemove: true,
+ },
+ });
+ });
+
+ it('sets member model type correctly', () => {
+ expect(findRemoveMemberDropdownItem().props().memberModelType).toBe(
+ MEMBER_MODEL_TYPE_PROJECT_MEMBER,
+ );
+ });
+ });
+});
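// Sketch (not part of the patch): the branching these specs pin down, written as a standalone
// helper. The function name and shape are assumptions; the component computes this internally,
// and the orphaned-member variant is omitted for brevity.
import { sprintf } from '~/locale';
import { I18N } from '~/members/components/action_dropdowns/constants';
import { MEMBER_MODEL_TYPE_PROJECT_MEMBER } from '~/members/constants';

// Message shown by the remove-member modal for a given member/permissions pair.
function removalModalMessage(member, permissions) {
  if (permissions.canRemoveBlockedByLastOwner) {
    // Removal is prevented; the wording depends on the member model type.
    return member.type === MEMBER_MODEL_TYPE_PROJECT_MEMBER
      ? I18N.personalProjectOwnerCannotBeRemoved
      : I18N.lastGroupOwnerCannotBeRemoved;
  }
  // Normal case: interpolated confirmation text, with HTML escaping disabled as in the spec above.
  return sprintf(
    I18N.confirmNormalUserRemoval,
    { userName: member.user.name, group: member.source.fullName },
    false,
  );
}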
diff --git a/spec/frontend/members/components/modals/leave_modal_spec.js b/spec/frontend/members/components/modals/leave_modal_spec.js
index cdbabb2f646..ba587c6f0b3 100644
--- a/spec/frontend/members/components/modals/leave_modal_spec.js
+++ b/spec/frontend/members/components/modals/leave_modal_spec.js
@@ -1,11 +1,14 @@
import { GlModal, GlForm } from '@gitlab/ui';
-import { within } from '@testing-library/dom';
-import { mount, createWrapper } from '@vue/test-utils';
import { cloneDeep } from 'lodash';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
+import { mountExtended, extendedWrapper } from 'helpers/vue_test_utils_helper';
import LeaveModal from '~/members/components/modals/leave_modal.vue';
-import { LEAVE_MODAL_ID, MEMBER_TYPES } from '~/members/constants';
+import {
+ LEAVE_MODAL_ID,
+ MEMBER_TYPES,
+ MEMBER_MODEL_TYPE_PROJECT_MEMBER,
+} from '~/members/constants';
import UserDeletionObstaclesList from '~/vue_shared/components/user_deletion_obstacles/user_deletion_obstacles_list.vue';
import { parseUserDeletionObstacles } from '~/vue_shared/components/user_deletion_obstacles/utils';
import { member } from '../../mock_data';
@@ -31,14 +34,17 @@ describe('LeaveModal', () => {
});
};
- const createComponent = (propsData = {}, state) => {
- wrapper = mount(LeaveModal, {
+ const createComponent = async (propsData = {}, state) => {
+ wrapper = mountExtended(LeaveModal, {
store: createStore(state),
provide: {
namespace: MEMBER_TYPES.user,
},
propsData: {
member,
+ permissions: {
+ canRemove: true,
+ },
...propsData,
},
attrs: {
@@ -46,39 +52,98 @@ describe('LeaveModal', () => {
visible: true,
},
});
+
+ await nextTick();
};
- const findModal = () => wrapper.findComponent(GlModal);
+ const findModal = () => extendedWrapper(wrapper.findComponent(GlModal));
const findForm = () => findModal().findComponent(GlForm);
const findUserDeletionObstaclesList = () => findModal().findComponent(UserDeletionObstaclesList);
- const getByText = (text, options) =>
- createWrapper(within(findModal().element).getByText(text, options));
-
- beforeEach(async () => {
- createComponent();
- await nextTick();
- });
-
afterEach(() => {
wrapper.destroy();
});
- it('sets modal ID', () => {
+ it('sets modal ID', async () => {
+ await createComponent();
+
expect(findModal().props('modalId')).toBe(LEAVE_MODAL_ID);
});
- it('displays modal title', () => {
- expect(getByText(`Leave "${member.source.fullName}"`).exists()).toBe(true);
+ describe('when leave is allowed', () => {
+ it('displays modal title', async () => {
+ await createComponent();
+
+ expect(findModal().findByText(`Leave "${member.source.fullName}"`).exists()).toBe(true);
+ });
+
+ it('displays modal body', async () => {
+ await createComponent();
+
+ expect(
+ findModal()
+ .findByText(`Are you sure you want to leave "${member.source.fullName}"?`)
+ .exists(),
+ ).toBe(true);
+ });
});
- it('displays modal body', () => {
- expect(getByText(`Are you sure you want to leave "${member.source.fullName}"?`).exists()).toBe(
- true,
- );
+ describe('when leave is blocked by last owner', () => {
+ const permissions = {
+ canRemove: false,
+ canRemoveBlockedByLastOwner: true,
+ };
+
+ it('does not show primary action button', async () => {
+ await createComponent({
+ permissions,
+ });
+
+ expect(findModal().props('actionPrimary')).toBe(null);
+ });
+
+ it('displays modal title', async () => {
+ await createComponent({
+ permissions,
+ });
+
+ expect(findModal().findByText(`Cannot leave "${member.source.fullName}"`).exists()).toBe(
+ true,
+ );
+ });
+
+ describe('when member model type is `GroupMember`', () => {
+ it('displays modal body', async () => {
+ await createComponent({
+ permissions,
+ });
+
+ expect(
+ findModal().findByText(LeaveModal.i18n.preventedBodyGroupMemberModelType).exists(),
+ ).toBe(true);
+ });
+ });
+
+ describe('when member model type is `ProjectMember`', () => {
+ it('displays modal body', async () => {
+ await createComponent({
+ member: {
+ ...member,
+ type: MEMBER_MODEL_TYPE_PROJECT_MEMBER,
+ },
+ permissions,
+ });
+
+ expect(
+ findModal().findByText(LeaveModal.i18n.preventedBodyProjectMemberModelType).exists(),
+ ).toBe(true);
+ });
+ });
});
- it('displays form with correct action and inputs', () => {
+ it('displays form with correct action and inputs', async () => {
+ await createComponent();
+
const form = findForm();
expect(form.attributes('action')).toBe('/groups/foo-bar/-/group_members/leave');
@@ -89,7 +154,9 @@ describe('LeaveModal', () => {
});
describe('User deletion obstacles list', () => {
- it("displays obstacles list when member's user is part of on-call management", () => {
+ it("displays obstacles list when member's user is part of on-call management", async () => {
+ await createComponent();
+
const obstaclesList = findUserDeletionObstaclesList();
expect(obstaclesList.exists()).toBe(true);
expect(obstaclesList.props()).toMatchObject({
@@ -105,17 +172,18 @@ describe('LeaveModal', () => {
delete memberWithoutOncall.user.oncallSchedules;
delete memberWithoutOncall.user.escalationPolicies;
- createComponent({ member: memberWithoutOncall });
- await nextTick();
+ await createComponent({ member: memberWithoutOncall });
expect(findUserDeletionObstaclesList().exists()).toBe(false);
});
});
- it('submits the form when "Leave" button is clicked', () => {
+ it('submits the form when "Leave" button is clicked', async () => {
+ await createComponent();
+
const submitSpy = jest.spyOn(findForm().element, 'submit');
- getByText('Leave').trigger('click');
+ findModal().findByText('Leave').trigger('click');
expect(submitSpy).toHaveBeenCalled();
diff --git a/spec/frontend/members/components/modals/remove_member_modal_spec.js b/spec/frontend/members/components/modals/remove_member_modal_spec.js
index 59b112492b8..47a03b5083a 100644
--- a/spec/frontend/members/components/modals/remove_member_modal_spec.js
+++ b/spec/frontend/members/components/modals/remove_member_modal_spec.js
@@ -3,7 +3,11 @@ import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
import RemoveMemberModal from '~/members/components/modals/remove_member_modal.vue';
-import { MEMBER_TYPES } from '~/members/constants';
+import {
+ MEMBER_TYPES,
+ MEMBER_MODEL_TYPE_GROUP_MEMBER,
+ MEMBER_MODEL_TYPE_PROJECT_MEMBER,
+} from '~/members/constants';
import { OBSTACLE_TYPES } from '~/vue_shared/components/user_deletion_obstacles/constants';
import UserDeletionObstaclesList from '~/vue_shared/components/user_deletion_obstacles/user_deletion_obstacles_list.vue';
@@ -55,16 +59,16 @@ describe('RemoveMemberModal', () => {
});
describe.each`
- state | memberType | isAccessRequest | isInvite | actionText | removeSubMembershipsCheckboxExpected | unassignIssuablesCheckboxExpected | message | userDeletionObstacles | isPartOfOncall
- ${'removing a group member'} | ${'GroupMember'} | ${false} | ${false} | ${'Remove member'} | ${true} | ${true} | ${'Are you sure you want to remove Jane Doe from the Gitlab Org / Gitlab Test project?'} | ${{}} | ${false}
- ${'removing a project member'} | ${'ProjectMember'} | ${false} | ${false} | ${'Remove member'} | ${false} | ${true} | ${'Are you sure you want to remove Jane Doe from the Gitlab Org / Gitlab Test project?'} | ${mockObstacles} | ${true}
- ${'denying an access request'} | ${'ProjectMember'} | ${true} | ${false} | ${'Deny access request'} | ${false} | ${false} | ${"Are you sure you want to deny Jane Doe's request to join the Gitlab Org / Gitlab Test project?"} | ${{}} | ${false}
- ${'revoking invite'} | ${'ProjectMember'} | ${false} | ${true} | ${'Revoke invite'} | ${false} | ${false} | ${'Are you sure you want to revoke the invitation for foo@bar.com to join the Gitlab Org / Gitlab Test project?'} | ${mockObstacles} | ${false}
+ state | memberModelType | isAccessRequest | isInvite | actionText | removeSubMembershipsCheckboxExpected | unassignIssuablesCheckboxExpected | message | userDeletionObstacles | isPartOfOncall
+ ${'removing a group member'} | ${MEMBER_MODEL_TYPE_GROUP_MEMBER} | ${false} | ${false} | ${'Remove member'} | ${true} | ${true} | ${'Are you sure you want to remove Jane Doe from the Gitlab Org / Gitlab Test project?'} | ${{}} | ${false}
+ ${'removing a project member'} | ${MEMBER_MODEL_TYPE_PROJECT_MEMBER} | ${false} | ${false} | ${'Remove member'} | ${false} | ${true} | ${'Are you sure you want to remove Jane Doe from the Gitlab Org / Gitlab Test project?'} | ${mockObstacles} | ${true}
+ ${'denying an access request'} | ${MEMBER_MODEL_TYPE_PROJECT_MEMBER} | ${true} | ${false} | ${'Deny access request'} | ${false} | ${false} | ${"Are you sure you want to deny Jane Doe's request to join the Gitlab Org / Gitlab Test project?"} | ${{}} | ${false}
+ ${'revoking invite'} | ${MEMBER_MODEL_TYPE_PROJECT_MEMBER} | ${false} | ${true} | ${'Revoke invite'} | ${false} | ${false} | ${'Are you sure you want to revoke the invitation for foo@bar.com to join the Gitlab Org / Gitlab Test project?'} | ${mockObstacles} | ${false}
`(
'when $state',
({
actionText,
- memberType,
+ memberModelType,
isAccessRequest,
isInvite,
message,
@@ -79,7 +83,7 @@ describe('RemoveMemberModal', () => {
isInvite,
message,
memberPath,
- memberType,
+ memberModelType,
userDeletionObstacles,
});
});
@@ -133,4 +137,28 @@ describe('RemoveMemberModal', () => {
});
},
);
+
+ describe('when removal is prevented', () => {
+ const message =
+ 'A group must have at least one owner. To remove the member, assign a new owner.';
+
+ beforeEach(() => {
+ createComponent({
+ actionText: 'Remove member',
+ memberModelType: MEMBER_MODEL_TYPE_GROUP_MEMBER,
+ isAccessRequest: false,
+ isInvite: false,
+ message,
+ preventRemoval: true,
+ });
+ });
+
+ it('does not show primary action button', () => {
+ expect(findGlModal().props('actionPrimary')).toBe(null);
+ });
+
+ it('only shows the message', () => {
+ expect(findGlModal().text()).toBe(message);
+ });
+ });
});
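// Sketch (not part of the patch): the `preventRemoval` rule asserted above, as a pure helper.
// The component reads this data from the Vuex store, and the button attributes here are
// illustrative only.
const actionPrimaryFor = ({ actionText, preventRemoval }) =>
  // With removal prevented (e.g. last group owner), the modal drops its primary button and
  // shows only the explanatory message.
  preventRemoval ? null : { text: actionText, attributes: { variant: 'danger' } };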
diff --git a/spec/frontend/members/components/table/__snapshots__/member_activity_spec.js.snap b/spec/frontend/members/components/table/__snapshots__/member_activity_spec.js.snap
new file mode 100644
index 00000000000..a0d9bae8a0b
--- /dev/null
+++ b/spec/frontend/members/components/table/__snapshots__/member_activity_spec.js.snap
@@ -0,0 +1,61 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`MemberActivity with a member that does not have all of the fields renders only the \`Access granted\` field 1`] = `
+<div>
+ <!---->
+
+ <div>
+ <strong>
+ Access granted:
+ </strong>
+
+ <span>
+
+ Aug 06, 2020
+
+ </span>
+ </div>
+
+ <!---->
+</div>
+`;
+
+exports[`MemberActivity with a member that has all fields renders \`User created\`, \`Access granted\`, and \`Last activity\` fields 1`] = `
+<div>
+ <div>
+ <strong>
+ User created:
+ </strong>
+
+ <span>
+
+ Mar 10, 2022
+
+ </span>
+ </div>
+
+ <div>
+ <strong>
+ Access granted:
+ </strong>
+
+ <span>
+
+ Jul 17, 2020
+
+ </span>
+ </div>
+
+ <div>
+ <strong>
+ Last activity:
+ </strong>
+
+ <span>
+
+ Mar 15, 2022
+
+ </span>
+ </div>
+</div>
+`;
diff --git a/spec/frontend/members/components/table/created_at_spec.js b/spec/frontend/members/components/table/created_at_spec.js
index 793c122587d..fa31177564b 100644
--- a/spec/frontend/members/components/table/created_at_spec.js
+++ b/spec/frontend/members/components/table/created_at_spec.js
@@ -1,20 +1,18 @@
-import { within } from '@testing-library/dom';
-import { mount, createWrapper } from '@vue/test-utils';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import { useFakeDate } from 'helpers/fake_date';
import CreatedAt from '~/members/components/table/created_at.vue';
-import TimeAgoTooltip from '~/vue_shared/components/time_ago_tooltip.vue';
describe('CreatedAt', () => {
// March 15th, 2020
useFakeDate(2020, 2, 15);
const date = '2020-03-01T00:00:00.000';
- const dateTimeAgo = '2 weeks ago';
+ const formattedDate = 'Mar 01, 2020';
let wrapper;
const createComponent = (propsData) => {
- wrapper = mount(CreatedAt, {
+ wrapper = mountExtended(CreatedAt, {
propsData: {
date,
...propsData,
@@ -22,9 +20,6 @@ describe('CreatedAt', () => {
});
};
- const getByText = (text, options) =>
- createWrapper(within(wrapper.element).getByText(text, options));
-
afterEach(() => {
wrapper.destroy();
});
@@ -35,11 +30,7 @@ describe('CreatedAt', () => {
});
it('displays created at text', () => {
- expect(getByText(dateTimeAgo).exists()).toBe(true);
- });
-
- it('uses `TimeAgoTooltip` component to display tooltip', () => {
- expect(wrapper.findComponent(TimeAgoTooltip).exists()).toBe(true);
+ expect(wrapper.findByText(formattedDate).exists()).toBe(true);
});
});
@@ -52,7 +43,7 @@ describe('CreatedAt', () => {
},
});
- const link = getByText('Administrator');
+ const link = wrapper.findByRole('link', { name: 'Administrator' });
expect(link.exists()).toBe(true);
expect(link.attributes('href')).toBe('https://gitlab.com/root');
diff --git a/spec/frontend/members/components/table/member_action_buttons_spec.js b/spec/frontend/members/components/table/member_action_buttons_spec.js
index 03cfc6ca0f6..402a5e9db27 100644
--- a/spec/frontend/members/components/table/member_action_buttons_spec.js
+++ b/spec/frontend/members/components/table/member_action_buttons_spec.js
@@ -2,7 +2,7 @@ import { shallowMount } from '@vue/test-utils';
import AccessRequestActionButtons from '~/members/components/action_buttons/access_request_action_buttons.vue';
import GroupActionButtons from '~/members/components/action_buttons/group_action_buttons.vue';
import InviteActionButtons from '~/members/components/action_buttons/invite_action_buttons.vue';
-import UserActionButtons from '~/members/components/action_buttons/user_action_buttons.vue';
+import UserActionDropdown from '~/members/components/action_dropdowns/user_action_dropdown.vue';
import MemberActionButtons from '~/members/components/table/member_action_buttons.vue';
import { MEMBER_TYPES } from '~/members/constants';
import { member as memberMock, group, invite, accessRequest } from '../../mock_data';
@@ -29,7 +29,7 @@ describe('MemberActionButtons', () => {
it.each`
memberType | member | expectedComponent | expectedComponentName
- ${MEMBER_TYPES.user} | ${memberMock} | ${UserActionButtons} | ${'UserActionButtons'}
+ ${MEMBER_TYPES.user} | ${memberMock} | ${UserActionDropdown} | ${'UserActionDropdown'}
${MEMBER_TYPES.group} | ${group} | ${GroupActionButtons} | ${'GroupActionButtons'}
${MEMBER_TYPES.invite} | ${invite} | ${InviteActionButtons} | ${'InviteActionButtons'}
${MEMBER_TYPES.accessRequest} | ${accessRequest} | ${AccessRequestActionButtons} | ${'AccessRequestActionButtons'}
diff --git a/spec/frontend/members/components/table/member_activity_spec.js b/spec/frontend/members/components/table/member_activity_spec.js
new file mode 100644
index 00000000000..a372b40fd1f
--- /dev/null
+++ b/spec/frontend/members/components/table/member_activity_spec.js
@@ -0,0 +1,40 @@
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import MemberActivity from '~/members/components/table/member_activity.vue';
+import { member as memberMock, group as groupLinkMock } from '../../mock_data';
+
+describe('MemberActivity', () => {
+ let wrapper;
+
+ const defaultPropsData = {
+ member: memberMock,
+ };
+
+ const createComponent = ({ propsData = {} } = {}) => {
+ wrapper = mountExtended(MemberActivity, {
+ propsData: {
+ ...defaultPropsData,
+ ...propsData,
+ },
+ });
+ };
+
+ describe('with a member that has all fields', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders `User created`, `Access granted`, and `Last activity` fields', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+ });
+
+ describe('with a member that does not have all of the fields', () => {
+ beforeEach(() => {
+ createComponent({ propsData: { member: groupLinkMock } });
+ });
+
+ it('renders only the `Access granted` field', () => {
+ expect(wrapper.element).toMatchSnapshot();
+ });
+ });
+});
diff --git a/spec/frontend/members/components/table/member_source_spec.js b/spec/frontend/members/components/table/member_source_spec.js
index 2cd888207b1..fbfd0ca7ae7 100644
--- a/spec/frontend/members/components/table/member_source_spec.js
+++ b/spec/frontend/members/components/table/member_source_spec.js
@@ -1,19 +1,25 @@
-import { getByText as getByTextHelper } from '@testing-library/dom';
-import { mount, createWrapper } from '@vue/test-utils';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import MemberSource from '~/members/components/table/member_source.vue';
describe('MemberSource', () => {
let wrapper;
+ const memberSource = {
+ id: 102,
+ fullName: 'Foo bar',
+ webUrl: 'https://gitlab.com/groups/foo-bar',
+ };
+
+ const createdBy = {
+ name: 'Administrator',
+ webUrl: 'https://gitlab.com/root',
+ };
+
const createComponent = (propsData) => {
- wrapper = mount(MemberSource, {
+ wrapper = mountExtended(MemberSource, {
propsData: {
- memberSource: {
- id: 102,
- fullName: 'Foo bar',
- webUrl: 'https://gitlab.com/groups/foo-bar',
- },
+ memberSource,
...propsData,
},
directives: {
@@ -22,9 +28,6 @@ describe('MemberSource', () => {
});
};
- const getByText = (text, options) =>
- createWrapper(getByTextHelper(wrapper.element, text, options));
-
const getTooltipDirective = (elementWrapper) => getBinding(elementWrapper.element, 'gl-tooltip');
afterEach(() => {
@@ -32,40 +35,69 @@ describe('MemberSource', () => {
});
describe('direct member', () => {
- it('displays "Direct member"', () => {
- createComponent({
- isDirectMember: true,
+ describe('when created by is available', () => {
+ it('displays "Direct member by <user name>"', () => {
+ createComponent({
+ isDirectMember: true,
+ createdBy,
+ });
+
+ expect(wrapper.text()).toBe('Direct member by Administrator');
+ expect(wrapper.findByRole('link', { name: createdBy.name }).attributes('href')).toBe(
+ createdBy.webUrl,
+ );
});
+ });
- expect(getByText('Direct member').exists()).toBe(true);
+ describe('when created by is not available', () => {
+ it('displays "Direct member"', () => {
+ createComponent({
+ isDirectMember: true,
+ });
+
+ expect(wrapper.text()).toBe('Direct member');
+ });
});
});
describe('inherited member', () => {
- let sourceGroupLink;
-
- beforeEach(() => {
- createComponent({
- isDirectMember: false,
+ describe('when created by is available', () => {
+ beforeEach(() => {
+ createComponent({
+ isDirectMember: false,
+ createdBy,
+ });
});
- sourceGroupLink = getByText('Foo bar');
+ it('displays "<group name> by <user name>"', () => {
+ expect(wrapper.text()).toBe('Foo bar by Administrator');
+ expect(wrapper.findByRole('link', { name: memberSource.fullName }).attributes('href')).toBe(
+ memberSource.webUrl,
+ );
+ expect(wrapper.findByRole('link', { name: createdBy.name }).attributes('href')).toBe(
+ createdBy.webUrl,
+ );
+ });
});
- it('displays a link to source group', () => {
- createComponent({
- isDirectMember: false,
+ describe('when created by is not available', () => {
+ beforeEach(() => {
+ createComponent({
+ isDirectMember: false,
+ });
});
- expect(sourceGroupLink.exists()).toBe(true);
- expect(sourceGroupLink.attributes('href')).toBe('https://gitlab.com/groups/foo-bar');
- });
+ it('displays a link to source group', () => {
+ expect(wrapper.text()).toBe(memberSource.fullName);
+ expect(wrapper.attributes('href')).toBe(memberSource.webUrl);
+ });
- it('displays tooltip with "Inherited"', () => {
- const tooltipDirective = getTooltipDirective(sourceGroupLink);
+ it('displays tooltip with "Inherited"', () => {
+ const tooltipDirective = getTooltipDirective(wrapper);
- expect(tooltipDirective).not.toBeUndefined();
- expect(sourceGroupLink.attributes('title')).toBe('Inherited');
+ expect(tooltipDirective).not.toBeUndefined();
+ expect(tooltipDirective.value).toBe('Inherited');
+ });
});
});
});
diff --git a/spec/frontend/members/components/table/members_table_cell_spec.js b/spec/frontend/members/components/table/members_table_cell_spec.js
index 0b0140b0cdb..ac5d83d028d 100644
--- a/spec/frontend/members/components/table/members_table_cell_spec.js
+++ b/spec/frontend/members/components/table/members_table_cell_spec.js
@@ -3,6 +3,7 @@ import Vue from 'vue';
import Vuex from 'vuex';
import MembersTableCell from '~/members/components/table/members_table_cell.vue';
import { MEMBER_TYPES } from '~/members/constants';
+import { canRemoveBlockedByLastOwner } from '~/members/utils';
import {
member as memberMock,
directMember,
@@ -12,6 +13,11 @@ import {
accessRequest,
} from '../../mock_data';
+jest.mock('~/members/utils', () => ({
+ ...jest.requireActual('~/members/utils'),
+ canRemoveBlockedByLastOwner: jest.fn().mockImplementation(() => true),
+}));
+
describe('MembersTableCell', () => {
const WrappedComponent = {
props: {
@@ -55,6 +61,7 @@ describe('MembersTableCell', () => {
provide: {
sourceId: 1,
currentUserId: 1,
+ canManageMembers: true,
},
scopedSlots: {
default: `
@@ -179,6 +186,15 @@ describe('MembersTableCell', () => {
});
});
+ describe('canRemoveBlockedByLastOwner', () => {
+ it('calls util and returns value', () => {
+ createComponentWithDirectMember();
+
+ expect(canRemoveBlockedByLastOwner).toHaveBeenCalledWith(directMember, true);
+ expect(findWrappedComponent().props('permissions').canRemoveBlockedByLastOwner).toBe(true);
+ });
+ });
+
describe('canResend', () => {
describe('when member type is `invite`', () => {
it('returns `true` when `canResend` is `true`', () => {
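// Sketch (not part of the patch): how the cell presumably derives the permission asserted above,
// combining the (mocked) util with the injected `canManageMembers` flag.
import { canRemoveBlockedByLastOwner } from '~/members/utils';

const cellPermissions = (member, canManageMembers) => ({
  canRemoveBlockedByLastOwner: canRemoveBlockedByLastOwner(member, canManageMembers),
});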
diff --git a/spec/frontend/members/components/table/members_table_spec.js b/spec/frontend/members/components/table/members_table_spec.js
index 0ed01396fcb..1d18026a410 100644
--- a/spec/frontend/members/components/table/members_table_spec.js
+++ b/spec/frontend/members/components/table/members_table_spec.js
@@ -8,9 +8,9 @@ import ExpirationDatepicker from '~/members/components/table/expiration_datepick
import MemberActionButtons from '~/members/components/table/member_action_buttons.vue';
import MemberAvatar from '~/members/components/table/member_avatar.vue';
import MemberSource from '~/members/components/table/member_source.vue';
+import MemberActivity from '~/members/components/table/member_activity.vue';
import MembersTable from '~/members/components/table/members_table.vue';
import RoleDropdown from '~/members/components/table/role_dropdown.vue';
-import UserDate from '~/vue_shared/components/user_date.vue';
import {
MEMBER_TYPES,
MEMBER_STATE_CREATED,
@@ -63,6 +63,7 @@ describe('MembersTable', () => {
provide: {
sourceId: 1,
currentUserId: 1,
+ canManageMembers: true,
namespace: MEMBER_TYPES.invite,
...provide,
},
@@ -106,16 +107,14 @@ describe('MembersTable', () => {
};
it.each`
- field | label | member | expectedComponent
- ${'account'} | ${'Account'} | ${memberMock} | ${MemberAvatar}
- ${'source'} | ${'Source'} | ${memberMock} | ${MemberSource}
- ${'granted'} | ${'Access granted'} | ${memberMock} | ${CreatedAt}
- ${'invited'} | ${'Invited'} | ${invite} | ${CreatedAt}
- ${'requested'} | ${'Requested'} | ${accessRequest} | ${CreatedAt}
- ${'maxRole'} | ${'Max role'} | ${memberCanUpdate} | ${RoleDropdown}
- ${'expiration'} | ${'Expiration'} | ${memberMock} | ${ExpirationDatepicker}
- ${'userCreatedAt'} | ${'Created on'} | ${memberMock} | ${UserDate}
- ${'lastActivityOn'} | ${'Last activity'} | ${memberMock} | ${UserDate}
+ field | label | member | expectedComponent
+ ${'account'} | ${'Account'} | ${memberMock} | ${MemberAvatar}
+ ${'source'} | ${'Source'} | ${memberMock} | ${MemberSource}
+ ${'invited'} | ${'Invited'} | ${invite} | ${CreatedAt}
+ ${'requested'} | ${'Requested'} | ${accessRequest} | ${CreatedAt}
+ ${'maxRole'} | ${'Max role'} | ${memberCanUpdate} | ${RoleDropdown}
+ ${'expiration'} | ${'Expiration'} | ${memberMock} | ${ExpirationDatepicker}
+ ${'activity'} | ${'Activity'} | ${memberMock} | ${MemberActivity}
`('renders the $label field', ({ field, label, member, expectedComponent }) => {
createComponent({
members: [member],
@@ -202,16 +201,23 @@ describe('MembersTable', () => {
canRemove: true,
};
+ const memberCanRemoveBlockedLastOwner = {
+ ...directMember,
+ canRemove: false,
+ isLastOwner: true,
+ };
+
const memberNoPermissions = {
...memberMock,
id: 2,
};
describe.each`
- permission | members
- ${'canUpdate'} | ${[memberNoPermissions, memberCanUpdate]}
- ${'canRemove'} | ${[memberNoPermissions, memberCanRemove]}
- ${'canResend'} | ${[memberNoPermissions, invite]}
+ permission | members
+ ${'canUpdate'} | ${[memberNoPermissions, memberCanUpdate]}
+ ${'canRemove'} | ${[memberNoPermissions, memberCanRemove]}
+ ${'canRemoveBlockedByLastOwner'} | ${[memberNoPermissions, memberCanRemoveBlockedLastOwner]}
+ ${'canResend'} | ${[memberNoPermissions, invite]}
`('when one of the members has $permission permissions', ({ members }) => {
it('renders the "Actions" field', () => {
createComponent({ members, tableFields: ['actions'] });
@@ -230,10 +236,11 @@ describe('MembersTable', () => {
});
describe.each`
- permission | members
- ${'canUpdate'} | ${[memberMock]}
- ${'canRemove'} | ${[memberMock]}
- ${'canResend'} | ${[{ ...invite, invite: { ...invite.invite, canResend: false } }]}
+ permission | members
+ ${'canUpdate'} | ${[memberMock]}
+ ${'canRemove'} | ${[memberMock]}
+ ${'canRemoveBlockedByLastOwner'} | ${[memberMock]}
+ ${'canResend'} | ${[{ ...invite, invite: { ...invite.invite, canResend: false } }]}
`('when none of the members have $permission permissions', ({ members }) => {
it('does not render the "Actions" field', () => {
createComponent({ members, tableFields: ['actions'] });
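// Sketch (not part of the patch): the rule the two describe.each blocks above encode — the
// "Actions" column renders only if at least one member grants some action. The helper name and
// the exact fields consulted are assumptions, simplified from the real computed property.
const showActionsField = (members) =>
  members.some(
    (member) =>
      member.canUpdate ||
      member.canRemove ||
      member.canRemoveBlockedByLastOwner ||
      member.invite?.canResend,
  );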
diff --git a/spec/frontend/members/components/table/role_dropdown_spec.js b/spec/frontend/members/components/table/role_dropdown_spec.js
index b254cce4d72..a11f67be8f5 100644
--- a/spec/frontend/members/components/table/role_dropdown_spec.js
+++ b/spec/frontend/members/components/table/role_dropdown_spec.js
@@ -4,11 +4,14 @@ import { within } from '@testing-library/dom';
import { mount, createWrapper } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
+import waitForPromises from 'helpers/wait_for_promises';
import RoleDropdown from '~/members/components/table/role_dropdown.vue';
import { MEMBER_TYPES } from '~/members/constants';
+import { guestOverageConfirmAction } from 'ee_else_ce/members/guest_overage_confirm_action';
import { member } from '../../mock_data';
Vue.use(Vuex);
+jest.mock('ee_else_ce/members/guest_overage_confirm_action');
describe('RoleDropdown', () => {
let wrapper;
@@ -33,6 +36,10 @@ describe('RoleDropdown', () => {
wrapper = mount(RoleDropdown, {
provide: {
namespace: MEMBER_TYPES.user,
+ group: {
+ name: 'groupname',
+ path: '/grouppath/',
+ },
},
propsData: {
member,
@@ -63,12 +70,21 @@ describe('RoleDropdown', () => {
const findDropdownToggle = () => wrapper.find('button[aria-haspopup="true"]');
const findDropdown = () => wrapper.findComponent(GlDropdown);
+ let originalGon;
+
+ beforeEach(() => {
+ originalGon = window.gon;
+ window.gon = { ...originalGon, features: { showOverageOnRolePromotion: true } };
+ });
+
afterEach(() => {
+ window.gon = originalGon;
wrapper.destroy();
});
describe('when dropdown is open', () => {
beforeEach(() => {
+ guestOverageConfirmAction.mockReturnValue(true);
createComponent();
return findDropdownToggle().trigger('click');
@@ -113,12 +129,16 @@ describe('RoleDropdown', () => {
expect($toast.show).toHaveBeenCalledWith('Role updated successfully.');
});
- it('disables dropdown while waiting for `updateMemberRole` to resolve', async () => {
+ it('puts dropdown in loading state while waiting for `updateMemberRole` to resolve', async () => {
await getDropdownItemByText('Developer').trigger('click');
- expect(findDropdown().props('disabled')).toBe(true);
+ expect(findDropdown().props('loading')).toBe(true);
+ });
+
+ it('enables dropdown after `updateMemberRole` resolves', async () => {
+ await getDropdownItemByText('Developer').trigger('click');
- await nextTick();
+ await waitForPromises();
expect(findDropdown().props('disabled')).toBe(false);
});
@@ -148,4 +168,44 @@ describe('RoleDropdown', () => {
expect(findDropdown().props('right')).toBe(false);
});
+
+ describe('guestOverageConfirmAction', () => {
+ const mockConfirmAction = ({ confirmed }) => {
+ guestOverageConfirmAction.mockResolvedValueOnce(confirmed);
+ };
+
+ beforeEach(() => {
+ createComponent();
+
+ findDropdownToggle().trigger('click');
+ });
+
+ afterEach(() => {
+ guestOverageConfirmAction.mockReset();
+ });
+
+ describe('when guestOverageConfirmAction returns true', () => {
+ beforeEach(() => {
+ mockConfirmAction({ confirmed: true });
+
+ getDropdownItemByText('Reporter').trigger('click');
+ });
+
+ it('calls updateMemberRole', () => {
+ expect(actions.updateMemberRole).toHaveBeenCalled();
+ });
+ });
+
+ describe('when guestOverageConfirmAction returns false', () => {
+ beforeEach(() => {
+ mockConfirmAction({ confirmed: false });
+
+ getDropdownItemByText('Reporter').trigger('click');
+ });
+
+ it('does not call updateMemberRole', () => {
+ expect(actions.updateMemberRole).not.toHaveBeenCalled();
+ });
+ });
+ });
});
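// Sketch (not part of the patch): the role-change flow these specs pin down, as a standalone
// async handler. Everything except `guestOverageConfirmAction`, `updateMemberRole` and the toast
// text is an assumption about how the component wires things together.
import { guestOverageConfirmAction } from 'ee_else_ce/members/guest_overage_confirm_action';

async function handleRoleSelect({ member, accessLevel, setLoading, updateMemberRole, toast }) {
  // EE may ask for confirmation before promoting a guest; the CE implementation allows it.
  const confirmed = await guestOverageConfirmAction();
  if (!confirmed) return;

  setLoading(true);
  try {
    await updateMemberRole({ memberId: member.id, accessLevel });
    toast('Role updated successfully.');
  } finally {
    setLoading(false);
  }
}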
diff --git a/spec/frontend/members/guest_overage_confirm_action_spec.js b/spec/frontend/members/guest_overage_confirm_action_spec.js
new file mode 100644
index 00000000000..d7ab54fa13b
--- /dev/null
+++ b/spec/frontend/members/guest_overage_confirm_action_spec.js
@@ -0,0 +1,7 @@
+import { guestOverageConfirmAction } from '~/members/guest_overage_confirm_action';
+
+describe('guestOverageConfirmAction', () => {
+ it('returns true', () => {
+ expect(guestOverageConfirmAction()).toBe(true);
+ });
+});
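// Sketch (not part of the patch): the CE module this one-case spec describes is presumably a
// stub that always allows the role change, with EE overriding it through the `ee_else_ce` alias.
export const guestOverageConfirmAction = () => true;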
diff --git a/spec/frontend/members/mock_data.js b/spec/frontend/members/mock_data.js
index 49c4c46c3ac..161e96c0c48 100644
--- a/spec/frontend/members/mock_data.js
+++ b/spec/frontend/members/mock_data.js
@@ -1,4 +1,8 @@
-import { MEMBER_TYPES, MEMBER_STATE_CREATED } from '~/members/constants';
+import {
+ MEMBER_TYPES,
+ MEMBER_STATE_CREATED,
+ MEMBER_MODEL_TYPE_GROUP_MEMBER,
+} from '~/members/constants';
export const member = {
requestedAt: null,
@@ -13,7 +17,7 @@ export const member = {
fullName: 'Foo Bar',
webUrl: 'https://gitlab.com/groups/foo-bar',
},
- type: 'GroupMember',
+ type: MEMBER_MODEL_TYPE_GROUP_MEMBER,
state: MEMBER_STATE_CREATED,
user: {
id: 123,
@@ -69,7 +73,7 @@ export const modalData = {
isAccessRequest: true,
isInvite: true,
memberPath: '/groups/foo-bar/-/group_members/1',
- memberType: 'GroupMember',
+ memberModelType: MEMBER_MODEL_TYPE_GROUP_MEMBER,
message: 'Are you sure you want to remove John Smith?',
userDeletionObstacles: { name: 'user', obstacles: [] },
};
@@ -123,7 +127,15 @@ export const dataAttribute = JSON.stringify({
pagination: paginationData,
member_path: '/groups/foo-bar/-/group_members/:id',
ldap_override_path: '/groups/ldap-group/-/group_members/:id/override',
+ disable_two_factor_path: '/groups/ldap-group/-/two_factor_auth',
},
source_id: 234,
can_manage_members: true,
});
+
+export const permissions = {
+ canRemove: true,
+ canRemoveBlockedByLastOwner: false,
+ canResend: true,
+ canUpdate: true,
+};
diff --git a/spec/frontend/members/store/actions_spec.js b/spec/frontend/members/store/actions_spec.js
index 20dce639177..38214048b23 100644
--- a/spec/frontend/members/store/actions_spec.js
+++ b/spec/frontend/members/store/actions_spec.js
@@ -4,7 +4,7 @@ import { noop } from 'lodash';
import { useFakeDate } from 'helpers/fake_date';
import testAction from 'helpers/vuex_action_helper';
import { members, group, modalData } from 'jest/members/mock_data';
-import httpStatusCodes from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import {
updateMemberRole,
showRemoveGroupLinkModal,
@@ -44,7 +44,7 @@ describe('Vuex members actions', () => {
describe('successful request', () => {
it(`commits ${types.RECEIVE_MEMBER_ROLE_SUCCESS} mutation`, async () => {
- mock.onPut().replyOnce(httpStatusCodes.OK);
+ mock.onPut().replyOnce(HTTP_STATUS_OK);
await testAction(updateMemberRole, payload, state, [
{
@@ -83,7 +83,7 @@ describe('Vuex members actions', () => {
describe('successful request', () => {
describe('changing expiration date', () => {
it(`commits ${types.RECEIVE_MEMBER_EXPIRATION_SUCCESS} mutation`, async () => {
- mock.onPut().replyOnce(httpStatusCodes.OK);
+ mock.onPut().replyOnce(HTTP_STATUS_OK);
await testAction(updateMemberExpiration, { memberId, expiresAt }, state, [
{
@@ -98,7 +98,7 @@ describe('Vuex members actions', () => {
describe('removing the expiration date', () => {
it(`commits ${types.RECEIVE_MEMBER_EXPIRATION_SUCCESS} mutation`, async () => {
- mock.onPut().replyOnce(httpStatusCodes.OK);
+ mock.onPut().replyOnce(HTTP_STATUS_OK);
await testAction(updateMemberExpiration, { memberId, expiresAt: null }, state, [
{
diff --git a/spec/frontend/members/utils_spec.js b/spec/frontend/members/utils_spec.js
index 8bef2096a2a..9f200324c02 100644
--- a/spec/frontend/members/utils_spec.js
+++ b/spec/frontend/members/utils_spec.js
@@ -13,8 +13,10 @@ import {
isDirectMember,
isCurrentUser,
canRemove,
+ canRemoveBlockedByLastOwner,
canResend,
canUpdate,
+ canDisableTwoFactor,
canOverride,
parseSortParam,
buildSortHref,
@@ -129,6 +131,17 @@ describe('Members Utils', () => {
});
});
+ describe('canRemoveBlockedByLastOwner', () => {
+ it.each`
+ member | canManageMembers | expected
+ ${{ ...directMember, isLastOwner: true }} | ${true} | ${true}
+ ${{ ...inheritedMember, isLastOwner: false }} | ${true} | ${false}
+ ${{ ...directMember, isLastOwner: true }} | ${false} | ${false}
+ `('returns $expected', ({ member, canManageMembers, expected }) => {
+ expect(canRemoveBlockedByLastOwner(member, canManageMembers)).toBe(expected);
+ });
+ });
+
describe('canResend', () => {
it.each`
member | expected
@@ -151,6 +164,19 @@ describe('Members Utils', () => {
});
});
+ describe('canDisableTwoFactor', () => {
+ it.each`
+ member | expected
+ ${{ ...memberMock, canGetTwoFactorDisabled: true }} | ${false}
+ ${{ ...memberMock, canGetTwoFactorDisabled: false }} | ${false}
+ `(
+ 'returns $expected regardless of canGetTwoFactorDisabled',
+ ({ member, expected }) => {
+ expect(canDisableTwoFactor(member)).toBe(expected);
+ },
+ );
+ });
+
describe('canOverride', () => {
it('returns `false`', () => {
expect(canOverride(memberMock)).toBe(false);
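// Sketch (not part of the patch): util behaviour consistent with the tables above. The real
// implementations may also check whether the member is a direct member; treat the bodies as
// assumptions.
export const canRemoveBlockedByLastOwner = (member, canManageMembers) =>
  canManageMembers && Boolean(member.isLastOwner);

// Both rows of the `canDisableTwoFactor` table expect `false`, matching a CE stub that EE overrides.
export const canDisableTwoFactor = () => false;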
diff --git a/spec/frontend/merge_request_tabs_spec.js b/spec/frontend/merge_request_tabs_spec.js
index 69ff5e47689..6d434d7e654 100644
--- a/spec/frontend/merge_request_tabs_spec.js
+++ b/spec/frontend/merge_request_tabs_spec.js
@@ -5,6 +5,7 @@ import initMrPage from 'helpers/init_vue_mr_page_helper';
import { stubPerformanceWebAPI } from 'helpers/performance';
import axios from '~/lib/utils/axios_utils';
import MergeRequestTabs from '~/merge_request_tabs';
+import Diff from '~/diff';
import '~/lib/utils/common_utils';
import '~/lib/utils/url_utility';
@@ -389,4 +390,73 @@ describe('MergeRequestTabs', () => {
});
});
});
+
+ describe('tabs <-> diff interactions', () => {
+ beforeEach(() => {
+ jest.spyOn(testContext.class, 'loadDiff').mockImplementation(() => {});
+ });
+
+ describe('switchViewType', () => {
+ it('marks the diffs as not yet loaded', () => {
+ testContext.class.diffsLoaded = true;
+
+ testContext.class.switchViewType({});
+
+ expect(testContext.class.diffsLoaded).toBe(false);
+ });
+
+ it('reloads the diffs', () => {
+ testContext.class.switchViewType({ source: 'a new url' });
+
+ expect(testContext.class.loadDiff).toHaveBeenCalledWith({
+ endpoint: 'a new url',
+ strip: false,
+ });
+ });
+ });
+
+ describe('createDiff', () => {
+ it("creates a Diff if there isn't one", () => {
+ expect(testContext.class.diffsClass).toBe(null);
+
+ testContext.class.createDiff();
+
+ expect(testContext.class.diffsClass).toBeInstanceOf(Diff);
+ });
+
+ it("doesn't create a Diff if one already exists", () => {
+ testContext.class.diffsClass = 'truthy';
+
+ testContext.class.createDiff();
+
+ expect(testContext.class.diffsClass).toBe('truthy');
+ });
+
+ it('sets the available MR Tabs event hub to the new Diff', () => {
+ expect(testContext.class.diffsClass).toBe(null);
+
+ testContext.class.createDiff();
+
+ expect(testContext.class.diffsClass.mrHub).toBe(testContext.class.eventHub);
+ });
+ });
+
+ describe('setHubToDiff', () => {
+ it('sets the MR Tabs event hub to the child Diff', () => {
+ testContext.class.diffsClass = {};
+
+ testContext.class.setHubToDiff();
+
+ expect(testContext.class.diffsClass.mrHub).toBe(testContext.class.eventHub);
+ });
+
+ it('does not throw if there is no child Diff', () => {
+ testContext.class.diffsClass = null;
+
+ expect(() => {
+ testContext.class.setHubToDiff();
+ }).not.toThrow();
+ });
+ });
+ });
});
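// Sketch (not part of the patch): the MergeRequestTabs behaviour these specs exercise, reduced
// to its observable effects. Property and import names follow the spec; the method bodies are
// assumptions.
import Diff from '~/diff';

class MergeRequestTabsSketch {
  constructor(eventHub) {
    this.eventHub = eventHub;
    this.diffsClass = null;
    this.diffsLoaded = false;
  }

  switchViewType({ source }) {
    // Mark diffs as stale and re-fetch them from the new endpoint.
    this.diffsLoaded = false;
    this.loadDiff({ endpoint: source, strip: false });
  }

  createDiff() {
    // Lazily create a single Diff instance and hand it the tabs' event hub.
    if (!this.diffsClass) {
      this.diffsClass = new Diff();
      this.setHubToDiff();
    }
  }

  setHubToDiff() {
    // Guarded so calling it without a child Diff does not throw.
    if (this.diffsClass) {
      this.diffsClass.mrHub = this.eventHub;
    }
  }

  loadDiff() {} // stubbed in the specs above
}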
diff --git a/spec/frontend/ml/experiment_tracking/components/__snapshots__/ml_candidate_spec.js.snap b/spec/frontend/ml/experiment_tracking/components/__snapshots__/ml_candidate_spec.js.snap
index 8af0753f929..0c3d3e78038 100644
--- a/spec/frontend/ml/experiment_tracking/components/__snapshots__/ml_candidate_spec.js.snap
+++ b/spec/frontend/ml/experiment_tracking/components/__snapshots__/ml_candidate_spec.js.snap
@@ -163,8 +163,8 @@ exports[`MlCandidate renders correctly 1`] = `
class="gl-text-secondary gl-font-weight-bold"
>
- Parameters
-
+ Parameters
+
</td>
<td
@@ -190,7 +190,6 @@ exports[`MlCandidate renders correctly 1`] = `
3
</td>
</tr>
-
<tr
class="divider"
/>
@@ -200,8 +199,8 @@ exports[`MlCandidate renders correctly 1`] = `
class="gl-text-secondary gl-font-weight-bold"
>
- Metrics
-
+ Metrics
+
</td>
<td
@@ -227,6 +226,42 @@ exports[`MlCandidate renders correctly 1`] = `
.99
</td>
</tr>
+ <tr
+ class="divider"
+ />
+
+ <tr>
+ <td
+ class="gl-text-secondary gl-font-weight-bold"
+ >
+
+ Metadata
+
+ </td>
+
+ <td
+ class="gl-font-weight-bold"
+ >
+ FileName
+ </td>
+
+ <td>
+ test.py
+ </td>
+ </tr>
+ <tr>
+ <td />
+
+ <td
+ class="gl-font-weight-bold"
+ >
+ ExecutionTime
+ </td>
+
+ <td>
+ .0856
+ </td>
+ </tr>
</tbody>
</table>
</div>
diff --git a/spec/frontend/ml/experiment_tracking/components/__snapshots__/ml_experiment_spec.js.snap b/spec/frontend/ml/experiment_tracking/components/__snapshots__/ml_experiment_spec.js.snap
index e253a0afc6c..3ee2c1cc075 100644
--- a/spec/frontend/ml/experiment_tracking/components/__snapshots__/ml_experiment_spec.js.snap
+++ b/spec/frontend/ml/experiment_tracking/components/__snapshots__/ml_experiment_spec.js.snap
@@ -95,8 +95,8 @@ exports[`MlExperiment with candidates renders correctly 1`] = `
<table
aria-busy="false"
- aria-colcount="6"
- class="table b-table gl-table gl-mt-0!"
+ aria-colcount="9"
+ class="table b-table gl-table gl-mt-0! ml-candidate-table table-sm"
role="table"
>
<!---->
@@ -117,7 +117,7 @@ exports[`MlExperiment with candidates renders correctly 1`] = `
scope="col"
>
<div>
- L1 Ratio
+ Name
</div>
</th>
<th
@@ -127,7 +127,7 @@ exports[`MlExperiment with candidates renders correctly 1`] = `
scope="col"
>
<div>
- Rmse
+ Created at
</div>
</th>
<th
@@ -137,7 +137,7 @@ exports[`MlExperiment with candidates renders correctly 1`] = `
scope="col"
>
<div>
- Auc
+ User
</div>
</th>
<th
@@ -147,11 +147,41 @@ exports[`MlExperiment with candidates renders correctly 1`] = `
scope="col"
>
<div>
- Mae
+ L1 Ratio
</div>
</th>
<th
aria-colindex="5"
+ class=""
+ role="columnheader"
+ scope="col"
+ >
+ <div>
+ Rmse
+ </div>
+ </th>
+ <th
+ aria-colindex="6"
+ class=""
+ role="columnheader"
+ scope="col"
+ >
+ <div>
+ Auc
+ </div>
+ </th>
+ <th
+ aria-colindex="7"
+ class=""
+ role="columnheader"
+ scope="col"
+ >
+ <div>
+ Mae
+ </div>
+ </th>
+ <th
+ aria-colindex="8"
aria-label="Details"
class=""
role="columnheader"
@@ -160,7 +190,7 @@ exports[`MlExperiment with candidates renders correctly 1`] = `
<div />
</th>
<th
- aria-colindex="6"
+ aria-colindex="9"
aria-label="Artifact"
class=""
role="columnheader"
@@ -183,39 +213,97 @@ exports[`MlExperiment with candidates renders correctly 1`] = `
class=""
role="cell"
>
- 0.4
+ <div
+ title="aCandidate"
+ >
+ aCandidate
+ </div>
</td>
<td
aria-colindex="2"
class=""
role="cell"
>
- 1
+ <time
+ class=""
+ datetime="2023-01-05T14:07:02.975Z"
+ title="2023-01-05T14:07:02.975Z"
+ >
+ in 2 years
+ </time>
</td>
<td
aria-colindex="3"
class=""
role="cell"
- />
+ >
+ <a
+ class="gl-link"
+ href="/root"
+ title="root"
+ >
+ @root
+ </a>
+ </td>
<td
aria-colindex="4"
class=""
role="cell"
- />
+ >
+ <div
+ title="0.4"
+ >
+ 0.4
+ </div>
+ </td>
<td
aria-colindex="5"
class=""
role="cell"
>
+ <div
+ title="1"
+ >
+ 1
+ </div>
+ </td>
+ <td
+ aria-colindex="6"
+ class=""
+ role="cell"
+ >
+ <div
+ title=""
+ >
+
+ </div>
+ </td>
+ <td
+ aria-colindex="7"
+ class=""
+ role="cell"
+ >
+ <div
+ title=""
+ >
+
+ </div>
+ </td>
+ <td
+ aria-colindex="8"
+ class=""
+ role="cell"
+ >
<a
class="gl-link"
href="link_to_candidate1"
+ title="Details"
>
Details
</a>
</td>
<td
- aria-colindex="6"
+ aria-colindex="9"
class=""
role="cell"
>
@@ -224,6 +312,7 @@ exports[`MlExperiment with candidates renders correctly 1`] = `
href="link_to_artifact"
rel="noopener"
target="_blank"
+ title="Artifacts"
>
Artifacts
</a>
@@ -238,47 +327,435 @@ exports[`MlExperiment with candidates renders correctly 1`] = `
class=""
role="cell"
>
- 0.5
+ <div
+ title=""
+ >
+
+ </div>
</td>
<td
aria-colindex="2"
class=""
role="cell"
- />
+ >
+ <time
+ class=""
+ datetime="2023-01-05T14:07:02.975Z"
+ title="2023-01-05T14:07:02.975Z"
+ >
+ in 2 years
+ </time>
+ </td>
<td
aria-colindex="3"
class=""
role="cell"
>
- 0.3
+ <div>
+ -
+ </div>
</td>
<td
aria-colindex="4"
class=""
role="cell"
- />
+ >
+ <div
+ title="0.5"
+ >
+ 0.5
+ </div>
+ </td>
<td
aria-colindex="5"
class=""
role="cell"
>
+ <div
+ title=""
+ >
+
+ </div>
+ </td>
+ <td
+ aria-colindex="6"
+ class=""
+ role="cell"
+ >
+ <div
+ title="0.3"
+ >
+ 0.3
+ </div>
+ </td>
+ <td
+ aria-colindex="7"
+ class=""
+ role="cell"
+ >
+ <div
+ title=""
+ >
+
+ </div>
+ </td>
+ <td
+ aria-colindex="8"
+ class=""
+ role="cell"
+ >
<a
class="gl-link"
href="link_to_candidate2"
+ title="Details"
>
Details
</a>
</td>
<td
+ aria-colindex="9"
+ class=""
+ role="cell"
+ >
+ <div
+ title="Artifacts"
+ >
+
+ -
+
+ </div>
+ </td>
+ </tr>
+ <tr
+ class=""
+ role="row"
+ >
+ <td
+ aria-colindex="1"
+ class=""
+ role="cell"
+ >
+ <div
+ title=""
+ >
+
+ </div>
+ </td>
+ <td
+ aria-colindex="2"
+ class=""
+ role="cell"
+ >
+ <time
+ class=""
+ datetime="2023-01-05T14:07:02.975Z"
+ title="2023-01-05T14:07:02.975Z"
+ >
+ in 2 years
+ </time>
+ </td>
+ <td
+ aria-colindex="3"
+ class=""
+ role="cell"
+ >
+ <div>
+ -
+ </div>
+ </td>
+ <td
+ aria-colindex="4"
+ class=""
+ role="cell"
+ >
+ <div
+ title="0.5"
+ >
+ 0.5
+ </div>
+ </td>
+ <td
+ aria-colindex="5"
+ class=""
+ role="cell"
+ >
+ <div
+ title=""
+ >
+
+ </div>
+ </td>
+ <td
aria-colindex="6"
class=""
role="cell"
- />
+ >
+ <div
+ title="0.3"
+ >
+ 0.3
+ </div>
+ </td>
+ <td
+ aria-colindex="7"
+ class=""
+ role="cell"
+ >
+ <div
+ title=""
+ >
+
+ </div>
+ </td>
+ <td
+ aria-colindex="8"
+ class=""
+ role="cell"
+ >
+ <a
+ class="gl-link"
+ href="link_to_candidate3"
+ title="Details"
+ >
+ Details
+ </a>
+ </td>
+ <td
+ aria-colindex="9"
+ class=""
+ role="cell"
+ >
+ <div
+ title="Artifacts"
+ >
+
+ -
+
+ </div>
+ </td>
+ </tr>
+ <tr
+ class=""
+ role="row"
+ >
+ <td
+ aria-colindex="1"
+ class=""
+ role="cell"
+ >
+ <div
+ title=""
+ >
+
+ </div>
+ </td>
+ <td
+ aria-colindex="2"
+ class=""
+ role="cell"
+ >
+ <time
+ class=""
+ datetime="2023-01-05T14:07:02.975Z"
+ title="2023-01-05T14:07:02.975Z"
+ >
+ in 2 years
+ </time>
+ </td>
+ <td
+ aria-colindex="3"
+ class=""
+ role="cell"
+ >
+ <div>
+ -
+ </div>
+ </td>
+ <td
+ aria-colindex="4"
+ class=""
+ role="cell"
+ >
+ <div
+ title="0.5"
+ >
+ 0.5
+ </div>
+ </td>
+ <td
+ aria-colindex="5"
+ class=""
+ role="cell"
+ >
+ <div
+ title=""
+ >
+
+ </div>
+ </td>
+ <td
+ aria-colindex="6"
+ class=""
+ role="cell"
+ >
+ <div
+ title="0.3"
+ >
+ 0.3
+ </div>
+ </td>
+ <td
+ aria-colindex="7"
+ class=""
+ role="cell"
+ >
+ <div
+ title=""
+ >
+
+ </div>
+ </td>
+ <td
+ aria-colindex="8"
+ class=""
+ role="cell"
+ >
+ <a
+ class="gl-link"
+ href="link_to_candidate4"
+ title="Details"
+ >
+ Details
+ </a>
+ </td>
+ <td
+ aria-colindex="9"
+ class=""
+ role="cell"
+ >
+ <div
+ title="Artifacts"
+ >
+
+ -
+
+ </div>
+ </td>
+ </tr>
+ <tr
+ class=""
+ role="row"
+ >
+ <td
+ aria-colindex="1"
+ class=""
+ role="cell"
+ >
+ <div
+ title=""
+ >
+
+ </div>
+ </td>
+ <td
+ aria-colindex="2"
+ class=""
+ role="cell"
+ >
+ <time
+ class=""
+ datetime="2023-01-05T14:07:02.975Z"
+ title="2023-01-05T14:07:02.975Z"
+ >
+ in 2 years
+ </time>
+ </td>
+ <td
+ aria-colindex="3"
+ class=""
+ role="cell"
+ >
+ <div>
+ -
+ </div>
+ </td>
+ <td
+ aria-colindex="4"
+ class=""
+ role="cell"
+ >
+ <div
+ title="0.5"
+ >
+ 0.5
+ </div>
+ </td>
+ <td
+ aria-colindex="5"
+ class=""
+ role="cell"
+ >
+ <div
+ title=""
+ >
+
+ </div>
+ </td>
+ <td
+ aria-colindex="6"
+ class=""
+ role="cell"
+ >
+ <div
+ title="0.3"
+ >
+ 0.3
+ </div>
+ </td>
+ <td
+ aria-colindex="7"
+ class=""
+ role="cell"
+ >
+ <div
+ title=""
+ >
+
+ </div>
+ </td>
+ <td
+ aria-colindex="8"
+ class=""
+ role="cell"
+ >
+ <a
+ class="gl-link"
+ href="link_to_candidate5"
+ title="Details"
+ >
+ Details
+ </a>
+ </td>
+ <td
+ aria-colindex="9"
+ class=""
+ role="cell"
+ >
+ <div
+ title="Artifacts"
+ >
+
+ -
+
+ </div>
+ </td>
</tr>
<!---->
<!---->
</tbody>
<!---->
</table>
+
+ <!---->
</div>
`;
diff --git a/spec/frontend/ml/experiment_tracking/components/ml_candidate_spec.js b/spec/frontend/ml/experiment_tracking/components/ml_candidate_spec.js
index 4b16312815a..fb45c4b07a4 100644
--- a/spec/frontend/ml/experiment_tracking/components/ml_candidate_spec.js
+++ b/spec/frontend/ml/experiment_tracking/components/ml_candidate_spec.js
@@ -15,6 +15,10 @@ describe('MlCandidate', () => {
{ name: 'AUC', value: '.55' },
{ name: 'Accuracy', value: '.99' },
],
+ metadata: [
+ { name: 'FileName', value: 'test.py' },
+ { name: 'ExecutionTime', value: '.0856' },
+ ],
info: {
iid: 'candidate_iid',
artifact_link: 'path_to_artifact',
diff --git a/spec/frontend/ml/experiment_tracking/components/ml_experiment_spec.js b/spec/frontend/ml/experiment_tracking/components/ml_experiment_spec.js
index 50539440f25..abcaf17303f 100644
--- a/spec/frontend/ml/experiment_tracking/components/ml_experiment_spec.js
+++ b/spec/frontend/ml/experiment_tracking/components/ml_experiment_spec.js
@@ -1,12 +1,19 @@
-import { GlAlert } from '@gitlab/ui';
+import { GlAlert, GlPagination } from '@gitlab/ui';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import MlExperiment from '~/ml/experiment_tracking/components/ml_experiment.vue';
describe('MlExperiment', () => {
let wrapper;
- const createWrapper = (candidates = [], metricNames = [], paramNames = []) => {
- return mountExtended(MlExperiment, { provide: { candidates, metricNames, paramNames } });
+ const createWrapper = (
+ candidates = [],
+ metricNames = [],
+ paramNames = [],
+ pagination = { page: 1, isLastPage: false, per_page: 2, totalItems: 0 },
+ ) => {
+ return mountExtended(MlExperiment, {
+ provide: { candidates, metricNames, paramNames, pagination },
+ });
};
const findAlert = () => wrapper.findComponent(GlAlert);
@@ -25,20 +32,110 @@ describe('MlExperiment', () => {
expect(findEmptyState().exists()).toBe(true);
});
+
+ it('does not show pagination', () => {
+ wrapper = createWrapper();
+
+ expect(wrapper.findComponent(GlPagination).exists()).toBe(false);
+ });
});
describe('with candidates', () => {
- it('renders correctly', () => {
- wrapper = createWrapper(
+ const defaultPagination = { page: 1, isLastPage: false, per_page: 2, totalItems: 5 };
+
+ const createWrapperWithCandidates = (pagination = defaultPagination) => {
+ return createWrapper(
[
- { rmse: 1, l1_ratio: 0.4, details: 'link_to_candidate1', artifact: 'link_to_artifact' },
- { auc: 0.3, l1_ratio: 0.5, details: 'link_to_candidate2' },
+ {
+ rmse: 1,
+ l1_ratio: 0.4,
+ details: 'link_to_candidate1',
+ artifact: 'link_to_artifact',
+ name: 'aCandidate',
+ created_at: '2023-01-05T14:07:02.975Z',
+ user: { username: 'root', path: '/root' },
+ },
+ {
+ auc: 0.3,
+ l1_ratio: 0.5,
+ details: 'link_to_candidate2',
+ created_at: '2023-01-05T14:07:02.975Z',
+ name: null,
+ user: null,
+ },
+ {
+ auc: 0.3,
+ l1_ratio: 0.5,
+ details: 'link_to_candidate3',
+ created_at: '2023-01-05T14:07:02.975Z',
+ name: null,
+ user: null,
+ },
+ {
+ auc: 0.3,
+ l1_ratio: 0.5,
+ details: 'link_to_candidate4',
+ created_at: '2023-01-05T14:07:02.975Z',
+ name: null,
+ user: null,
+ },
+ {
+ auc: 0.3,
+ l1_ratio: 0.5,
+ details: 'link_to_candidate5',
+ created_at: '2023-01-05T14:07:02.975Z',
+ name: null,
+ user: null,
+ },
],
['rmse', 'auc', 'mae'],
['l1_ratio'],
+ pagination,
);
+ };
+
+ it('renders correctly', () => {
+ wrapper = createWrapperWithCandidates();
expect(wrapper.element).toMatchSnapshot();
});
+
+ describe('Pagination behaviour', () => {
+ it('should show pagination', () => {
+ wrapper = createWrapperWithCandidates();
+
+ expect(wrapper.findComponent(GlPagination).exists()).toBe(true);
+ });
+
+ it('should get the page number from the URL', () => {
+ wrapper = createWrapperWithCandidates({ ...defaultPagination, page: 2 });
+
+ expect(wrapper.findComponent(GlPagination).props().value).toBe(2);
+ });
+
+ it('should not have a prevPage if the page is 1', () => {
+ wrapper = createWrapperWithCandidates();
+
+ expect(wrapper.findComponent(GlPagination).props().prevPage).toBe(null);
+ });
+
+ it('should set the prevPage to 1 if the page is 2', () => {
+ wrapper = createWrapperWithCandidates({ ...defaultPagination, page: 2 });
+
+ expect(wrapper.findComponent(GlPagination).props().prevPage).toBe(1);
+ });
+
+ it('should not have a nextPage if isLastPage is true', () => {
+ wrapper = createWrapperWithCandidates({ ...defaultPagination, isLastPage: true });
+
+ expect(wrapper.findComponent(GlPagination).props().nextPage).toBe(null);
+ });
+
+ it('should set the nextPage to 2 if the page is 1', () => {
+ wrapper = createWrapperWithCandidates();
+
+ expect(wrapper.findComponent(GlPagination).props().nextPage).toBe(2);
+ });
+ });
});
});
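// Sketch (not part of the patch): the prev/next derivation the pagination specs assert, written
// as standalone helpers; the component's actual computed properties may differ in shape.
const prevPage = ({ page }) => (page > 1 ? page - 1 : null);
const nextPage = ({ page, isLastPage }) => (isLastPage ? null : page + 1);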
diff --git a/spec/frontend/monitoring/requests/index_spec.js b/spec/frontend/monitoring/requests/index_spec.js
index def4bfe9443..cf7df3dd9d5 100644
--- a/spec/frontend/monitoring/requests/index_spec.js
+++ b/spec/frontend/monitoring/requests/index_spec.js
@@ -2,8 +2,12 @@ import MockAdapter from 'axios-mock-adapter';
import { backoffMockImplementation } from 'helpers/backoff_helper';
import axios from '~/lib/utils/axios_utils';
import * as commonUtils from '~/lib/utils/common_utils';
-import statusCodes, {
+import {
+ HTTP_STATUS_BAD_REQUEST,
HTTP_STATUS_NO_CONTENT,
+ HTTP_STATUS_OK,
+ HTTP_STATUS_SERVICE_UNAVAILABLE,
+ HTTP_STATUS_UNAUTHORIZED,
HTTP_STATUS_UNPROCESSABLE_ENTITY,
} from '~/lib/utils/http_status';
import { getDashboard, getPrometheusQueryData } from '~/monitoring/requests';
@@ -32,7 +36,7 @@ describe('monitoring metrics_requests', () => {
};
it('returns a dashboard response', () => {
- mock.onGet(dashboardEndpoint).reply(statusCodes.OK, response);
+ mock.onGet(dashboardEndpoint).reply(HTTP_STATUS_OK, response);
return getDashboard(dashboardEndpoint, params).then((data) => {
expect(data).toEqual(metricsDashboardResponse);
@@ -42,7 +46,7 @@ describe('monitoring metrics_requests', () => {
it('returns a dashboard response after retrying twice', () => {
mock.onGet(dashboardEndpoint).replyOnce(HTTP_STATUS_NO_CONTENT);
mock.onGet(dashboardEndpoint).replyOnce(HTTP_STATUS_NO_CONTENT);
- mock.onGet(dashboardEndpoint).reply(statusCodes.OK, response);
+ mock.onGet(dashboardEndpoint).reply(HTTP_STATUS_OK, response);
return getDashboard(dashboardEndpoint, params).then((data) => {
expect(data).toEqual(metricsDashboardResponse);
@@ -75,7 +79,7 @@ describe('monitoring metrics_requests', () => {
};
it('returns a dashboard response', () => {
- mock.onGet(prometheusEndpoint).reply(statusCodes.OK, response);
+ mock.onGet(prometheusEndpoint).reply(HTTP_STATUS_OK, response);
return getPrometheusQueryData(prometheusEndpoint, params).then((data) => {
expect(data).toEqual(response.data);
@@ -86,7 +90,7 @@ describe('monitoring metrics_requests', () => {
// Mock multiple attempts while the cache is filling up
mock.onGet(prometheusEndpoint).replyOnce(HTTP_STATUS_NO_CONTENT);
mock.onGet(prometheusEndpoint).replyOnce(HTTP_STATUS_NO_CONTENT);
- mock.onGet(prometheusEndpoint).reply(statusCodes.OK, response); // 3rd attempt
+ mock.onGet(prometheusEndpoint).reply(HTTP_STATUS_OK, response); // 3rd attempt
return getPrometheusQueryData(prometheusEndpoint, params).then((data) => {
expect(data).toEqual(response.data);
@@ -107,7 +111,7 @@ describe('monitoring metrics_requests', () => {
it('rejects after retrying twice and getting an HTTP 401 error', () => {
// Mock multiple attempts while the cache is filling up and fails
- mock.onGet(prometheusEndpoint).reply(statusCodes.UNAUTHORIZED, {
+ mock.onGet(prometheusEndpoint).reply(HTTP_STATUS_UNAUTHORIZED, {
status: 'error',
error: 'An error occurred',
});
@@ -134,9 +138,9 @@ describe('monitoring metrics_requests', () => {
it.each`
code | reason
- ${statusCodes.BAD_REQUEST} | ${'Parameters are missing or incorrect'}
+ ${HTTP_STATUS_BAD_REQUEST} | ${'Parameters are missing or incorrect'}
${HTTP_STATUS_UNPROCESSABLE_ENTITY} | ${"Expression can't be executed"}
- ${statusCodes.SERVICE_UNAVAILABLE} | ${'Query timed out or aborted'}
+ ${HTTP_STATUS_SERVICE_UNAVAILABLE} | ${'Query timed out or aborted'}
`('rejects with details: "$reason" after getting an HTTP $code error', ({ code, reason }) => {
mock.onGet(prometheusEndpoint).reply(code, {
status: 'error',
diff --git a/spec/frontend/monitoring/store/actions_spec.js b/spec/frontend/monitoring/store/actions_spec.js
index 93af6526c67..fbe030b1a7d 100644
--- a/spec/frontend/monitoring/store/actions_spec.js
+++ b/spec/frontend/monitoring/store/actions_spec.js
@@ -4,8 +4,10 @@ import testAction from 'helpers/vuex_action_helper';
import { createAlert } from '~/flash';
import axios from '~/lib/utils/axios_utils';
import * as commonUtils from '~/lib/utils/common_utils';
-import statusCodes, {
+import {
+ HTTP_STATUS_BAD_REQUEST,
HTTP_STATUS_CREATED,
+ HTTP_STATUS_OK,
HTTP_STATUS_UNPROCESSABLE_ENTITY,
} from '~/lib/utils/http_status';
import { ENVIRONMENT_AVAILABLE_STATE } from '~/monitoring/constants';
@@ -983,7 +985,7 @@ describe('Monitoring store actions', () => {
});
it('Failed POST request throws an error', async () => {
- mock.onPost(state.dashboardsEndpoint).reply(statusCodes.BAD_REQUEST);
+ mock.onPost(state.dashboardsEndpoint).reply(HTTP_STATUS_BAD_REQUEST);
await expect(testAction(duplicateSystemDashboard, {}, state, [], [])).rejects.toEqual(
'There was an error creating the dashboard.',
@@ -994,7 +996,7 @@ describe('Monitoring store actions', () => {
it('Failed POST request throws an error with a description', async () => {
const backendErrorMsg = 'This file already exists!';
- mock.onPost(state.dashboardsEndpoint).reply(statusCodes.BAD_REQUEST, {
+ mock.onPost(state.dashboardsEndpoint).reply(HTTP_STATUS_BAD_REQUEST, {
error: backendErrorMsg,
});
@@ -1116,7 +1118,7 @@ describe('Monitoring store actions', () => {
mock
.onPost(panelPreviewEndpoint, { panel_yaml: mockYmlContent })
- .reply(statusCodes.OK, mockPanel);
+ .reply(HTTP_STATUS_OK, mockPanel);
testAction(
fetchPanelPreview,
diff --git a/spec/frontend/monitoring/store/mutations_spec.js b/spec/frontend/monitoring/store/mutations_spec.js
index 49e8ab9ebd4..3baef743f42 100644
--- a/spec/frontend/monitoring/store/mutations_spec.js
+++ b/spec/frontend/monitoring/store/mutations_spec.js
@@ -1,5 +1,4 @@
-import httpStatusCodes from '~/lib/utils/http_status';
-
+import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_SERVICE_UNAVAILABLE } from '~/lib/utils/http_status';
import { dashboardEmptyStates, metricStates } from '~/monitoring/constants';
import * as types from '~/monitoring/stores/mutation_types';
import mutations from '~/monitoring/stores/mutations';
@@ -318,7 +317,7 @@ describe('Monitoring mutations', () => {
metricId,
error: {
response: {
- status: httpStatusCodes.SERVICE_UNAVAILABLE,
+ status: HTTP_STATUS_SERVICE_UNAVAILABLE,
},
},
});
@@ -336,7 +335,7 @@ describe('Monitoring mutations', () => {
metricId,
error: {
response: {
- status: httpStatusCodes.BAD_REQUEST,
+ status: HTTP_STATUS_BAD_REQUEST,
},
},
});
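
The monitoring hunks above, like the notification specs later in this diff, all apply the same migration: the default `statusCodes`/`httpStatusCodes` export is replaced by named `HTTP_STATUS_*` constants from `~/lib/utils/http_status`. A minimal standalone sketch of the resulting pattern, assuming GitLab's Jest module aliases are available; the endpoint and payload below are placeholders, not values from the diff:

import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import { HTTP_STATUS_OK } from '~/lib/utils/http_status';

describe('named HTTP status constants', () => {
  let mock;

  beforeEach(() => {
    mock = new MockAdapter(axios);
  });

  afterEach(() => {
    mock.restore();
  });

  it('stubs a successful response with HTTP_STATUS_OK instead of statusCodes.OK', async () => {
    // '/example' and the payload are illustrative only.
    mock.onGet('/example').reply(HTTP_STATUS_OK, { ok: true });

    const { status, data } = await axios.get('/example');

    expect(status).toBe(HTTP_STATUS_OK);
    expect(data).toEqual({ ok: true });
  });
});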
diff --git a/spec/frontend/nav/components/new_nav_toggle_spec.js b/spec/frontend/nav/components/new_nav_toggle_spec.js
index f09bdef8caa..ee75dfb70e4 100644
--- a/spec/frontend/nav/components/new_nav_toggle_spec.js
+++ b/spec/frontend/nav/components/new_nav_toggle_spec.js
@@ -14,6 +14,8 @@ jest.mock('~/flash');
const TEST_ENDPONT = 'https://example.com/toggle';
describe('NewNavToggle', () => {
+ useMockLocationHelper();
+
let wrapper;
const findToggle = () => wrapper.findComponent(GlToggle);
@@ -59,18 +61,22 @@ describe('NewNavToggle', () => {
});
});
- describe('changing the toggle', () => {
- useMockLocationHelper();
+ describe.each`
+ desc | actFn
+ ${'when toggle button is clicked'} | ${() => findToggle().trigger('click')}
+ ${'when menu item text is clicked'} | ${() => getByText('New navigation').trigger('click')}
+ `('$desc', ({ actFn }) => {
let mock;
beforeEach(() => {
mock = new MockAdapter(axios);
- createComponent();
+ createComponent({ enabled: false });
});
it('reloads the page on success', async () => {
mock.onPut(TEST_ENDPONT).reply(200);
- findToggle().vm.$emit('change');
+
+ actFn();
await waitForPromises();
expect(window.location.reload).toHaveBeenCalled();
@@ -78,7 +84,8 @@ describe('NewNavToggle', () => {
it('shows an alert on error', async () => {
mock.onPut(TEST_ENDPONT).reply(500);
- findToggle().vm.$emit('change');
+
+ actFn();
await waitForPromises();
expect(createAlert).toHaveBeenCalledWith(
@@ -91,6 +98,12 @@ describe('NewNavToggle', () => {
expect(window.location.reload).not.toHaveBeenCalled();
});
+ it('changes the toggle', async () => {
+ await actFn();
+
+ expect(findToggle().props('value')).toBe(true);
+ });
+
afterEach(() => {
mock.restore();
});
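
The rewritten toggle suite above leans on `useMockLocationHelper()` registered at the top of the `describe` block, so `window.location.reload` is a Jest mock for every test, combined with `waitForPromises` before asserting. A reduced sketch of that combination, assuming the helper is importable from `helpers/mock_window_location_helper` as in GitLab's Jest setup; the async handler stands in for the real toggle and menu clicks:

import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import waitForPromises from 'helpers/wait_for_promises';

describe('reload after a successful toggle', () => {
  useMockLocationHelper(); // window.location (including reload) is a jest mock per test

  // Stand-in for the component's success handler; the real specs trigger it
  // through findToggle().trigger('click') or getByText(...).trigger('click').
  const reloadAfterSave = () => Promise.resolve().then(() => window.location.reload());

  it('lets the spec assert on window.location.reload', async () => {
    reloadAfterSave();
    await waitForPromises();

    expect(window.location.reload).toHaveBeenCalled();
  });
});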
diff --git a/spec/frontend/notes/components/comment_form_spec.js b/spec/frontend/notes/components/comment_form_spec.js
index 701ff492702..e13985ef469 100644
--- a/spec/frontend/notes/components/comment_form_spec.js
+++ b/spec/frontend/notes/components/comment_form_spec.js
@@ -5,6 +5,7 @@ import MockAdapter from 'axios-mock-adapter';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import Autosave from '~/autosave';
import batchComments from '~/batch_comments/stores/modules/batch_comments';
import { refreshUserMergeRequestCounts } from '~/commons/nav/user_merge_requests';
import { createAlert } from '~/flash';
@@ -20,6 +21,7 @@ import { loggedOutnoteableData, notesDataMock, userDataMock, noteableDataMock }
jest.mock('autosize');
jest.mock('~/commons/nav/user_merge_requests');
jest.mock('~/flash');
+jest.mock('~/autosave');
Vue.use(Vuex);
@@ -336,8 +338,11 @@ describe('issue_comment_form component', () => {
});
it('inits autosave', () => {
- expect(wrapper.vm.autosave).toBeDefined();
- expect(wrapper.vm.autosave.key).toBe(`autosave/Note/Issue/${noteableDataMock.id}`);
+ expect(Autosave).toHaveBeenCalledWith(expect.any(Element), [
+ 'Note',
+ 'Issue',
+ noteableDataMock.id,
+ ]);
});
});
diff --git a/spec/frontend/notes/components/note_body_spec.js b/spec/frontend/notes/components/note_body_spec.js
index 3b5313744ff..c71cf7666ab 100644
--- a/spec/frontend/notes/components/note_body_spec.js
+++ b/spec/frontend/notes/components/note_body_spec.js
@@ -7,11 +7,14 @@ import NoteAwardsList from '~/notes/components/note_awards_list.vue';
import NoteForm from '~/notes/components/note_form.vue';
import createStore from '~/notes/stores';
import notes from '~/notes/stores/modules/index';
+import Autosave from '~/autosave';
import Suggestions from '~/vue_shared/components/markdown/suggestions.vue';
import { noteableDataMock, notesDataMock, note } from '../mock_data';
+jest.mock('~/autosave');
+
const createComponent = ({
props = {},
noteableData = noteableDataMock,
@@ -84,13 +87,8 @@ describe('issue_note_body component', () => {
});
it('adds autosave', () => {
- const autosaveKey = `autosave/Note/${note.noteable_type}/${note.id}`;
-
- // While we discourage testing wrapper props
- // here we aren't testing a component prop
- // but instead an instance object property
- // which is defined in `app/assets/javascripts/notes/mixins/autosave.js`
- expect(wrapper.vm.autosave.key).toEqual(autosaveKey);
+ // passing undefined instead of an element because of shallowMount
+ expect(Autosave).toHaveBeenCalledWith(undefined, ['Note', note.noteable_type, note.id]);
});
describe('isInternalNote', () => {
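
Both notes specs above switch from inspecting `wrapper.vm.autosave` to mocking the `~/autosave` module and asserting on the constructor call. A minimal sketch of that technique, with a plain initializing function standing in for the mounted component (the real specs exercise comment_form.vue and note_body.vue):

import Autosave from '~/autosave';

jest.mock('~/autosave'); // the default export becomes a jest mock constructor

// Stand-in for the code path that runs when the component is mounted.
const initAutosave = (el, noteableType, noteableId) =>
  new Autosave(el, ['Note', noteableType, noteableId]);

it('records the arguments passed to the mocked Autosave constructor', () => {
  const el = document.createElement('textarea');

  initAutosave(el, 'Issue', 1);

  expect(Autosave).toHaveBeenCalledWith(el, ['Note', 'Issue', 1]);
});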
diff --git a/spec/frontend/notes/stores/actions_spec.js b/spec/frontend/notes/stores/actions_spec.js
index dce2e5d370d..0b2623f3d77 100644
--- a/spec/frontend/notes/stores/actions_spec.js
+++ b/spec/frontend/notes/stores/actions_spec.js
@@ -1442,7 +1442,7 @@ describe('Actions Notes Store', () => {
return testAction(
actions.fetchDiscussions,
{},
- { noteableType: notesConstants.MERGE_REQUEST_NOTEABLE_TYPE },
+ { noteableType: notesConstants.EPIC_NOTEABLE_TYPE },
[
{ type: mutationTypes.ADD_OR_UPDATE_DISCUSSIONS, payload: { discussion } },
{ type: mutationTypes.SET_FETCHING_DISCUSSIONS, payload: false },
@@ -1472,9 +1472,7 @@ describe('Actions Notes Store', () => {
);
});
- it('dispatches `fetchDiscussionsBatch` action if `paginatedMrDiscussions` feature flag is enabled', () => {
- window.gon = { features: { paginatedMrDiscussions: true } };
-
+ it('dispatches `fetchDiscussionsBatch` action if noteable is a MergeRequest', () => {
return testAction(
actions.fetchDiscussions,
{ path: 'test-path', filter: 'test-filter', persistFilter: 'test-persist-filter' },
diff --git a/spec/frontend/notifications/components/custom_notifications_modal_spec.js b/spec/frontend/notifications/components/custom_notifications_modal_spec.js
index cd04adac72d..70749557e61 100644
--- a/spec/frontend/notifications/components/custom_notifications_modal_spec.js
+++ b/spec/frontend/notifications/components/custom_notifications_modal_spec.js
@@ -5,7 +5,7 @@ import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_NOT_FOUND, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import CustomNotificationsModal from '~/notifications/components/custom_notifications_modal.vue';
import { i18n } from '~/notifications/constants';
@@ -138,7 +138,7 @@ describe('CustomNotificationsModal', () => {
mockAxios
.onGet(endpointUrl)
- .reply(httpStatus.OK, mockNotificationSettingsResponses.default);
+ .reply(HTTP_STATUS_OK, mockNotificationSettingsResponses.default);
wrapper = createComponent({ injectedProperties });
@@ -155,7 +155,7 @@ describe('CustomNotificationsModal', () => {
mockAxios
.onGet(endpointUrl)
- .reply(httpStatus.OK, mockNotificationSettingsResponses.default);
+ .reply(HTTP_STATUS_OK, mockNotificationSettingsResponses.default);
wrapper = createComponent();
@@ -173,7 +173,7 @@ describe('CustomNotificationsModal', () => {
});
it('shows a toast message when the request fails', async () => {
- mockAxios.onGet('/api/v4/notification_settings').reply(httpStatus.NOT_FOUND, {});
+ mockAxios.onGet('/api/v4/notification_settings').reply(HTTP_STATUS_NOT_FOUND, {});
wrapper = createComponent();
wrapper.findComponent(GlModal).vm.$emit('show');
@@ -201,11 +201,11 @@ describe('CustomNotificationsModal', () => {
async ({ projectId, groupId, endpointUrl }) => {
mockAxios
.onGet(endpointUrl)
- .reply(httpStatus.OK, mockNotificationSettingsResponses.default);
+ .reply(HTTP_STATUS_OK, mockNotificationSettingsResponses.default);
mockAxios
.onPut(endpointUrl)
- .reply(httpStatus.OK, mockNotificationSettingsResponses.updated);
+ .reply(HTTP_STATUS_OK, mockNotificationSettingsResponses.updated);
const injectedProperties = {
projectId,
@@ -241,7 +241,7 @@ describe('CustomNotificationsModal', () => {
);
it('shows a toast message when the request fails', async () => {
- mockAxios.onPut('/api/v4/notification_settings').reply(httpStatus.NOT_FOUND, {});
+ mockAxios.onPut('/api/v4/notification_settings').reply(HTTP_STATUS_NOT_FOUND, {});
wrapper = createComponent();
// setData usage is discouraged. See https://gitlab.com/groups/gitlab-org/-/epics/7330 for details
diff --git a/spec/frontend/notifications/components/notification_email_listbox_input_spec.js b/spec/frontend/notifications/components/notification_email_listbox_input_spec.js
new file mode 100644
index 00000000000..c490c737cf1
--- /dev/null
+++ b/spec/frontend/notifications/components/notification_email_listbox_input_spec.js
@@ -0,0 +1,81 @@
+import { shallowMount } from '@vue/test-utils';
+import { nextTick } from 'vue';
+import ListboxInput from '~/vue_shared/components/listbox_input/listbox_input.vue';
+import NotificationEmailListboxInput from '~/notifications/components/notification_email_listbox_input.vue';
+
+describe('NotificationEmailListboxInput', () => {
+ let wrapper;
+
+ // Props
+ const label = 'label';
+ const name = 'name';
+ const emails = ['test@gitlab.com'];
+ const emptyValueText = 'emptyValueText';
+ const value = 'value';
+ const disabled = false;
+
+ // Finders
+ const findListboxInput = () => wrapper.findComponent(ListboxInput);
+
+ const createComponent = (attachTo) => {
+ wrapper = shallowMount(NotificationEmailListboxInput, {
+ provide: {
+ label,
+ name,
+ emails,
+ emptyValueText,
+ value,
+ disabled,
+ },
+ attachTo,
+ });
+ };
+
+ describe('props', () => {
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it.each`
+ propName | propValue
+ ${'label'} | ${label}
+ ${'name'} | ${name}
+ ${'selected'} | ${value}
+ ${'disabled'} | ${disabled}
+ `('passes the $propName prop to ListboxInput', ({ propName, propValue }) => {
+ expect(findListboxInput().props(propName)).toBe(propValue);
+ });
+
+ it('passes the options to ListboxInput', () => {
+ expect(findListboxInput().props('items')).toStrictEqual([
+ { text: emptyValueText, value: '' },
+ { text: emails[0], value: emails[0] },
+ ]);
+ });
+ });
+
+ describe('form', () => {
+ let form;
+
+ beforeEach(() => {
+ form = document.createElement('form');
+ const root = document.createElement('div');
+ form.appendChild(root);
+ createComponent(root);
+ });
+
+ afterEach(() => {
+ form = null;
+ });
+
+ it('submits the parent form when the value changes', async () => {
+ jest.spyOn(form, 'submit');
+ expect(form.submit).not.toHaveBeenCalled();
+
+ findListboxInput().vm.$emit('select');
+ await nextTick();
+
+ expect(form.submit).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/spec/frontend/notifications/components/notifications_dropdown_spec.js b/spec/frontend/notifications/components/notifications_dropdown_spec.js
index 7a98b374095..0f13de0e6d8 100644
--- a/spec/frontend/notifications/components/notifications_dropdown_spec.js
+++ b/spec/frontend/notifications/components/notifications_dropdown_spec.js
@@ -4,7 +4,7 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
import waitForPromises from 'helpers/wait_for_promises';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_NOT_FOUND, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import CustomNotificationsModal from '~/notifications/components/custom_notifications_modal.vue';
import NotificationsDropdown from '~/notifications/components/notifications_dropdown.vue';
import NotificationsDropdownItem from '~/notifications/components/notifications_dropdown_item.vue';
@@ -98,7 +98,7 @@ describe('NotificationsDropdown', () => {
it('opens the modal when the user clicks the button', async () => {
jest.spyOn(axios, 'put');
- mockAxios.onPut('/api/v4/notification_settings').reply(httpStatus.OK, {});
+ mockAxios.onPut('/api/v4/notification_settings').reply(HTTP_STATUS_OK, {});
wrapper = createComponent({
initialNotificationLevel: 'custom',
@@ -233,7 +233,7 @@ describe('NotificationsDropdown', () => {
);
it('updates the selectedNotificationLevel and marks the item with a checkmark', async () => {
- mockAxios.onPut('/api/v4/notification_settings').reply(httpStatus.OK, {});
+ mockAxios.onPut('/api/v4/notification_settings').reply(HTTP_STATUS_OK, {});
wrapper = createComponent();
const dropdownItem = findDropdownItemAt(1);
@@ -245,7 +245,7 @@ describe('NotificationsDropdown', () => {
});
it("won't update the selectedNotificationLevel and shows a toast message when the request fails and", async () => {
- mockAxios.onPut('/api/v4/notification_settings').reply(httpStatus.NOT_FOUND, {});
+ mockAxios.onPut('/api/v4/notification_settings').reply(HTTP_STATUS_NOT_FOUND, {});
wrapper = createComponent();
await clickDropdownItemAt(1);
@@ -257,7 +257,7 @@ describe('NotificationsDropdown', () => {
});
it('opens the modal when the user clicks on the "Custom" dropdown item', async () => {
- mockAxios.onPut('/api/v4/notification_settings').reply(httpStatus.OK, {});
+ mockAxios.onPut('/api/v4/notification_settings').reply(HTTP_STATUS_OK, {});
wrapper = createComponent();
await clickDropdownItemAt(5);
diff --git a/spec/frontend/observability/observability_app_spec.js b/spec/frontend/observability/observability_app_spec.js
index 248b0a2057c..e3bcd140d60 100644
--- a/spec/frontend/observability/observability_app_spec.js
+++ b/spec/frontend/observability/observability_app_spec.js
@@ -2,11 +2,7 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ObservabilityApp from '~/observability/components/observability_app.vue';
import ObservabilitySkeleton from '~/observability/components/skeleton/index.vue';
-import {
- MESSAGE_EVENT_TYPE,
- OBSERVABILITY_ROUTES,
- SKELETON_VARIANT,
-} from '~/observability/constants';
+import { MESSAGE_EVENT_TYPE, SKELETON_VARIANTS_BY_ROUTE } from '~/observability/constants';
import { darkModeEnabled } from '~/lib/utils/color_utils';
@@ -20,6 +16,7 @@ describe('Observability root app', () => {
};
const $route = {
pathname: 'https://gitlab.com/gitlab-org/',
+ path: 'https://gitlab.com/gitlab-org/-/observability/dashboards',
query: { otherQuery: 100 },
};
@@ -29,6 +26,10 @@ describe('Observability root app', () => {
const TEST_IFRAME_SRC = 'https://observe.gitlab.com/9970/?groupId=14485840';
+ const OBSERVABILITY_ROUTES = Object.keys(SKELETON_VARIANTS_BY_ROUTE);
+
+ const SKELETON_VARIANTS = Object.values(SKELETON_VARIANTS_BY_ROUTE);
+
const mountComponent = (route = $route) => {
wrapper = shallowMountExtended(ObservabilityApp, {
propsData: {
@@ -139,9 +140,9 @@ describe('Observability root app', () => {
describe('on GOUI_LOADED', () => {
beforeEach(() => {
mountComponent();
- wrapper.vm.$refs.iframeSkeleton.handleSkeleton = mockHandleSkeleton;
+ wrapper.vm.$refs.observabilitySkeleton.onContentLoaded = mockHandleSkeleton;
});
- it('should call handleSkeleton method', () => {
+ it('should call onContentLoaded method', () => {
dispatchMessageEvent({
data: { type: MESSAGE_EVENT_TYPE.GOUI_LOADED },
origin: 'https://observe.gitlab.com',
@@ -149,7 +150,7 @@ describe('Observability root app', () => {
expect(mockHandleSkeleton).toHaveBeenCalled();
});
- it('should not call handleSkeleton method if origin is different', () => {
+ it('should not call onContentLoaded method if origin is different', () => {
dispatchMessageEvent({
data: { type: MESSAGE_EVENT_TYPE.GOUI_LOADED },
origin: 'https://example.com',
@@ -157,7 +158,7 @@ describe('Observability root app', () => {
expect(mockHandleSkeleton).not.toHaveBeenCalled();
});
- it('should not call handleSkeleton method if event type is different', () => {
+ it('should not call onContentLoaded method if event type is different', () => {
dispatchMessageEvent({
data: { type: 'UNKNOWN_EVENT' },
origin: 'https://observe.gitlab.com',
@@ -168,11 +169,11 @@ describe('Observability root app', () => {
describe('skeleton variant', () => {
it.each`
- pathDescription | path | variant
- ${'dashboards'} | ${OBSERVABILITY_ROUTES.DASHBOARDS} | ${SKELETON_VARIANT.DASHBOARDS}
- ${'explore'} | ${OBSERVABILITY_ROUTES.EXPLORE} | ${SKELETON_VARIANT.EXPLORE}
- ${'manage dashboards'} | ${OBSERVABILITY_ROUTES.MANAGE} | ${SKELETON_VARIANT.MANAGE}
- ${'any other'} | ${'unknown/route'} | ${SKELETON_VARIANT.DASHBOARDS}
+ pathDescription | path | variant
+ ${'dashboards'} | ${OBSERVABILITY_ROUTES[0]} | ${SKELETON_VARIANTS[0]}
+ ${'explore'} | ${OBSERVABILITY_ROUTES[1]} | ${SKELETON_VARIANTS[1]}
+ ${'manage dashboards'} | ${OBSERVABILITY_ROUTES[2]} | ${SKELETON_VARIANTS[2]}
+ ${'any other'} | ${'unknown/route'} | ${SKELETON_VARIANTS[0]}
`('renders the $variant skeleton variant for $pathDescription path', ({ path, variant }) => {
mountComponent({ ...$route, path });
const props = wrapper.findComponent(ObservabilitySkeleton).props();
diff --git a/spec/frontend/observability/skeleton_spec.js b/spec/frontend/observability/skeleton_spec.js
index 5637c0e6d70..a95597d8516 100644
--- a/spec/frontend/observability/skeleton_spec.js
+++ b/spec/frontend/observability/skeleton_spec.js
@@ -1,96 +1,127 @@
-import { GlSkeletonLoader } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { GlSkeletonLoader, GlAlert } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
-import ObservabilitySkeleton from '~/observability/components/skeleton/index.vue';
+import Skeleton from '~/observability/components/skeleton/index.vue';
import DashboardsSkeleton from '~/observability/components/skeleton/dashboards.vue';
import ExploreSkeleton from '~/observability/components/skeleton/explore.vue';
import ManageSkeleton from '~/observability/components/skeleton/manage.vue';
-import { SKELETON_VARIANT } from '~/observability/constants';
+import { SKELETON_VARIANTS_BY_ROUTE, DEFAULT_TIMERS } from '~/observability/constants';
-describe('ObservabilitySkeleton component', () => {
+describe('Skeleton component', () => {
let wrapper;
+ const SKELETON_VARIANTS = Object.values(SKELETON_VARIANTS_BY_ROUTE);
+
+ const findContentWrapper = () => wrapper.findByTestId('observability-wrapper');
+
+ const findExploreSkeleton = () => wrapper.findComponent(ExploreSkeleton);
+
+ const findDashboardsSkeleton = () => wrapper.findComponent(DashboardsSkeleton);
+
+ const findManageSkeleton = () => wrapper.findComponent(ManageSkeleton);
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+
const mountComponent = ({ ...props } = {}) => {
- wrapper = shallowMountExtended(ObservabilitySkeleton, {
+ wrapper = shallowMountExtended(Skeleton, {
propsData: props,
});
};
- afterEach(() => {
- wrapper.destroy();
- wrapper = null;
- });
-
describe('on mount', () => {
beforeEach(() => {
- jest.spyOn(global, 'setTimeout');
- mountComponent();
+ mountComponent({ variant: 'explore' });
});
- it('should call setTimeout on mount and show ObservabilitySkeleton if Observability UI is not loaded yet', () => {
- jest.runAllTimers();
+ describe('loading timers', () => {
+ it('show Skeleton if content is not loaded within CONTENT_WAIT_MS', async () => {
+ expect(findExploreSkeleton().exists()).toBe(false);
+ expect(findContentWrapper().isVisible()).toBe(false);
- expect(setTimeout).toHaveBeenCalledWith(expect.any(Function), 500);
- expect(wrapper.vm.loading).toBe(true);
- expect(wrapper.vm.timerId).not.toBeNull();
- });
+ jest.advanceTimersByTime(DEFAULT_TIMERS.CONTENT_WAIT_MS);
- it('should call setTimeout on mount and dont show ObservabilitySkeleton if Observability UI is loaded', () => {
- wrapper.vm.loading = false;
- jest.runAllTimers();
+ await nextTick();
- expect(setTimeout).toHaveBeenCalledWith(expect.any(Function), 500);
- expect(wrapper.vm.loading).toBe(false);
- expect(wrapper.vm.timerId).not.toBeNull();
- });
- });
+ expect(findExploreSkeleton().exists()).toBe(true);
+ expect(findContentWrapper().isVisible()).toBe(false);
+ });
- describe('handleSkeleton', () => {
- it('will not show the skeleton if Observability UI is loaded before', () => {
- jest.spyOn(global, 'clearTimeout');
- mountComponent();
- wrapper.vm.handleSkeleton();
- expect(clearTimeout).toHaveBeenCalledWith(wrapper.vm.timerId);
+ it('does not show the skeleton if content has loaded within CONTENT_WAIT_MS', async () => {
+ expect(findExploreSkeleton().exists()).toBe(false);
+ expect(findContentWrapper().isVisible()).toBe(false);
+
+ wrapper.vm.onContentLoaded();
+
+ await nextTick();
+
+ expect(findContentWrapper().isVisible()).toBe(true);
+ expect(findExploreSkeleton().exists()).toBe(false);
+
+ jest.advanceTimersByTime(DEFAULT_TIMERS.CONTENT_WAIT_MS);
+
+ await nextTick();
+
+ expect(findContentWrapper().isVisible()).toBe(true);
+ expect(findExploreSkeleton().exists()).toBe(false);
+ });
});
- it('will hide skeleton gracefully after 400ms if skeleton was present on screen before Observability UI', () => {
- jest.spyOn(global, 'setTimeout');
- mountComponent();
- jest.runAllTimers();
- wrapper.vm.handleSkeleton();
- jest.runAllTimers();
+ describe('error timeout', () => {
+ it('shows the error dialog if content has not loaded within TIMEOUT_MS', async () => {
+ expect(findAlert().exists()).toBe(false);
+ jest.advanceTimersByTime(DEFAULT_TIMERS.TIMEOUT_MS);
+
+ await nextTick();
+
+ expect(findAlert().exists()).toBe(true);
+ expect(findContentWrapper().isVisible()).toBe(false);
+ });
+
+ it('does not show the error dialog if content has loaded within TIMEOUT_MS', async () => {
+ wrapper.vm.onContentLoaded();
+ jest.advanceTimersByTime(DEFAULT_TIMERS.TIMEOUT_MS);
- expect(setTimeout).toHaveBeenCalledWith(wrapper.vm.hideSkeleton, 400);
- expect(wrapper.vm.loading).toBe(false);
+ await nextTick();
+
+ expect(findAlert().exists()).toBe(false);
+ expect(findContentWrapper().isVisible()).toBe(true);
+ });
});
});
describe('skeleton variant', () => {
it.each`
skeletonType | condition | variant
- ${'dashboards'} | ${'variant is dashboards'} | ${SKELETON_VARIANT.DASHBOARDS}
- ${'explore'} | ${'variant is explore'} | ${SKELETON_VARIANT.EXPLORE}
- ${'manage'} | ${'variant is manage'} | ${SKELETON_VARIANT.MANAGE}
+ ${'dashboards'} | ${'variant is dashboards'} | ${SKELETON_VARIANTS[0]}
+ ${'explore'} | ${'variant is explore'} | ${SKELETON_VARIANTS[1]}
+ ${'manage'} | ${'variant is manage'} | ${SKELETON_VARIANTS[2]}
${'default'} | ${'variant is not manage, dashboards or explore'} | ${'unknown'}
`('should render $skeletonType skeleton if $condition', async ({ skeletonType, variant }) => {
mountComponent({ variant });
- const showsDefaultSkeleton = ![
- SKELETON_VARIANT.DASHBOARDS,
- SKELETON_VARIANT.EXPLORE,
- SKELETON_VARIANT.MANAGE,
- ].includes(variant);
- expect(wrapper.findComponent(DashboardsSkeleton).exists()).toBe(
- skeletonType === SKELETON_VARIANT.DASHBOARDS,
- );
- expect(wrapper.findComponent(ExploreSkeleton).exists()).toBe(
- skeletonType === SKELETON_VARIANT.EXPLORE,
- );
- expect(wrapper.findComponent(ManageSkeleton).exists()).toBe(
- skeletonType === SKELETON_VARIANT.MANAGE,
- );
+ jest.advanceTimersByTime(DEFAULT_TIMERS.CONTENT_WAIT_MS);
+ await nextTick();
+ const showsDefaultSkeleton = !SKELETON_VARIANTS.includes(variant);
+
+ expect(findDashboardsSkeleton().exists()).toBe(skeletonType === SKELETON_VARIANTS[0]);
+ expect(findExploreSkeleton().exists()).toBe(skeletonType === SKELETON_VARIANTS[1]);
+ expect(findManageSkeleton().exists()).toBe(skeletonType === SKELETON_VARIANTS[2]);
expect(wrapper.findComponent(GlSkeletonLoader).exists()).toBe(showsDefaultSkeleton);
});
});
+
+ describe('on destroy', () => {
+ it('should clear init timer and timeout timer', () => {
+ jest.spyOn(global, 'clearTimeout');
+ mountComponent();
+ wrapper.destroy();
+ expect(clearTimeout).toHaveBeenCalledTimes(2);
+ expect(clearTimeout.mock.calls).toEqual([
+ [wrapper.vm.loadingTimeout], // First call
+ [wrapper.vm.errorTimeout], // Second call
+ ]);
+ });
+ });
});
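
The rewritten skeleton spec drives the component with Jest fake timers: advance past `DEFAULT_TIMERS.CONTENT_WAIT_MS` (or `TIMEOUT_MS`), then `await nextTick()` so Vue re-renders before asserting. The timer half of that pattern in isolation, with `CONTENT_WAIT_MS` as an assumed placeholder value and a plain callback in place of the component:

const CONTENT_WAIT_MS = 500; // placeholder for DEFAULT_TIMERS.CONTENT_WAIT_MS

const scheduleSkeleton = (showSkeleton) => setTimeout(showSkeleton, CONTENT_WAIT_MS);

it('only fires once the fake clock passes the wait threshold', () => {
  jest.useFakeTimers();
  const showSkeleton = jest.fn();

  scheduleSkeleton(showSkeleton);
  expect(showSkeleton).not.toHaveBeenCalled();

  jest.advanceTimersByTime(CONTENT_WAIT_MS);
  expect(showSkeleton).toHaveBeenCalled();
});

In the Vue specs above, the extra `await nextTick()` after advancing the clock is what lets the skeleton and alert assertions observe the re-rendered DOM.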
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
index 96c670eaad2..fa0d76762df 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/details_page/tags_list_row_spec.js
@@ -335,10 +335,10 @@ describe('tags list row', () => {
});
describe.each`
- name | finderFunction | text | icon | clipboard
- ${'published date detail'} | ${findPublishedDateDetail} | ${'Published to the gitlab-org/gitlab-test/rails-12009 image repository at 01:29 UTC on 2020-11-03'} | ${'clock'} | ${false}
- ${'manifest detail'} | ${findManifestDetail} | ${'Manifest digest: sha256:2cf3d2fdac1b04a14301d47d51cb88dcd26714c74f91440eeee99ce399089062'} | ${'log'} | ${true}
- ${'configuration detail'} | ${findConfigurationDetail} | ${'Configuration digest: sha256:c2613843ab33aabf847965442b13a8b55a56ae28837ce182627c0716eb08c02b'} | ${'cloud-gear'} | ${true}
+ name | finderFunction | text | icon | clipboard
+ ${'published date detail'} | ${findPublishedDateDetail} | ${'Published to the gitlab-org/gitlab-test/rails-12009 image repository at 13:29:38 UTC on 2020-11-03'} | ${'clock'} | ${false}
+ ${'manifest detail'} | ${findManifestDetail} | ${'Manifest digest: sha256:2cf3d2fdac1b04a14301d47d51cb88dcd26714c74f91440eeee99ce399089062'} | ${'log'} | ${true}
+ ${'configuration detail'} | ${findConfigurationDetail} | ${'Configuration digest: sha256:c2613843ab33aabf847965442b13a8b55a56ae28837ce182627c0716eb08c02b'} | ${'cloud-gear'} | ${true}
`('$name details row', ({ finderFunction, text, icon, clipboard }) => {
it(`has ${text} as text`, async () => {
mountComponent();
diff --git a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
index 7da91c4af96..75068591007 100644
--- a/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
+++ b/spec/frontend/packages_and_registries/container_registry/explorer/components/list_page/image_list_row_spec.js
@@ -1,6 +1,6 @@
import { GlIcon, GlSprintf, GlSkeletonLoader, GlButton } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import { createMockDirective, getBinding } from 'helpers/vue_mock_directive';
+import { createMockDirective } from 'helpers/vue_mock_directive';
import { mockTracking } from 'helpers/tracking_helper';
import { getIdFromGraphQLId } from '~/graphql_shared/utils';
import DeleteButton from '~/packages_and_registries/container_registry/explorer/components/delete_button.vue';
@@ -59,31 +59,6 @@ describe('Image List Row', () => {
wrapper = null;
});
- describe('list item component', () => {
- describe('tooltip', () => {
- it(`the title is ${ROW_SCHEDULED_FOR_DELETION}`, () => {
- mountComponent();
-
- const tooltip = getBinding(wrapper.element, 'gl-tooltip');
- expect(tooltip).toBeDefined();
- expect(tooltip.value.title).toBe(ROW_SCHEDULED_FOR_DELETION);
- });
-
- it('is disabled when item is being deleted', () => {
- mountComponent({ item: { ...item, status: IMAGE_DELETE_SCHEDULED_STATUS } });
-
- const tooltip = getBinding(wrapper.element, 'gl-tooltip');
- expect(tooltip.value.disabled).toBe(false);
- });
- });
-
- it('is disabled when the item is in deleting status', () => {
- mountComponent({ item: { ...item, status: IMAGE_DELETE_SCHEDULED_STATUS } });
-
- expect(findListItemComponent().props('disabled')).toBe(true);
- });
- });
-
describe('image title and path', () => {
it('renders shortened name of image and contains a link to the details page', () => {
mountComponent();
@@ -158,10 +133,22 @@ describe('Image List Row', () => {
mountComponent({ item: { ...item, status: IMAGE_DELETE_SCHEDULED_STATUS } });
});
- it('the router link is disabled', () => {
- // we check the event prop as is the only workaround to disable a router link
- expect(findDetailsLink().props('event')).toBe('');
+ it('the router link does not exist', () => {
+ expect(findDetailsLink().exists()).toBe(false);
+ });
+
+ it('image name exists', () => {
+ expect(findListItemComponent().text()).toContain('gitlab-test/rails-12009');
+ });
+
+ it(`contains secondary text ${ROW_SCHEDULED_FOR_DELETION}`, () => {
+ expect(findListItemComponent().text()).toContain(ROW_SCHEDULED_FOR_DELETION);
});
+
+ it('the tags count does not exist', () => {
+ expect(findTagsCount().exists()).toBe(false);
+ });
+
it('the clipboard button is disabled', () => {
expect(findClipboardButton().attributes('disabled')).toBe('true');
});
diff --git a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_history_spec.js b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_history_spec.js
index c6b5138639e..0cbe2755f7e 100644
--- a/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_history_spec.js
+++ b/spec/frontend/packages_and_registries/infrastructure_registry/components/details/components/package_history_spec.js
@@ -61,14 +61,14 @@ describe('Package History', () => {
);
});
describe.each`
- name | amount | icon | text | timeAgoTooltip | link
- ${'created-on'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'clock'} | ${'Test package version 1.0.0 was first created'} | ${mavenPackage.created_at} | ${null}
- ${'first-pipeline-commit'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'commit'} | ${'Created by commit #sha-baz on branch branch-name'} | ${null} | ${mockPipelineInfo.project.commit_url}
- ${'first-pipeline-pipeline'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'pipeline'} | ${'Built by pipeline #1 triggered by foo'} | ${mockPipelineInfo.created_at} | ${mockPipelineInfo.project.pipeline_url}
- ${'published'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'package'} | ${'Published to the baz project Package Registry'} | ${mavenPackage.created_at} | ${null}
- ${'archived'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'history'} | ${'Package has 1 archived update'} | ${null} | ${null}
- ${'archived'} | ${HISTORY_PIPELINES_LIMIT + 3} | ${'history'} | ${'Package has 2 archived updates'} | ${null} | ${null}
- ${'pipeline-entry'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'pencil'} | ${'Package updated by commit #sha-baz on branch branch-name, built by pipeline #3, and published to the registry'} | ${mavenPackage.created_at} | ${mockPipelineInfo.project.commit_url}
+ name | amount | icon | text | timeAgoTooltip | link
+ ${'created-on'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'clock'} | ${'Test package version 1.0.0 was first created'} | ${mavenPackage.created_at} | ${null}
+ ${'first-pipeline-commit'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'commit'} | ${'Created by commit sha-baz on branch branch-name'} | ${null} | ${mockPipelineInfo.project.commit_url}
+ ${'first-pipeline-pipeline'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'pipeline'} | ${'Built by pipeline #1 triggered by foo'} | ${mockPipelineInfo.created_at} | ${mockPipelineInfo.project.pipeline_url}
+ ${'published'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'package'} | ${'Published to the baz project Package Registry'} | ${mavenPackage.created_at} | ${null}
+ ${'archived'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'history'} | ${'Package has 1 archived update'} | ${null} | ${null}
+ ${'archived'} | ${HISTORY_PIPELINES_LIMIT + 3} | ${'history'} | ${'Package has 2 archived updates'} | ${null} | ${null}
+ ${'pipeline-entry'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'pencil'} | ${'Package updated by commit sha-baz on branch branch-name, built by pipeline #3, and published to the registry'} | ${mavenPackage.created_at} | ${mockPipelineInfo.project.commit_url}
`(
'with $amount pipelines history element $name',
({ name, icon, text, timeAgoTooltip, link, amount }) => {
diff --git a/spec/frontend/packages_and_registries/package_registry/components/delete_modal_spec.js b/spec/frontend/packages_and_registries/package_registry/components/delete_modal_spec.js
index e0e26434680..9c1ebf5a2eb 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/delete_modal_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/delete_modal_spec.js
@@ -63,6 +63,14 @@ describe('DeleteModal', () => {
expect(wrapper.emitted('confirm')).toHaveLength(1);
});
+ it('emits cancel when cancel event is emitted', () => {
+ expect(wrapper.emitted('cancel')).toBeUndefined();
+
+ findModal().vm.$emit('cancel');
+
+ expect(wrapper.emitted('cancel')).toHaveLength(1);
+ });
+
it('show calls gl-modal show', () => {
findModal().vm.show();
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
index c4020eeb75f..b2375da7b11 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/__snapshots__/pypi_installation_spec.js.snap
@@ -114,7 +114,7 @@ exports[`PypiInstallation renders all the messages 1`] = `
aria-live="polite"
class="btn input-group-text btn-default btn-md gl-button btn-default-secondary btn-icon"
data-clipboard-handle-tooltip="false"
- data-clipboard-text="pip install @gitlab-org/package-15 --extra-index-url http://__token__:<your_personal_token>@gdk.test:3000/api/v4/projects/1/packages/pypi/simple"
+ data-clipboard-text="pip install @gitlab-org/package-15 --index-url http://__token__:<your_personal_token>@gdk.test:3000/api/v4/projects/1/packages/pypi/simple"
id="clipboard-button-6"
title="Copy Pip command"
type="button"
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js
index ec2e833552a..bb2fa9eb6f5 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_history_spec.js
@@ -131,14 +131,14 @@ describe('Package History', () => {
});
describe.each`
- name | amount | icon | text | timeAgoTooltip | link
- ${'created-on'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'clock'} | ${'@gitlab-org/package-15 version 1.0.0 was first created'} | ${packageData().createdAt} | ${null}
- ${'first-pipeline-commit'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'commit'} | ${'Created by commit #b83d6e39 on branch master'} | ${null} | ${onePipeline.commitPath}
- ${'first-pipeline-pipeline'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'pipeline'} | ${'Built by pipeline #1 triggered by Administrator'} | ${onePipeline.createdAt} | ${onePipeline.path}
- ${'published'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'package'} | ${'Published to the baz project Package Registry'} | ${packageData().createdAt} | ${null}
- ${'archived'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'history'} | ${'Package has 1 archived update'} | ${null} | ${null}
- ${'archived'} | ${HISTORY_PIPELINES_LIMIT + 3} | ${'history'} | ${'Package has 2 archived updates'} | ${null} | ${null}
- ${'pipeline-entry'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'pencil'} | ${'Package updated by commit #b83d6e39 on branch master, built by pipeline #3, and published to the registry'} | ${packageData().createdAt} | ${onePipeline.commitPath}
+ name | amount | icon | text | timeAgoTooltip | link
+ ${'created-on'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'clock'} | ${'@gitlab-org/package-15 version 1.0.0 was first created'} | ${packageData().createdAt} | ${null}
+ ${'first-pipeline-commit'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'commit'} | ${'Created by commit b83d6e39 on branch master'} | ${null} | ${onePipeline.commitPath}
+ ${'first-pipeline-pipeline'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'pipeline'} | ${'Built by pipeline #1 triggered by Administrator'} | ${onePipeline.createdAt} | ${onePipeline.path}
+ ${'published'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'package'} | ${'Published to the baz project Package Registry'} | ${packageData().createdAt} | ${null}
+ ${'archived'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'history'} | ${'Package has 1 archived update'} | ${null} | ${null}
+ ${'archived'} | ${HISTORY_PIPELINES_LIMIT + 3} | ${'history'} | ${'Package has 2 archived updates'} | ${null} | ${null}
+ ${'pipeline-entry'} | ${HISTORY_PIPELINES_LIMIT + 2} | ${'pencil'} | ${'Package updated by commit b83d6e39 on branch master, built by pipeline #3, and published to the registry'} | ${packageData().createdAt} | ${onePipeline.commitPath}
`(
'with $amount pipelines history element $name',
({ name, icon, text, timeAgoTooltip, link, amount }) => {
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/package_versions_list_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/package_versions_list_spec.js
index f0fa9592419..20a459e2c1a 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/package_versions_list_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/package_versions_list_spec.js
@@ -1,7 +1,7 @@
-import { GlKeysetPagination } from '@gitlab/ui';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import PackageVersionsList from '~/packages_and_registries/package_registry/components/details/package_versions_list.vue';
import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue';
+import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
import VersionRow from '~/packages_and_registries/package_registry/components/details/version_row.vue';
import { packageData } from '../../mock_data';
@@ -21,7 +21,7 @@ describe('PackageVersionsList', () => {
const uiElements = {
findLoader: () => wrapper.findComponent(PackagesListLoader),
- findListPagination: () => wrapper.findComponent(GlKeysetPagination),
+ findRegistryList: () => wrapper.findComponent(RegistryList),
findEmptySlot: () => wrapper.findComponent(EmptySlotStub),
findListRow: () => wrapper.findAllComponents(VersionRow),
};
@@ -33,6 +33,9 @@ describe('PackageVersionsList', () => {
isLoading: false,
...props,
},
+ stubs: {
+ RegistryList,
+ },
slots: {
'empty-state': EmptySlotStub,
},
@@ -55,8 +58,8 @@ describe('PackageVersionsList', () => {
expect(uiElements.findEmptySlot().exists()).toBe(false);
});
- it('does not display pagination', () => {
- expect(uiElements.findListPagination().exists()).toBe(false);
+ it('does not display registry list', () => {
+ expect(uiElements.findRegistryList().exists()).toBe(false);
});
});
@@ -77,8 +80,8 @@ describe('PackageVersionsList', () => {
expect(uiElements.findListRow().exists()).toBe(false);
});
- it('does not display pagination', () => {
- expect(uiElements.findListPagination().exists()).toBe(false);
+ it('does not display registry list', () => {
+ expect(uiElements.findRegistryList().exists()).toBe(false);
});
});
@@ -87,6 +90,19 @@ describe('PackageVersionsList', () => {
mountComponent();
});
+ it('displays package registry list', () => {
+ expect(uiElements.findRegistryList().exists()).toEqual(true);
+ });
+
+ it('binds the right props', () => {
+ expect(uiElements.findRegistryList().props()).toMatchObject({
+ items: packageList,
+ pagination: {},
+ isLoading: false,
+ hiddenDelete: true,
+ });
+ });
+
it('displays package version rows', () => {
expect(uiElements.findListRow().exists()).toEqual(true);
expect(uiElements.findListRow()).toHaveLength(packageList.length);
@@ -102,27 +118,6 @@ describe('PackageVersionsList', () => {
});
});
- describe('pagination display', () => {
- it('does not display pagination if there is no previous or next page', () => {
- expect(uiElements.findListPagination().exists()).toBe(false);
- });
-
- it('displays pagination if pageInfo.hasNextPage is true', async () => {
- await wrapper.setProps({ pageInfo: { hasNextPage: true } });
- expect(uiElements.findListPagination().exists()).toBe(true);
- });
-
- it('displays pagination if pageInfo.hasPreviousPage is true', async () => {
- await wrapper.setProps({ pageInfo: { hasPreviousPage: true } });
- expect(uiElements.findListPagination().exists()).toBe(true);
- });
-
- it('displays pagination if both pageInfo.hasNextPage and pageInfo.hasPreviousPage are true', async () => {
- await wrapper.setProps({ pageInfo: { hasNextPage: true, hasPreviousPage: true } });
- expect(uiElements.findListPagination().exists()).toBe(true);
- });
- });
-
it('does not display loader', () => {
expect(uiElements.findLoader().exists()).toBe(false);
});
@@ -137,14 +132,14 @@ describe('PackageVersionsList', () => {
mountComponent({ pageInfo: { hasNextPage: true } });
});
- it('emits prev-page event when paginator emits prev event', () => {
- uiElements.findListPagination().vm.$emit('prev');
+ it('emits prev-page event when registry list emits prev event', () => {
+ uiElements.findRegistryList().vm.$emit('prev-page');
expect(wrapper.emitted('prev-page')).toHaveLength(1);
});
- it('emits next-page when paginator emits next event', () => {
- uiElements.findListPagination().vm.$emit('next');
+ it('emits next-page when registry list emits next event', () => {
+ uiElements.findRegistryList().vm.$emit('next-page');
expect(wrapper.emitted('next-page')).toHaveLength(1);
});
diff --git a/spec/frontend/packages_and_registries/package_registry/components/details/pypi_installation_spec.js b/spec/frontend/packages_and_registries/package_registry/components/details/pypi_installation_spec.js
index 20acb0872e5..4a27f8011df 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/details/pypi_installation_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/details/pypi_installation_spec.js
@@ -16,7 +16,7 @@ const packageEntity = { ...packageData(), packageType: PACKAGE_TYPE_PYPI };
describe('PypiInstallation', () => {
let wrapper;
- const pipCommandStr = `pip install @gitlab-org/package-15 --extra-index-url ${packageEntity.pypiUrl}`;
+ const pipCommandStr = `pip install @gitlab-org/package-15 --index-url ${packageEntity.pypiUrl}`;
const pypiSetupStr = `[gitlab]
repository = ${packageEntity.pypiSetupUrl}
username = __token__
diff --git a/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js b/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js
index 7cc5bea0f7a..5e9cb8fbb0b 100644
--- a/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/components/list/packages_list_spec.js
@@ -1,14 +1,19 @@
import { GlAlert, GlSprintf } from '@gitlab/ui';
import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { stubComponent } from 'helpers/stub_component';
import PackagesListRow from '~/packages_and_registries/package_registry/components/list/package_list_row.vue';
import PackagesListLoader from '~/packages_and_registries/shared/components/packages_list_loader.vue';
import DeletePackageModal from '~/packages_and_registries/shared/components/delete_package_modal.vue';
+import DeleteModal from '~/packages_and_registries/package_registry/components/delete_modal.vue';
import RegistryList from '~/packages_and_registries/shared/components/registry_list.vue';
import {
DELETE_PACKAGE_TRACKING_ACTION,
+ DELETE_PACKAGES_TRACKING_ACTION,
REQUEST_DELETE_PACKAGE_TRACKING_ACTION,
+ REQUEST_DELETE_PACKAGES_TRACKING_ACTION,
CANCEL_DELETE_PACKAGE_TRACKING_ACTION,
+ CANCEL_DELETE_PACKAGES_TRACKING_ACTION,
} from '~/packages_and_registries/package_registry/constants';
import PackagesList from '~/packages_and_registries/package_registry/components/list/packages_list.vue';
import Tracking from '~/tracking';
@@ -44,6 +49,7 @@ describe('packages_list', () => {
const findRegistryList = () => wrapper.findComponent(RegistryList);
const findPackagesListRow = () => wrapper.findComponent(PackagesListRow);
const findErrorPackageAlert = () => wrapper.findComponent(GlAlert);
+ const findDeletePackagesModal = () => wrapper.findComponent(DeleteModal);
const mountComponent = (props) => {
wrapper = shallowMountExtended(PackagesList, {
@@ -53,6 +59,11 @@ describe('packages_list', () => {
},
stubs: {
DeletePackageModal,
+ DeleteModal: stubComponent(DeleteModal, {
+ methods: {
+ show: jest.fn(),
+ },
+ }),
GlSprintf,
RegistryList,
},
@@ -125,20 +136,48 @@ describe('packages_list', () => {
});
});
- describe('when the user can destroy the package', () => {
- beforeEach(async () => {
+ describe.each`
+ description | finderFunction | deletePayload
+ ${'when the user can destroy the package'} | ${findPackagesListRow} | ${firstPackage}
+ ${'when the user can bulk destroy packages and deletes only one package'} | ${findRegistryList} | ${[firstPackage]}
+ `('$description', ({ finderFunction, deletePayload }) => {
+ let eventSpy;
+ const category = 'UI::NpmPackages';
+
+ beforeEach(() => {
+ eventSpy = jest.spyOn(Tracking, 'event');
mountComponent();
- await findPackagesListRow().vm.$emit('delete', firstPackage);
+ finderFunction().vm.$emit('delete', deletePayload);
});
it('passes itemToBeDeleted to the modal', () => {
expect(findPackageListDeleteModal().props('itemToBeDeleted')).toStrictEqual(firstPackage);
});
- it('emits package:delete when modal confirms', async () => {
- await findPackageListDeleteModal().vm.$emit('ok');
+ it('requesting delete tracks the right action', () => {
+ expect(eventSpy).toHaveBeenCalledWith(
+ category,
+ REQUEST_DELETE_PACKAGE_TRACKING_ACTION,
+ expect.any(Object),
+ );
+ });
+
+ describe('when modal confirms', () => {
+ beforeEach(() => {
+ findPackageListDeleteModal().vm.$emit('ok');
+ });
+
+ it('emits package:delete when modal confirms', () => {
+ expect(wrapper.emitted('package:delete')[0]).toEqual([firstPackage]);
+ });
- expect(wrapper.emitted('package:delete')[0]).toEqual([firstPackage]);
+ it('tracks the right action', () => {
+ expect(eventSpy).toHaveBeenCalledWith(
+ category,
+ DELETE_PACKAGE_TRACKING_ACTION,
+ expect.any(Object),
+ );
+ });
});
it.each(['ok', 'cancel'])('resets itemToBeDeleted when modal emits %s', async (event) => {
@@ -146,26 +185,73 @@ describe('packages_list', () => {
expect(findPackageListDeleteModal().props('itemToBeDeleted')).toBeNull();
});
+
+ it('canceling delete tracks the right action', () => {
+ findPackageListDeleteModal().vm.$emit('cancel');
+
+ expect(eventSpy).toHaveBeenCalledWith(
+ category,
+ CANCEL_DELETE_PACKAGE_TRACKING_ACTION,
+ expect.any(Object),
+ );
+ });
});
describe('when the user can bulk destroy packages', () => {
+ let eventSpy;
+ const items = [firstPackage, secondPackage];
+
beforeEach(() => {
+ eventSpy = jest.spyOn(Tracking, 'event');
mountComponent();
+ findRegistryList().vm.$emit('delete', items);
});
- it('passes itemToBeDeleted to the modal when there is only one package', async () => {
- await findRegistryList().vm.$emit('delete', [firstPackage]);
-
- expect(findPackageListDeleteModal().props('itemToBeDeleted')).toStrictEqual(firstPackage);
+ it('passes itemsToBeDeleted to the modal', () => {
+ expect(findDeletePackagesModal().props('itemsToBeDeleted')).toStrictEqual(items);
expect(wrapper.emitted('delete')).toBeUndefined();
});
- it('emits delete when there is more than one package', () => {
- const items = [firstPackage, secondPackage];
- findRegistryList().vm.$emit('delete', items);
+ it('requesting delete tracks the right action', () => {
+ expect(eventSpy).toHaveBeenCalledWith(
+ undefined,
+ REQUEST_DELETE_PACKAGES_TRACKING_ACTION,
+ expect.any(Object),
+ );
+ });
+
+ describe('when modal confirms', () => {
+ beforeEach(() => {
+ findDeletePackagesModal().vm.$emit('confirm');
+ });
+
+ it('emits delete event', () => {
+ expect(wrapper.emitted('delete')[0]).toEqual([items]);
+ });
+
+ it('tracks the right action', () => {
+ expect(eventSpy).toHaveBeenCalledWith(
+ undefined,
+ DELETE_PACKAGES_TRACKING_ACTION,
+ expect.any(Object),
+ );
+ });
+ });
+
+ it.each(['confirm', 'cancel'])('resets itemsToBeDeleted when modal emits %s', async (event) => {
+ await findDeletePackagesModal().vm.$emit(event);
- expect(wrapper.emitted('delete')).toHaveLength(1);
- expect(wrapper.emitted('delete')[0]).toEqual([items]);
+ expect(findDeletePackagesModal().props('itemsToBeDeleted')).toHaveLength(0);
+ });
+
+ it('canceling delete tracks the right action', () => {
+ findDeletePackagesModal().vm.$emit('cancel');
+
+ expect(eventSpy).toHaveBeenCalledWith(
+ undefined,
+ CANCEL_DELETE_PACKAGES_TRACKING_ACTION,
+ expect.any(Object),
+ );
});
});
@@ -223,44 +309,4 @@ describe('packages_list', () => {
expect(wrapper.emitted('next-page')).toHaveLength(1);
});
});
-
- describe('tracking', () => {
- let eventSpy;
- const category = 'UI::NpmPackages';
-
- beforeEach(() => {
- eventSpy = jest.spyOn(Tracking, 'event');
- mountComponent();
- findPackagesListRow().vm.$emit('delete', firstPackage);
- return nextTick();
- });
-
- it('requesting the delete tracks the right action', () => {
- expect(eventSpy).toHaveBeenCalledWith(
- category,
- REQUEST_DELETE_PACKAGE_TRACKING_ACTION,
- expect.any(Object),
- );
- });
-
- it('confirming delete tracks the right action', () => {
- findPackageListDeleteModal().vm.$emit('ok');
-
- expect(eventSpy).toHaveBeenCalledWith(
- category,
- DELETE_PACKAGE_TRACKING_ACTION,
- expect.any(Object),
- );
- });
-
- it('canceling delete tracks the right action', () => {
- findPackageListDeleteModal().vm.$emit('cancel');
-
- expect(eventSpy).toHaveBeenCalledWith(
- category,
- CANCEL_DELETE_PACKAGE_TRACKING_ACTION,
- expect.any(Object),
- );
- });
- });
});
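
The consolidated packages_list spec relies on two helpers worth noting: `stubComponent` to replace `DeleteModal` with a stub whose `show` method is a `jest.fn()`, and `jest.spyOn(Tracking, 'event')` to assert tracking actions. A reduced sketch of the tracking half, using a literal action string as a placeholder for the `*_TRACKING_ACTION` constants:

import Tracking from '~/tracking';

it('verifies a tracking call through a spy', () => {
  // mockImplementation keeps the real snowplow-backed implementation from running in Jest.
  const eventSpy = jest.spyOn(Tracking, 'event').mockImplementation(() => {});
  const category = 'UI::NpmPackages';

  // Stand-in for the component emitting 'delete'; the real spec triggers this
  // via findPackagesListRow().vm.$emit('delete', firstPackage).
  Tracking.event(category, 'request_delete_package', { label: 'example' });

  expect(eventSpy).toHaveBeenCalledWith(category, 'request_delete_package', expect.any(Object));
});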
diff --git a/spec/frontend/packages_and_registries/package_registry/pages/__snapshots__/list_spec.js.snap b/spec/frontend/packages_and_registries/package_registry/pages/__snapshots__/list_spec.js.snap
deleted file mode 100644
index c2fecf87428..00000000000
--- a/spec/frontend/packages_and_registries/package_registry/pages/__snapshots__/list_spec.js.snap
+++ /dev/null
@@ -1,125 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`PackagesListApp renders 1`] = `
-<div>
- <!---->
-
- <gl-card-stub
- bodyclass="gl-display-flex gl-p-0!"
- class="gl-px-8 gl-py-6 gl-line-height-20 gl-mt-3"
- footerclass=""
- headerclass=""
- >
- <!---->
-
- <div
- class="gl-banner-content"
- >
- <h2
- class="gl-banner-title"
- >
- Help us learn about your registry migration needs
- </h2>
-
- <p>
- If you are interested in migrating packages from your private registry to the GitLab Package Registry, take our survey and tell us more about your needs.
- </p>
-
- <gl-button-stub
- buttontextclasses=""
- category="primary"
- data-testid="gl-banner-primary-button"
- href="https://gitlab.fra1.qualtrics.com/jfe/form/SV_cHomH9FPzOaiDTU"
- icon=""
- size="medium"
- variant="confirm"
- >
- Take survey
- </gl-button-stub>
-
- </div>
-
- <gl-button-stub
- aria-label="Close banner"
- buttontextclasses=""
- category="tertiary"
- class="gl-banner-close"
- icon="close"
- size="small"
- variant="default"
- />
- </gl-card-stub>
-
- <package-title-stub
- count="2"
- helpurl="/help/user/packages/index"
- />
-
- <package-search-stub
- class="gl-mb-5"
- />
-
- <div>
- <section
- class="gl-display-flex empty-state gl-text-center gl-flex-direction-column"
- >
- <div
- class="gl-max-w-full"
- >
- <div
- class="svg-250 svg-content"
- >
- <img
- alt=""
- class="gl-max-w-full gl-dark-invert-keep-hue"
- role="img"
- src="emptyListIllustration"
- />
- </div>
- </div>
-
- <div
- class="gl-max-w-full gl-m-auto"
- >
- <div
- class="gl-mx-auto gl-my-0 gl-p-5"
- >
- <h1
- class="gl-font-size-h-display gl-line-height-36 h4"
- >
-
- There are no packages yet
-
- </h1>
-
- <p
- class="gl-mt-3"
- >
- Learn how to
- <b-link-stub
- class="gl-link"
- event="click"
- href="/help/user/packages/package_registry/index"
- routertag="a"
- target="_blank"
- >
- publish and share your packages
- </b-link-stub>
- with GitLab.
- </p>
-
- <div
- class="gl-display-flex gl-flex-wrap gl-justify-content-center"
- >
- <!---->
-
- <!---->
- </div>
- </div>
- </div>
- </section>
- </div>
-
- <div />
-</div>
-`;
diff --git a/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js b/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js
index abdb875e839..b3cbd9f5dcf 100644
--- a/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js
+++ b/spec/frontend/packages_and_registries/package_registry/pages/list_spec.js
@@ -1,23 +1,18 @@
-import { GlAlert, GlBanner, GlEmptyState, GlSprintf, GlLink } from '@gitlab/ui';
+import { GlAlert, GlEmptyState, GlSprintf, GlLink } from '@gitlab/ui';
import Vue, { nextTick } from 'vue';
-
import VueApollo from 'vue-apollo';
-import * as utils from '~/lib/utils/common_utils';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import { stubComponent } from 'helpers/stub_component';
import ListPage from '~/packages_and_registries/package_registry/pages/list.vue';
import PackageTitle from '~/packages_and_registries/package_registry/components/list/package_title.vue';
import PackageSearch from '~/packages_and_registries/package_registry/components/list/package_search.vue';
import OriginalPackageList from '~/packages_and_registries/package_registry/components/list/packages_list.vue';
import DeletePackage from '~/packages_and_registries/package_registry/components/functional/delete_package.vue';
-import DeleteModal from '~/packages_and_registries/package_registry/components/delete_modal.vue';
import {
PROJECT_RESOURCE_TYPE,
GROUP_RESOURCE_TYPE,
GRAPHQL_PAGE_SIZE,
- HIDE_PACKAGE_MIGRATION_SURVEY_COOKIE,
EMPTY_LIST_HELP_URL,
PACKAGE_HELP_URL,
DELETE_PACKAGES_ERROR_MESSAGE,
@@ -59,13 +54,11 @@ describe('PackagesListApp', () => {
};
const findAlert = () => wrapper.findComponent(GlAlert);
- const findBanner = () => wrapper.findComponent(GlBanner);
const findPackageTitle = () => wrapper.findComponent(PackageTitle);
const findSearch = () => wrapper.findComponent(PackageSearch);
const findListComponent = () => wrapper.findComponent(PackageList);
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
const findDeletePackage = () => wrapper.findComponent(DeletePackage);
- const findDeletePackagesModal = () => wrapper.findComponent(DeleteModal);
const mountComponent = ({
resolver = jest.fn().mockResolvedValue(packagesListQuery()),
@@ -84,18 +77,12 @@ describe('PackagesListApp', () => {
apolloProvider,
provide,
stubs: {
- GlBanner,
GlEmptyState,
GlLoadingIcon,
GlSprintf,
GlLink,
PackageList,
DeletePackage,
- DeleteModal: stubComponent(DeleteModal, {
- methods: {
- show: jest.fn(),
- },
- }),
},
});
};
@@ -118,14 +105,6 @@ describe('PackagesListApp', () => {
expect(resolver).not.toHaveBeenCalled();
});
- it('renders', async () => {
- mountComponent();
-
- await waitForFirstRequest();
-
- expect(wrapper.element).toMatchSnapshot();
- });
-
it('has a package title', async () => {
mountComponent();
@@ -138,70 +117,6 @@ describe('PackagesListApp', () => {
});
});
- describe('package migration survey banner', () => {
- describe('with no cookie set', () => {
- beforeEach(() => {
- utils.setCookie = jest.fn();
-
- mountComponent();
- });
-
- it('displays the banner', () => {
- expect(findBanner().exists()).toBe(true);
- });
-
- it('does not call setCookie', () => {
- expect(utils.setCookie).not.toHaveBeenCalled();
- });
-
- describe('when the close button is clicked', () => {
- beforeEach(() => {
- findBanner().vm.$emit('close');
- });
-
- it('sets the dismissed cookie', () => {
- expect(utils.setCookie).toHaveBeenCalledWith(
- HIDE_PACKAGE_MIGRATION_SURVEY_COOKIE,
- 'true',
- );
- });
-
- it('does not display the banner', () => {
- expect(findBanner().exists()).toBe(false);
- });
- });
-
- describe('when the primary button is clicked', () => {
- beforeEach(() => {
- findBanner().vm.$emit('primary');
- });
-
- it('sets the dismissed cookie', () => {
- expect(utils.setCookie).toHaveBeenCalledWith(
- HIDE_PACKAGE_MIGRATION_SURVEY_COOKIE,
- 'true',
- );
- });
-
- it('does not display the banner', () => {
- expect(findBanner().exists()).toBe(false);
- });
- });
- });
-
- describe('with the dismissed cookie set', () => {
- beforeEach(() => {
- jest.spyOn(utils, 'getCookie').mockReturnValue('true');
-
- mountComponent();
- });
-
- it('does not display the banner', () => {
- expect(findBanner().exists()).toBe(false);
- });
- });
- });
-
describe('search component', () => {
it('exists', () => {
mountComponent();
@@ -372,18 +287,6 @@ describe('PackagesListApp', () => {
describe('bulk delete package', () => {
const items = [{ id: '1' }, { id: '2' }];
- it('deletePackage is bound to package-list package:delete event', async () => {
- mountComponent();
-
- await waitForFirstRequest();
-
- findListComponent().vm.$emit('delete', [{ id: '1' }, { id: '2' }]);
-
- await waitForPromises();
-
- expect(findDeletePackagesModal().props('itemsToBeDeleted')).toEqual(items);
- });
-
it('calls mutation with the right values and shows success alert', async () => {
const mutationResolver = jest.fn().mockResolvedValue(packagesDestroyMutation());
mountComponent({
@@ -394,8 +297,6 @@ describe('PackagesListApp', () => {
findListComponent().vm.$emit('delete', items);
- findDeletePackagesModal().vm.$emit('confirm');
-
expect(mutationResolver).toHaveBeenCalledWith({
ids: items.map((item) => item.id),
});
@@ -417,8 +318,6 @@ describe('PackagesListApp', () => {
findListComponent().vm.$emit('delete', items);
- findDeletePackagesModal().vm.$emit('confirm');
-
await waitForPromises();
expect(findAlert().exists()).toBe(true);
diff --git a/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js b/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
index 1790a9c9bf5..1a157beebe4 100644
--- a/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
+++ b/spec/frontend/pages/import/bulk_imports/history/components/bulk_imports_history_app_spec.js
@@ -1,4 +1,4 @@
-import { GlEmptyState, GlLoadingIcon, GlTable } from '@gitlab/ui';
+import { GlEmptyState, GlLoadingIcon, GlTableLite } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
@@ -23,7 +23,9 @@ describe('BulkImportsHistoryApp', () => {
id: 1,
bulk_import_id: 1,
status: 'finished',
+ entity_type: 'group',
source_full_path: 'top-level-group-12',
+ destination_full_path: 'h5bp/top-level-group-12',
destination_name: 'top-level-group-12',
destination_namespace: 'h5bp',
created_at: '2021-07-08T10:03:44.743Z',
@@ -33,8 +35,10 @@ describe('BulkImportsHistoryApp', () => {
id: 2,
bulk_import_id: 2,
status: 'failed',
+ entity_type: 'project',
source_full_path: 'autodevops-demo',
destination_name: 'autodevops-demo',
+ destination_full_path: 'some-group/autodevops-demo',
destination_namespace: 'flightjs',
parent_id: null,
namespace_id: null,
@@ -74,6 +78,7 @@ describe('BulkImportsHistoryApp', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
+ mock.onGet(API_URL).reply(200, DUMMY_RESPONSE, DEFAULT_HEADERS);
});
afterEach(() => {
@@ -97,11 +102,10 @@ describe('BulkImportsHistoryApp', () => {
});
it('renders table with data when history is available', async () => {
- mock.onGet(API_URL).reply(200, DUMMY_RESPONSE, DEFAULT_HEADERS);
createComponent();
await axios.waitForAll();
- const table = wrapper.findComponent(GlTable);
+ const table = wrapper.findComponent(GlTableLite);
expect(table.exists()).toBe(true);
// can't use .props() or .attributes() here
expect(table.vm.$attrs.items).toHaveLength(DUMMY_RESPONSE.length);
@@ -110,7 +114,6 @@ describe('BulkImportsHistoryApp', () => {
it('changes page when requested by pagination bar', async () => {
const NEW_PAGE = 4;
- mock.onGet(API_URL).reply(200, DUMMY_RESPONSE, DEFAULT_HEADERS);
createComponent();
await axios.waitForAll();
mock.resetHistory();
@@ -126,7 +129,6 @@ describe('BulkImportsHistoryApp', () => {
it('changes page size when requested by pagination bar', async () => {
const NEW_PAGE_SIZE = 4;
- mock.onGet(API_URL).reply(200, DUMMY_RESPONSE, DEFAULT_HEADERS);
createComponent();
await axios.waitForAll();
mock.resetHistory();
@@ -143,7 +145,6 @@ describe('BulkImportsHistoryApp', () => {
it('sets up the local storage sync correctly', async () => {
const NEW_PAGE_SIZE = 4;
- mock.onGet(API_URL).reply(200, DUMMY_RESPONSE, DEFAULT_HEADERS);
createComponent();
await axios.waitForAll();
mock.resetHistory();
@@ -155,12 +156,37 @@ describe('BulkImportsHistoryApp', () => {
});
it('renders correct url for destination group when relative_url is empty', async () => {
- mock.onGet(API_URL).reply(200, DUMMY_RESPONSE, DEFAULT_HEADERS);
createComponent({ shallow: false });
await axios.waitForAll();
expect(wrapper.find('tbody tr a').attributes().href).toBe(
- `/${DUMMY_RESPONSE[0].destination_namespace}/${DUMMY_RESPONSE[0].destination_name}`,
+ `/${DUMMY_RESPONSE[0].destination_full_path}`,
+ );
+ });
+
+ it('renders loading icon when destination full path is not defined', async () => {
+ const RESPONSE = [{ ...DUMMY_RESPONSE[0], destination_full_path: null }];
+
+ mock.onGet(API_URL).reply(200, RESPONSE, DEFAULT_HEADERS);
+ createComponent({ shallow: false });
+ await axios.waitForAll();
+
+ expect(wrapper.find('tbody tr').findComponent(GlLoadingIcon).exists()).toBe(true);
+ });
+
+ it('adds slash to group urls', async () => {
+ createComponent({ shallow: false });
+ await axios.waitForAll();
+
+ expect(wrapper.find('tbody tr a').text()).toBe(`${DUMMY_RESPONSE[0].destination_full_path}/`);
+ });
+
+ it('does not prefix project urls with slash', async () => {
+ createComponent({ shallow: false });
+ await axios.waitForAll();
+
+ expect(wrapper.findAll('tbody tr a').at(1).text()).toBe(
+ DUMMY_RESPONSE[1].destination_full_path,
);
});
diff --git a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
index 9718d847ed5..aee56247209 100644
--- a/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/fork_form_spec.js
@@ -33,6 +33,7 @@ describe('ForkForm component', () => {
const DEFAULT_PROVIDE = {
newGroupPath: 'some/groups/path',
visibilityHelpPath: 'some/visibility/help/path',
+ cancelPath: '/some/project-full-path',
projectFullPath: '/some/project-full-path',
projectId: '10',
projectName: 'Project Name',
@@ -124,13 +125,13 @@ describe('ForkForm component', () => {
const findVisibilityRadioGroup = () =>
wrapper.find('[data-testid="fork-visibility-radio-group"]');
- it('will go to projectFullPath when click cancel button', () => {
+ it('will go to cancelPath when click cancel button', () => {
createComponent();
- const { projectFullPath } = DEFAULT_PROVIDE;
+ const { cancelPath } = DEFAULT_PROVIDE;
const cancelButton = wrapper.find('[data-testid="cancel-button"]');
- expect(cancelButton.attributes('href')).toBe(projectFullPath);
+ expect(cancelButton.attributes('href')).toBe(cancelPath);
});
const selectedMockNamespace = {
@@ -463,16 +464,12 @@ describe('ForkForm component', () => {
expect(urlUtility.redirectTo).not.toHaveBeenCalled();
});
- it('does not make POST request if no visbility is checked', async () => {
+ it('does not make POST request if no visibility is checked', async () => {
jest.spyOn(axios, 'post');
- setupComponent({
- fields: {
- visibility: {
- value: null,
- },
- },
- });
+ setupComponent();
+ wrapper.vm.form.fields.visibility.value = null;
+ await nextTick();
await submitForm();
diff --git a/spec/frontend/pages/projects/forks/new/components/project_namespace_spec.js b/spec/frontend/pages/projects/forks/new/components/project_namespace_spec.js
index f6d3957115f..82f451ed6ef 100644
--- a/spec/frontend/pages/projects/forks/new/components/project_namespace_spec.js
+++ b/spec/frontend/pages/projects/forks/new/components/project_namespace_spec.js
@@ -1,11 +1,4 @@
-import {
- GlButton,
- GlDropdown,
- GlDropdownItem,
- GlDropdownSectionHeader,
- GlSearchBoxByType,
- GlTruncate,
-} from '@gitlab/ui';
+import { GlButton, GlListboxItem, GlCollapsibleListbox } from '@gitlab/ui';
import { mount, shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
@@ -80,17 +73,16 @@ describe('ProjectNamespace component', () => {
};
const findButtonLabel = () => wrapper.findComponent(GlButton);
- const findDropdown = () => wrapper.findComponent(GlDropdown);
- const findDropdownText = () => wrapper.findComponent(GlTruncate);
- const findInput = () => wrapper.findComponent(GlSearchBoxByType);
+ const findListBox = () => wrapper.findComponent(GlCollapsibleListbox);
+ const findListBoxText = () => findListBox().props('toggleText');
- const clickDropdownItem = async () => {
- wrapper.findComponent(GlDropdownItem).vm.$emit('click');
+ const clickListBoxItem = async (value = '') => {
+ wrapper.findComponent(GlListboxItem).vm.$emit('select', value);
await nextTick();
};
const showDropdown = () => {
- findDropdown().vm.$emit('shown');
+ findListBox().vm.$emit('shown');
};
beforeAll(() => {
@@ -115,7 +107,7 @@ describe('ProjectNamespace component', () => {
});
it('renders placeholder text', () => {
- expect(findDropdownText().props('text')).toBe('Select a namespace');
+ expect(findListBoxText()).toBe('Select a namespace');
});
});
@@ -127,24 +119,18 @@ describe('ProjectNamespace component', () => {
showDropdown();
});
- it('focuses on the input when the dropdown is opened', () => {
- const spy = jest.spyOn(findInput().vm, 'focusInput');
- showDropdown();
- expect(spy).toHaveBeenCalledTimes(1);
- });
-
it('displays fetched namespaces', () => {
const listItems = wrapper.findAll('li');
- expect(listItems).toHaveLength(3);
- expect(listItems.at(0).findComponent(GlDropdownSectionHeader).text()).toBe('Namespaces');
- expect(listItems.at(1).text()).toBe(data.project.forkTargets.nodes[0].fullPath);
- expect(listItems.at(2).text()).toBe(data.project.forkTargets.nodes[1].fullPath);
+ expect(listItems).toHaveLength(2);
+ expect(listItems.at(0).text()).toBe(data.project.forkTargets.nodes[0].fullPath);
+ expect(listItems.at(1).text()).toBe(data.project.forkTargets.nodes[1].fullPath);
});
it('sets the selected namespace', async () => {
const { fullPath } = data.project.forkTargets.nodes[0];
- await clickDropdownItem();
- expect(findDropdownText().props('text')).toBe(fullPath);
+ await clickListBoxItem(fullPath);
+
+ expect(findListBoxText()).toBe(fullPath);
});
});
@@ -155,7 +141,7 @@ describe('ProjectNamespace component', () => {
});
it('renders `No matches found`', () => {
- expect(wrapper.find('li').text()).toBe('No matches found');
+ expect(findListBox().text()).toContain('No matches found');
});
});
diff --git a/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap b/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
index e7c7ec0d336..d67f842d011 100644
--- a/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
+++ b/spec/frontend/pages/projects/graphs/__snapshots__/code_coverage_spec.js.snap
@@ -45,6 +45,7 @@ exports[`Code Coverage when fetching data is successful matches the snapshot 1`]
toggletext="rspec"
variant="default"
>
+
<!---->
<!---->
@@ -57,22 +58,31 @@ exports[`Code Coverage when fetching data is successful matches the snapshot 1`]
tabindex="-1"
>
<gl-listbox-item-stub
+ data-testid="listbox-item-0"
isselected="true"
>
rspec
</gl-listbox-item-stub>
- <gl-listbox-item-stub>
+ <gl-listbox-item-stub
+ data-testid="listbox-item-1"
+ >
cypress
</gl-listbox-item-stub>
- <gl-listbox-item-stub>
+ <gl-listbox-item-stub
+ data-testid="listbox-item-2"
+ >
karma
</gl-listbox-item-stub>
+
+ <!---->
+
+ <!---->
</ul>
<!---->
diff --git a/spec/frontend/pages/projects/graphs/code_coverage_spec.js b/spec/frontend/pages/projects/graphs/code_coverage_spec.js
index e99734963e3..2ff45266a07 100644
--- a/spec/frontend/pages/projects/graphs/code_coverage_spec.js
+++ b/spec/frontend/pages/projects/graphs/code_coverage_spec.js
@@ -6,7 +6,7 @@ import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
-import httpStatusCodes from '~/lib/utils/http_status';
+import { HTTP_STATUS_BAD_REQUEST, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import CodeCoverage from '~/pages/projects/graphs/components/code_coverage.vue';
import { codeCoverageMockData, sortedDataByDates } from './mock_data';
@@ -49,7 +49,7 @@ describe('Code Coverage', () => {
describe('when fetching data is successful', () => {
beforeEach(() => {
mockAxios = new MockAdapter(axios);
- mockAxios.onGet().replyOnce(httpStatusCodes.OK, codeCoverageMockData);
+ mockAxios.onGet().replyOnce(HTTP_STATUS_OK, codeCoverageMockData);
createComponent();
@@ -84,7 +84,7 @@ describe('Code Coverage', () => {
describe('when fetching data fails', () => {
beforeEach(() => {
mockAxios = new MockAdapter(axios);
- mockAxios.onGet().replyOnce(httpStatusCodes.BAD_REQUEST);
+ mockAxios.onGet().replyOnce(HTTP_STATUS_BAD_REQUEST);
createComponent();
@@ -108,7 +108,7 @@ describe('Code Coverage', () => {
describe('when fetching data succeed but returns an empty state', () => {
beforeEach(() => {
mockAxios = new MockAdapter(axios);
- mockAxios.onGet().replyOnce(httpStatusCodes.OK, []);
+ mockAxios.onGet().replyOnce(HTTP_STATUS_OK, []);
createComponent();
@@ -136,7 +136,7 @@ describe('Code Coverage', () => {
describe('dropdown options', () => {
beforeEach(() => {
mockAxios = new MockAdapter(axios);
- mockAxios.onGet().replyOnce(httpStatusCodes.OK, codeCoverageMockData);
+ mockAxios.onGet().replyOnce(HTTP_STATUS_OK, codeCoverageMockData);
createComponent();
@@ -153,7 +153,7 @@ describe('Code Coverage', () => {
describe('interactions', () => {
beforeEach(() => {
mockAxios = new MockAdapter(axios);
- mockAxios.onGet().replyOnce(httpStatusCodes.OK, codeCoverageMockData);
+ mockAxios.onGet().replyOnce(HTTP_STATUS_OK, codeCoverageMockData);
createComponent();
diff --git a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js
index 897cbf5eaa4..29335308370 100644
--- a/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js
+++ b/spec/frontend/pages/projects/learn_gitlab/components/learn_gitlab_section_link_spec.js
@@ -85,6 +85,9 @@ describe('Learn GitLab Section Link', () => {
it('renders a popover trigger with question icon', () => {
expect(findPopoverTrigger().exists()).toBe(true);
expect(findPopoverTrigger().props('icon')).toBe('question-o');
+ expect(findPopoverTrigger().attributes('aria-label')).toBe(
+ LearnGitlabSectionLink.i18n.contactAdmin,
+ );
});
it('renders a popover', () => {
@@ -95,6 +98,15 @@ describe('Learn GitLab Section Link', () => {
});
});
+ it('renders default disabled message', () => {
+ expect(findPopover().text()).toContain(LearnGitlabSectionLink.i18n.contactAdmin);
+ });
+
+ it('renders custom disabled message if provided', () => {
+ createWrapper('trialStarted', { enabled: false, message: 'Custom message' });
+ expect(findPopover().text()).toContain('Custom message');
+ });
+
it('renders a link inside the popover', () => {
expect(findPopoverLink().exists()).toBe(true);
expect(findPopoverLink().attributes('href')).toBe(defaultProps.url);
diff --git a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
index 99df5b58d90..2d3b9afa8f6 100644
--- a/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
+++ b/spec/frontend/pages/projects/pipeline_schedules/shared/components/interval_pattern_input_spec.js
@@ -138,7 +138,7 @@ describe('Interval Pattern Input Component', () => {
'Every day (at 4:00am)',
'Every week (Monday at 4:00am)',
'Every month (Day 1 at 4:00am)',
- 'Custom ( Cron syntax )',
+ 'Custom ( Learn more. )',
]);
});
});
diff --git a/spec/frontend/pages/shared/wikis/components/wiki_content_spec.js b/spec/frontend/pages/shared/wikis/components/wiki_content_spec.js
index 7c9aae13d25..c8e9a31b526 100644
--- a/spec/frontend/pages/shared/wikis/components/wiki_content_spec.js
+++ b/spec/frontend/pages/shared/wikis/components/wiki_content_spec.js
@@ -5,7 +5,7 @@ import MockAdapter from 'axios-mock-adapter';
import WikiContent from '~/pages/shared/wikis/components/wiki_content.vue';
import { renderGFM } from '~/behaviors/markdown/render_gfm';
import axios from '~/lib/utils/axios_utils';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import waitForPromises from 'helpers/wait_for_promises';
import { handleLocationHash } from '~/lib/utils/common_utils';
@@ -59,7 +59,7 @@ describe('pages/shared/wikis/components/wiki_content', () => {
const content = 'content';
beforeEach(() => {
- mock.onGet(PATH, { params: { render_html: true } }).replyOnce(httpStatus.OK, { content });
+ mock.onGet(PATH, { params: { render_html: true } }).replyOnce(HTTP_STATUS_OK, { content });
buildWrapper();
return waitForPromises();
});
@@ -88,7 +88,7 @@ describe('pages/shared/wikis/components/wiki_content', () => {
describe('when loading content fails', () => {
beforeEach(() => {
- mock.onGet(PATH).replyOnce(httpStatus.INTERNAL_SERVER_ERROR, '');
+ mock.onGet(PATH).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR, '');
buildWrapper();
return waitForPromises();
});
diff --git a/spec/frontend/pipeline_new/utils/format_refs_spec.js b/spec/frontend/pipeline_new/utils/format_refs_spec.js
deleted file mode 100644
index 71190f55c16..00000000000
--- a/spec/frontend/pipeline_new/utils/format_refs_spec.js
+++ /dev/null
@@ -1,21 +0,0 @@
-import { BRANCH_REF_TYPE, TAG_REF_TYPE } from '~/pipeline_new/constants';
-import formatRefs from '~/pipeline_new/utils/format_refs';
-import { mockBranchRefs, mockTagRefs } from '../mock_data';
-
-describe('Format refs util', () => {
- it('formats branch ref correctly', () => {
- expect(formatRefs(mockBranchRefs, BRANCH_REF_TYPE)).toEqual([
- { fullName: 'refs/heads/main', shortName: 'main' },
- { fullName: 'refs/heads/dev', shortName: 'dev' },
- { fullName: 'refs/heads/release', shortName: 'release' },
- ]);
- });
-
- it('formats tag ref correctly', () => {
- expect(formatRefs(mockTagRefs, TAG_REF_TYPE)).toEqual([
- { fullName: 'refs/tags/1.0.0', shortName: '1.0.0' },
- { fullName: 'refs/tags/1.1.0', shortName: '1.1.0' },
- { fullName: 'refs/tags/1.2.0', shortName: '1.2.0' },
- ]);
- });
-});
diff --git a/spec/frontend/pipeline_wizard/components/wrapper_spec.js b/spec/frontend/pipeline_wizard/components/wrapper_spec.js
index d5b78cebcb3..33c6394eb41 100644
--- a/spec/frontend/pipeline_wizard/components/wrapper_spec.js
+++ b/spec/frontend/pipeline_wizard/components/wrapper_spec.js
@@ -364,6 +364,7 @@ describe('Pipeline Wizard - wrapper.vue', () => {
extra: {
fromStep: 0,
toStep: 1,
+ features: expect.any(Object),
},
});
});
@@ -386,6 +387,7 @@ describe('Pipeline Wizard - wrapper.vue', () => {
extra: {
fromStep: 1,
toStep: 0,
+ features: expect.any(Object),
},
});
});
@@ -409,6 +411,7 @@ describe('Pipeline Wizard - wrapper.vue', () => {
extra: {
fromStep: 2,
toStep: 1,
+ features: expect.any(Object),
},
});
});
@@ -429,6 +432,9 @@ describe('Pipeline Wizard - wrapper.vue', () => {
category: trackingCategory,
label: 'pipeline_wizard_commit',
property: 'commit',
+ extra: {
+ features: expect.any(Object),
+ },
});
});
@@ -443,6 +449,7 @@ describe('Pipeline Wizard - wrapper.vue', () => {
label: 'pipeline_wizard_editor_interaction',
extra: {
currentStep: 0,
+ features: expect.any(Object),
},
});
});
diff --git a/spec/frontend/pipelines/pipelines_spec.js b/spec/frontend/pipelines/pipelines_spec.js
index a3f15e25f36..351572fc83a 100644
--- a/spec/frontend/pipelines/pipelines_spec.js
+++ b/spec/frontend/pipelines/pipelines_spec.js
@@ -71,7 +71,7 @@ describe('Pipelines', () => {
const findTablePagination = () => wrapper.findComponent(TablePagination);
const findTab = (tab) => wrapper.findByTestId(`pipelines-tab-${tab}`);
- const findPipelineKeyDropdown = () => wrapper.findByTestId('pipeline-key-dropdown');
+ const findPipelineKeyCollapsibleBox = () => wrapper.findByTestId('pipeline-key-collapsible-box');
const findRunPipelineButton = () => wrapper.findByTestId('run-pipeline-button');
const findCiLintButton = () => wrapper.findByTestId('ci-lint-button');
const findCleanCacheButton = () => wrapper.findByTestId('clear-cache-button');
@@ -545,8 +545,8 @@ describe('Pipelines', () => {
expect(findFilteredSearch().exists()).toBe(true);
});
- it('renders the pipeline key dropdown', () => {
- expect(findPipelineKeyDropdown().exists()).toBe(true);
+ it('renders the pipeline key collapsible box', () => {
+ expect(findPipelineKeyCollapsibleBox().exists()).toBe(true);
});
it('renders tab empty state finished scope', async () => {
@@ -578,7 +578,7 @@ describe('Pipelines', () => {
});
it('does not render the pipeline key dropdown', () => {
- expect(findPipelineKeyDropdown().exists()).toBe(false);
+ expect(findPipelineKeyCollapsibleBox().exists()).toBe(false);
});
it('does not render tabs nor buttons', () => {
diff --git a/spec/frontend/pipelines/pipelines_table_spec.js b/spec/frontend/pipelines/pipelines_table_spec.js
index 740037a5ac8..9359bd9b95f 100644
--- a/spec/frontend/pipelines/pipelines_table_spec.js
+++ b/spec/frontend/pipelines/pipelines_table_spec.js
@@ -17,7 +17,7 @@ import {
TRACKING_CATEGORIES,
} from '~/pipelines/constants';
-import CiBadge from '~/vue_shared/components/ci_badge_link.vue';
+import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
jest.mock('~/pipelines/event_hub');
@@ -50,7 +50,7 @@ describe('Pipelines Table', () => {
};
const findGlTableLite = () => wrapper.findComponent(GlTableLite);
- const findStatusBadge = () => wrapper.findComponent(CiBadge);
+ const findCiBadgeLink = () => wrapper.findComponent(CiBadgeLink);
const findPipelineInfo = () => wrapper.findComponent(PipelineUrl);
const findTriggerer = () => wrapper.findComponent(PipelineTriggerer);
const findPipelineMiniGraph = () => wrapper.findComponent(PipelineMiniGraph);
@@ -97,7 +97,7 @@ describe('Pipelines Table', () => {
describe('status cell', () => {
it('should render a status badge', () => {
- expect(findStatusBadge().exists()).toBe(true);
+ expect(findCiBadgeLink().exists()).toBe(true);
});
});
@@ -171,7 +171,7 @@ describe('Pipelines Table', () => {
});
it('tracks status badge click', () => {
- findStatusBadge().vm.$emit('ciStatusBadgeClick');
+ findCiBadgeLink().vm.$emit('ciStatusBadgeClick');
expect(trackingSpy).toHaveBeenCalledWith(undefined, 'click_ci_status_badge', {
label: TRACKING_CATEGORIES.table,
diff --git a/spec/frontend/projects/commit/components/branches_dropdown_spec.js b/spec/frontend/projects/commit/components/branches_dropdown_spec.js
index a84dd246f5d..7334e007e18 100644
--- a/spec/frontend/projects/commit/components/branches_dropdown_spec.js
+++ b/spec/frontend/projects/commit/components/branches_dropdown_spec.js
@@ -1,9 +1,8 @@
-import { GlDropdownItem, GlSearchBoxByType } from '@gitlab/ui';
+import { GlCollapsibleListbox } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
import Vuex from 'vuex';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import { DEFAULT_DEBOUNCE_AND_THROTTLE_MS } from '~/lib/utils/constants';
import BranchesDropdown from '~/projects/commit/components/branches_dropdown.vue';
Vue.use(Vuex);
@@ -34,12 +33,7 @@ describe('BranchesDropdown', () => {
}),
);
};
-
- const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
- const findSearchBoxByType = () => wrapper.findComponent(GlSearchBoxByType);
- const findDropdownItemByIndex = (index) => wrapper.findAllComponents(GlDropdownItem).at(index);
- const findNoResults = () => wrapper.findByTestId('empty-result-message');
- const findLoading = () => wrapper.findByTestId('dropdown-text-loading-icon');
+ const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
afterEach(() => {
wrapper.destroy();
@@ -55,72 +49,6 @@ describe('BranchesDropdown', () => {
it('invokes fetchBranches', () => {
expect(spyFetchBranches).toHaveBeenCalled();
});
-
- describe('with a value but visually blanked', () => {
- beforeEach(() => {
- createComponent({ value: '_main_', blanked: true }, { branch: '_main_' });
- });
-
- it('renders all branches', () => {
- expect(findAllDropdownItems()).toHaveLength(3);
- expect(findDropdownItemByIndex(0).text()).toBe('_main_');
- expect(findDropdownItemByIndex(1).text()).toBe('_branch_1_');
- expect(findDropdownItemByIndex(2).text()).toBe('_branch_2_');
- });
-
- it('selects the active branch', () => {
- expect(wrapper.vm.isSelected('_main_')).toBe(true);
- });
- });
- });
-
- describe('Loading states', () => {
- it('shows loading icon while fetching', () => {
- createComponent({ value: '' }, { isFetching: true });
-
- expect(findLoading().isVisible()).toBe(true);
- });
-
- it('does not show loading icon', () => {
- createComponent({ value: '' });
-
- expect(findLoading().isVisible()).toBe(false);
- });
- });
-
- describe('No branches found', () => {
- beforeEach(() => {
- createComponent({ value: '_non_existent_branch_' });
- });
-
- it('renders empty results message', () => {
- expect(findNoResults().text()).toBe('No matching results');
- });
-
- it('shows GlSearchBoxByType with default attributes', () => {
- expect(findSearchBoxByType().exists()).toBe(true);
- expect(findSearchBoxByType().vm.$attrs).toMatchObject({
- placeholder: 'Search branches',
- debounce: DEFAULT_DEBOUNCE_AND_THROTTLE_MS,
- });
- });
- });
-
- describe('Search term is empty', () => {
- beforeEach(() => {
- createComponent({ value: '' });
- });
-
- it('renders all branches when search term is empty', () => {
- expect(findAllDropdownItems()).toHaveLength(3);
- expect(findDropdownItemByIndex(0).text()).toBe('_main_');
- expect(findDropdownItemByIndex(1).text()).toBe('_branch_1_');
- expect(findDropdownItemByIndex(2).text()).toBe('_branch_2_');
- });
-
- it('should not be selected on the inactive branch', () => {
- expect(wrapper.vm.isSelected('_main_')).toBe(false);
- });
});
describe('When searching', () => {
@@ -131,7 +59,7 @@ describe('BranchesDropdown', () => {
it('invokes fetchBranches', async () => {
const spy = jest.spyOn(wrapper.vm, 'fetchBranches');
- findSearchBoxByType().vm.$emit('input', '_anything_');
+ findDropdown().vm.$emit('search', '_anything_');
await nextTick();
@@ -140,46 +68,13 @@ describe('BranchesDropdown', () => {
});
});
- describe('Branches found', () => {
- beforeEach(() => {
- createComponent({ value: '_branch_1_' }, { branch: '_branch_1_' });
- });
-
- it('renders only the branch searched for', () => {
- expect(findAllDropdownItems()).toHaveLength(1);
- expect(findDropdownItemByIndex(0).text()).toBe('_branch_1_');
- });
-
- it('should not display empty results message', () => {
- expect(findNoResults().exists()).toBe(false);
- });
-
- it('should signify this branch is selected', () => {
- expect(wrapper.vm.isSelected('_branch_1_')).toBe(true);
- });
-
- it('should signify the branch is not selected', () => {
- expect(wrapper.vm.isSelected('_not_selected_branch_')).toBe(false);
- });
-
- describe('Custom events', () => {
- it('should emit selectBranch if an branch is clicked', () => {
- findDropdownItemByIndex(0).vm.$emit('click');
-
- expect(wrapper.emitted('selectBranch')).toEqual([['_branch_1_']]);
- expect(wrapper.vm.searchTerm).toBe('_branch_1_');
- });
- });
- });
-
describe('Case insensitive for search term', () => {
beforeEach(() => {
createComponent({ value: '_BrAnCh_1_' });
});
- it('renders only the branch searched for', () => {
- expect(findAllDropdownItems()).toHaveLength(1);
- expect(findDropdownItemByIndex(0).text()).toBe('_branch_1_');
+ it('returns only the branch searched for', () => {
+ expect(findDropdown().props('items')).toEqual([{ text: '_branch_1_', value: '_branch_1_' }]);
});
});
});
diff --git a/spec/frontend/projects/commit/components/projects_dropdown_spec.js b/spec/frontend/projects/commit/components/projects_dropdown_spec.js
index bb20918e0cd..0e213ff388a 100644
--- a/spec/frontend/projects/commit/components/projects_dropdown_spec.js
+++ b/spec/frontend/projects/commit/components/projects_dropdown_spec.js
@@ -1,4 +1,4 @@
-import { GlDropdownItem, GlSearchBoxByType } from '@gitlab/ui';
+import { GlCollapsibleListbox } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
import Vue from 'vue';
import Vuex from 'vuex';
@@ -35,78 +35,23 @@ describe('ProjectsDropdown', () => {
);
};
- const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
- const findSearchBoxByType = () => wrapper.findComponent(GlSearchBoxByType);
- const findDropdownItemByIndex = (index) => wrapper.findAllComponents(GlDropdownItem).at(index);
- const findNoResults = () => wrapper.findByTestId('empty-result-message');
+ const findDropdown = () => wrapper.findComponent(GlCollapsibleListbox);
afterEach(() => {
wrapper.destroy();
spyFetchProjects.mockReset();
});
- describe('No projects found', () => {
- beforeEach(() => {
- createComponent('_non_existent_project_');
- });
-
- it('renders empty results message', () => {
- expect(findNoResults().text()).toBe('No matching results');
- });
-
- it('shows GlSearchBoxByType with default attributes', () => {
- expect(findSearchBoxByType().exists()).toBe(true);
- expect(findSearchBoxByType().vm.$attrs).toMatchObject({
- placeholder: 'Search projects',
- });
- });
- });
-
- describe('Search term is empty', () => {
- beforeEach(() => {
- createComponent('');
- });
-
- it('renders all projects when search term is empty', () => {
- expect(findAllDropdownItems()).toHaveLength(3);
- expect(findDropdownItemByIndex(0).text()).toBe('_project_1_');
- expect(findDropdownItemByIndex(1).text()).toBe('_project_2_');
- expect(findDropdownItemByIndex(2).text()).toBe('_project_3_');
- });
-
- it('should not be selected on the inactive project', () => {
- expect(wrapper.vm.isSelected('_project_1_')).toBe(false);
- });
- });
-
describe('Projects found', () => {
beforeEach(() => {
createComponent('_project_1_', { targetProjectId: '1' });
});
- it('renders only the project searched for', () => {
- expect(findAllDropdownItems()).toHaveLength(1);
- expect(findDropdownItemByIndex(0).text()).toBe('_project_1_');
- });
-
- it('should not display empty results message', () => {
- expect(findNoResults().exists()).toBe(false);
- });
-
- it('should signify this project is selected', () => {
- expect(findDropdownItemByIndex(0).props('isChecked')).toBe(true);
- });
-
- it('should signify the project is not selected', () => {
- expect(wrapper.vm.isSelected('_not_selected_project_')).toBe(false);
- });
-
describe('Custom events', () => {
it('should emit selectProject if a project is clicked', () => {
- findDropdownItemByIndex(0).vm.$emit('click');
+ findDropdown().vm.$emit('select', '1');
expect(wrapper.emitted('selectProject')).toEqual([['1']]);
- expect(wrapper.vm.filterTerm).toBe('_project_1_');
});
});
});
@@ -117,8 +62,7 @@ describe('ProjectsDropdown', () => {
});
it('renders only the project searched for', () => {
- expect(findAllDropdownItems()).toHaveLength(1);
- expect(findDropdownItemByIndex(0).text()).toBe('_project_1_');
+ expect(findDropdown().props('items')).toEqual([{ text: '_project_1_', value: '1' }]);
});
});
});
diff --git a/spec/frontend/projects/merge_requests/components/report_abuse_dropdown_item_spec.js b/spec/frontend/projects/merge_requests/components/report_abuse_dropdown_item_spec.js
new file mode 100644
index 00000000000..35b10375821
--- /dev/null
+++ b/spec/frontend/projects/merge_requests/components/report_abuse_dropdown_item_spec.js
@@ -0,0 +1,73 @@
+import { nextTick } from 'vue';
+import { GlDropdownItem } from '@gitlab/ui';
+import { MountingPortal } from 'portal-vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+
+import ReportAbuseDropdownItem from '~/projects/merge_requests/components/report_abuse_dropdown_item.vue';
+import AbuseCategorySelector from '~/abuse_reports/components/abuse_category_selector.vue';
+
+describe('ReportAbuseDropdownItem', () => {
+ let wrapper;
+
+ const ACTION_PATH = '/abuse_reports/add_category';
+ const USER_ID = '1';
+ const REPORTED_FROM_URL = 'http://example.com';
+
+ const createComponent = (props) => {
+ wrapper = shallowMountExtended(ReportAbuseDropdownItem, {
+ propsData: {
+ ...props,
+ },
+ provide: {
+ reportAbusePath: ACTION_PATH,
+ reportedUserId: USER_ID,
+ reportedFromUrl: REPORTED_FROM_URL,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ const findReportAbuseItem = () => wrapper.findComponent(GlDropdownItem);
+ const findAbuseCategorySelector = () => wrapper.findComponent(AbuseCategorySelector);
+ const findMountingPortal = () => wrapper.findComponent(MountingPortal);
+
+ it('renders report abuse dropdown item', () => {
+ expect(findReportAbuseItem().text()).toBe(ReportAbuseDropdownItem.i18n.reportAbuse);
+ });
+
+ it('renders abuse category selector with the drawer initially closed', () => {
+ expect(findAbuseCategorySelector().exists()).toBe(true);
+
+ expect(findAbuseCategorySelector().props('showDrawer')).toBe(false);
+ });
+
+ it('renders abuse category selector inside MountingPortal', () => {
+ expect(findMountingPortal().props()).toMatchObject({
+ mountTo: '#js-report-abuse-drawer',
+ append: true,
+ name: 'abuse-category-selector',
+ });
+ });
+
+ describe('when dropdown item is clicked', () => {
+ beforeEach(() => {
+ findReportAbuseItem().vm.$emit('click');
+ return nextTick();
+ });
+
+ it('opens the abuse category selector', () => {
+ expect(findAbuseCategorySelector().props('showDrawer')).toBe(true);
+ });
+
+ it('closes the abuse category selector', async () => {
+ findAbuseCategorySelector().vm.$emit('close-drawer');
+
+ await nextTick();
+
+ expect(findAbuseCategorySelector().props('showDrawer')).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_charts_spec.js b/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_charts_spec.js
index 8c18d2992ea..cf28eda5349 100644
--- a/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_charts_spec.js
+++ b/spec/frontend/projects/pipelines/charts/components/ci_cd_analytics_charts_spec.js
@@ -5,25 +5,32 @@ import CiCdAnalyticsCharts from '~/vue_shared/components/ci_cd_analytics/ci_cd_a
import SegmentedControlButtonGroup from '~/vue_shared/components/segmented_control_button_group.vue';
import { transformedAreaChartData, chartOptions } from '../mock_data';
+const charts = [
+ {
+ range: 'test range 1',
+ title: 'title 1',
+ data: transformedAreaChartData,
+ },
+ {
+ range: 'test range 2',
+ title: 'title 2',
+ data: transformedAreaChartData,
+ },
+ {
+ range: 'test range 3',
+ title: 'title 3',
+ data: transformedAreaChartData,
+ },
+ {
+ range: 'test range 4',
+ title: 'title 4',
+ data: transformedAreaChartData,
+ },
+];
+
const DEFAULT_PROPS = {
chartOptions,
- charts: [
- {
- range: 'test range 1',
- title: 'title 1',
- data: transformedAreaChartData,
- },
- {
- range: 'test range 2',
- title: 'title 2',
- data: transformedAreaChartData,
- },
- {
- range: 'test range 3',
- title: 'title 3',
- data: transformedAreaChartData,
- },
- ],
+ charts,
};
describe('~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_charts.vue', () => {
@@ -55,13 +62,13 @@ describe('~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_charts.vue', (
wrapper = createWrapper();
});
- it('should default to the first chart', () => {
- expect(findSegmentedControl().props('value')).toBe(0);
+ it('should default to the 3rd chart (last 90 days)', () => {
+ expect(findSegmentedControl().props('value')).toBe(2);
});
it('should use the title and index as values', () => {
const options = findSegmentedControl().props('options');
- expect(options).toHaveLength(3);
+ expect(options).toHaveLength(charts.length);
expect(options).toEqual([
{
text: 'title 1',
@@ -75,6 +82,10 @@ describe('~/vue_shared/components/ci_cd_analytics/ci_cd_analytics_charts.vue', (
text: 'title 3',
value: 2,
},
+ {
+ text: 'title 4',
+ value: 3,
+ },
]);
});
diff --git a/spec/frontend/projects/settings/components/default_branch_selector_spec.js b/spec/frontend/projects/settings/components/default_branch_selector_spec.js
index 94648d87524..bfbf3e234f4 100644
--- a/spec/frontend/projects/settings/components/default_branch_selector_spec.js
+++ b/spec/frontend/projects/settings/components/default_branch_selector_spec.js
@@ -32,6 +32,7 @@ describe('projects/settings/components/default_branch_selector', () => {
value: persistedDefaultBranch,
enabledRefTypes: [REF_TYPE_BRANCHES],
projectId,
+ refType: null,
state: true,
translations: {
dropdownHeader: expect.any(String),
diff --git a/spec/frontend/projects/settings/repository/branch_rules/components/branch_rule_spec.js b/spec/frontend/projects/settings/repository/branch_rules/components/branch_rule_spec.js
index 49c45c080b4..8d0fd390e35 100644
--- a/spec/frontend/projects/settings/repository/branch_rules/components/branch_rule_spec.js
+++ b/spec/frontend/projects/settings/repository/branch_rules/components/branch_rule_spec.js
@@ -20,6 +20,7 @@ describe('Branch rule', () => {
};
const findDefaultBadge = () => wrapper.findByText(i18n.defaultLabel);
+ const findProtectedBadge = () => wrapper.findByText(i18n.protectedLabel);
const findBranchName = () => wrapper.findByText(branchRulePropsMock.name);
const findProtectionDetailsList = () => wrapper.findByRole('list');
const findProtectionDetailsListItems = () => wrapper.findAllByRole('listitem');
@@ -32,17 +33,23 @@ describe('Branch rule', () => {
});
describe('badges', () => {
- it('renders default badge', () => {
+ it('renders both default and protected badges', () => {
expect(findDefaultBadge().exists()).toBe(true);
+ expect(findProtectedBadge().exists()).toBe(true);
});
it('does not render default badge if isDefault is set to false', () => {
createComponent({ isDefault: false });
expect(findDefaultBadge().exists()).toBe(false);
});
+
+ it('does not render protected badge if branchProtection is null', () => {
+ createComponent(branchRuleWithoutDetailsPropsMock);
+ expect(findProtectedBadge().exists()).toBe(false);
+ });
});
- it('does not render the protection details list if no details are present', () => {
+ it('does not render the protection details list when branchProtection is null', () => {
createComponent(branchRuleWithoutDetailsPropsMock);
expect(findProtectionDetailsList().exists()).toBe(false);
});
diff --git a/spec/frontend/projects/settings/repository/branch_rules/mock_data.js b/spec/frontend/projects/settings/repository/branch_rules/mock_data.js
index 6f506882c36..de7f6c8b88d 100644
--- a/spec/frontend/projects/settings/repository/branch_rules/mock_data.js
+++ b/spec/frontend/projects/settings/repository/branch_rules/mock_data.js
@@ -92,10 +92,7 @@ export const branchRuleWithoutDetailsPropsMock = {
name: 'branch-1',
isDefault: false,
matchingBranchesCount: 1,
- branchProtection: {
- allowForcePush: false,
- codeOwnerApprovalRequired: false,
- },
+ branchProtection: null,
approvalRulesTotal: 0,
statusChecksTotal: 0,
};
diff --git a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
index 13f3eea277a..5fc9f9ba629 100644
--- a/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
+++ b/spec/frontend/projects/settings_service_desk/components/service_desk_root_spec.js
@@ -3,7 +3,7 @@ import { shallowMount } from '@vue/test-utils';
import AxiosMockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
-import httpStatusCodes from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import ServiceDeskRoot from '~/projects/settings_service_desk/components/service_desk_root.vue';
import ServiceDeskSetting from '~/projects/settings_service_desk/components/service_desk_setting.vue';
@@ -95,7 +95,7 @@ describe('ServiceDeskRoot', () => {
});
it('sends a request to turn service desk on', () => {
- axiosMock.onPut(provideData.endpoint).replyOnce(httpStatusCodes.OK);
+ axiosMock.onPut(provideData.endpoint).replyOnce(HTTP_STATUS_OK);
expect(spy).toHaveBeenCalledWith(provideData.endpoint, { service_desk_enabled: true });
});
@@ -117,7 +117,7 @@ describe('ServiceDeskRoot', () => {
});
it('sends a request to turn service desk off', () => {
- axiosMock.onPut(provideData.endpoint).replyOnce(httpStatusCodes.OK);
+ axiosMock.onPut(provideData.endpoint).replyOnce(HTTP_STATUS_OK);
expect(spy).toHaveBeenCalledWith(provideData.endpoint, { service_desk_enabled: false });
});
@@ -133,7 +133,7 @@ describe('ServiceDeskRoot', () => {
describe('save event', () => {
describe('successful request', () => {
beforeEach(async () => {
- axiosMock.onPut(provideData.endpoint).replyOnce(httpStatusCodes.OK);
+ axiosMock.onPut(provideData.endpoint).replyOnce(HTTP_STATUS_OK);
wrapper = createComponent();
diff --git a/spec/frontend/read_more_spec.js b/spec/frontend/read_more_spec.js
index 80d7c941660..9eddc50d50a 100644
--- a/spec/frontend/read_more_spec.js
+++ b/spec/frontend/read_more_spec.js
@@ -1,21 +1,23 @@
-import { loadHTMLFixture, resetHTMLFixture } from 'helpers/fixtures';
+import { loadHTMLFixture, resetHTMLFixture, setHTMLFixture } from 'helpers/fixtures';
import initReadMore from '~/read_more';
describe('Read more click-to-expand functionality', () => {
const fixtureName = 'projects/overview.html';
- beforeEach(() => {
- loadHTMLFixture(fixtureName);
- });
+ const findTrigger = () => document.querySelector('.js-read-more-trigger');
afterEach(() => {
resetHTMLFixture();
});
describe('expands target element', () => {
+ beforeEach(() => {
+ loadHTMLFixture(fixtureName);
+ });
+
it('adds "is-expanded" class to target element', () => {
const target = document.querySelector('.read-more-container');
- const trigger = document.querySelector('.js-read-more-trigger');
+ const trigger = findTrigger();
initReadMore();
trigger.click();
@@ -23,4 +25,25 @@ describe('Read more click-to-expand functionality', () => {
expect(target.classList.contains('is-expanded')).toEqual(true);
});
});
+
+ describe('given click on nested element', () => {
+ beforeEach(() => {
+ setHTMLFixture(`
+ <p>Target</p>
+ <button type="button" class="js-read-more-trigger">
+ <span>Button text</span>
+ </button>
+ `);
+
+ const trigger = findTrigger();
+ const nestedElement = trigger.firstElementChild;
+ initReadMore();
+
+ nestedElement.click();
+ });
+
+ it('removes the trigger element', async () => {
+ expect(findTrigger()).toBe(null);
+ });
+ });
});
diff --git a/spec/frontend/ref/components/ref_selector_spec.js b/spec/frontend/ref/components/ref_selector_spec.js
index 96601a729b2..4997c13bbb2 100644
--- a/spec/frontend/ref/components/ref_selector_spec.js
+++ b/spec/frontend/ref/components/ref_selector_spec.js
@@ -18,6 +18,8 @@ import {
REF_TYPE_BRANCHES,
REF_TYPE_TAGS,
REF_TYPE_COMMITS,
+ BRANCH_REF_TYPE,
+ TAG_REF_TYPE,
} from '~/ref/constants';
import createStore from '~/ref/stores/';
@@ -34,7 +36,7 @@ describe('Ref selector component', () => {
let commitApiCallSpy;
let requestSpies;
- const createComponent = (mountOverrides = {}) => {
+ const createComponent = (mountOverrides = {}, propsData = {}) => {
wrapper = mount(
RefSelector,
merge(
@@ -42,6 +44,7 @@ describe('Ref selector component', () => {
propsData: {
projectId,
value: '',
+ ...propsData,
},
listeners: {
// simulate a parent component v-model binding
@@ -338,13 +341,14 @@ describe('Ref selector component', () => {
describe('branches', () => {
describe('when the branches search returns results', () => {
beforeEach(() => {
- createComponent();
+ createComponent({}, { refType: BRANCH_REF_TYPE, useSymbolicRefNames: true });
return waitForRequests();
});
it('renders the branches section in the dropdown', () => {
expect(findBranchesSection().exists()).toBe(true);
+ expect(findBranchesSection().props('shouldShowCheck')).toBe(true);
});
it('renders the "Branches" heading with a total number indicator', () => {
@@ -415,13 +419,14 @@ describe('Ref selector component', () => {
describe('tags', () => {
describe('when the tags search returns results', () => {
beforeEach(() => {
- createComponent();
+ createComponent({}, { refType: TAG_REF_TYPE, useSymbolicRefNames: true });
return waitForRequests();
});
it('renders the tags section in the dropdown', () => {
expect(findTagsSection().exists()).toBe(true);
+ expect(findTagsSection().props('shouldShowCheck')).toBe(true);
});
it('renders the "Tags" heading with a total number indicator', () => {
diff --git a/spec/frontend/repository/commits_service_spec.js b/spec/frontend/repository/commits_service_spec.js
index de7c56f239a..e56975d021a 100644
--- a/spec/frontend/repository/commits_service_spec.js
+++ b/spec/frontend/repository/commits_service_spec.js
@@ -1,9 +1,10 @@
import MockAdapter from 'axios-mock-adapter';
import axios from '~/lib/utils/axios_utils';
import { loadCommits, isRequested, resetRequestedCommits } from '~/repository/commits_service';
-import httpStatus from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { createAlert } from '~/flash';
import { I18N_COMMIT_DATA_FETCH_ERROR } from '~/repository/constants';
+import { refWithSpecialCharMock } from './mock_data';
jest.mock('~/flash');
@@ -14,7 +15,7 @@ describe('commits service', () => {
beforeEach(() => {
mock = new MockAdapter(axios);
- mock.onGet(url).reply(httpStatus.OK, [], {});
+ mock.onGet(url).reply(HTTP_STATUS_OK, [], {});
jest.spyOn(axios, 'get');
});
@@ -39,10 +40,12 @@ describe('commits service', () => {
expect(axios.get).toHaveBeenCalledWith(testUrl, { params: { format: 'json', offset } });
});
- it('encodes the path correctly', async () => {
- await requestCommits(1, 'some-project', 'with $peci@l ch@rs/');
+ it('encodes the path and ref', async () => {
+ const encodedRef = encodeURIComponent(refWithSpecialCharMock);
+ const encodedUrl = `/some-project/-/refs/${encodedRef}/logs_tree/with%20%24peci%40l%20ch%40rs%2F`;
+
+ await requestCommits(1, 'some-project', 'with $peci@l ch@rs/', refWithSpecialCharMock);
- const encodedUrl = '/some-project/-/refs/main/logs_tree/with%20%24peci%40l%20ch%40rs%2F';
expect(axios.get).toHaveBeenCalledWith(encodedUrl, expect.anything());
});
@@ -68,7 +71,7 @@ describe('commits service', () => {
it('calls `createAlert` when the request fails', async () => {
const invalidPath = '/#@ some/path';
const invalidUrl = `${url}${invalidPath}`;
- mock.onGet(invalidUrl).replyOnce(httpStatus.INTERNAL_SERVER_ERROR, [], {});
+ mock.onGet(invalidUrl).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR, [], {});
await requestCommits(1, 'my-project', invalidPath);
diff --git a/spec/frontend/repository/components/blob_content_viewer_spec.js b/spec/frontend/repository/components/blob_content_viewer_spec.js
index 6ece72c41bb..2e8860f67ef 100644
--- a/spec/frontend/repository/components/blob_content_viewer_spec.js
+++ b/spec/frontend/repository/components/blob_content_viewer_spec.js
@@ -25,7 +25,7 @@ import CodeIntelligence from '~/code_navigation/components/app.vue';
import * as urlUtility from '~/lib/utils/url_utility';
import { isLoggedIn, handleLocationHash } from '~/lib/utils/common_utils';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
-import httpStatusCodes from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import LineHighlighter from '~/blob/line_highlighter';
import { LEGACY_FILE_TYPES } from '~/repository/constants';
import { SIMPLE_BLOB_VIEWER, RICH_BLOB_VIEWER } from '~/blob/components/constants';
@@ -256,19 +256,19 @@ describe('Blob content viewer component', () => {
);
it('loads the LineHighlighter', async () => {
- mockAxios.onGet(legacyViewerUrl).replyOnce(httpStatusCodes.OK, 'test');
+ mockAxios.onGet(legacyViewerUrl).replyOnce(HTTP_STATUS_OK, 'test');
await createComponent({ blob: { ...simpleViewerMock, fileType, highlightJs } });
expect(LineHighlighter).toHaveBeenCalled();
});
it('does not load the LineHighlighter for RichViewers', async () => {
- mockAxios.onGet(legacyViewerUrl).replyOnce(httpStatusCodes.OK, 'test');
+ mockAxios.onGet(legacyViewerUrl).replyOnce(HTTP_STATUS_OK, 'test');
await createComponent({ blob: { ...richViewerMock, fileType, highlightJs } });
expect(LineHighlighter).not.toHaveBeenCalled();
});
it('scrolls to the hash', async () => {
- mockAxios.onGet(legacyViewerUrl).replyOnce(httpStatusCodes.OK, 'test');
+ mockAxios.onGet(legacyViewerUrl).replyOnce(HTTP_STATUS_OK, 'test');
await createComponent({ blob: { ...simpleViewerMock, fileType, highlightJs } });
expect(handleLocationHash).toHaveBeenCalled();
});
@@ -368,7 +368,7 @@ describe('Blob content viewer component', () => {
it('does not load a CodeIntelligence component when no viewers are loaded', async () => {
const url = 'some_file.js?format=json&viewer=rich';
- mockAxios.onGet(url).replyOnce(httpStatusCodes.INTERNAL_SERVER_ERROR);
+ mockAxios.onGet(url).replyOnce(HTTP_STATUS_INTERNAL_SERVER_ERROR);
await createComponent({ blob: { ...richViewerMock, fileType: 'unknown' } });
expect(findCodeIntelligence().exists()).toBe(false);
diff --git a/spec/frontend/repository/components/blob_viewers/notebook_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/notebook_viewer_spec.js
new file mode 100644
index 00000000000..51f3d31ec72
--- /dev/null
+++ b/spec/frontend/repository/components/blob_viewers/notebook_viewer_spec.js
@@ -0,0 +1,40 @@
+import { GlLoadingIcon } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import NotebookViewer from '~/repository/components/blob_viewers/notebook_viewer.vue';
+import notebookLoader from '~/blob/notebook';
+
+jest.mock('~/blob/notebook');
+
+describe('Notebook Viewer', () => {
+ let wrapper;
+
+ const ROOT_RELATIVE_PATH = '/some/notebook/';
+ const DEFAULT_BLOB_DATA = { rawPath: `${ROOT_RELATIVE_PATH}file.ipynb` };
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(NotebookViewer, {
+ propsData: { blob: DEFAULT_BLOB_DATA },
+ });
+ };
+
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findNotebookWrapper = () => wrapper.findByTestId('notebook');
+
+ beforeEach(() => createComponent());
+
+ it('calls the notebook loader', () => {
+ expect(notebookLoader).toHaveBeenCalledWith({
+ el: wrapper.vm.$refs.viewer,
+ relativeRawPath: ROOT_RELATIVE_PATH,
+ });
+ });
+
+ it('renders a loading icon component', () => {
+ expect(findLoadingIcon().props('size')).toBe('lg');
+ });
+
+ it('renders the notebook wrapper', () => {
+ expect(findNotebookWrapper().exists()).toBe(true);
+ expect(findNotebookWrapper().attributes('data-endpoint')).toBe(DEFAULT_BLOB_DATA.rawPath);
+ });
+});
diff --git a/spec/frontend/repository/components/blob_viewers/openapi_viewer_spec.js b/spec/frontend/repository/components/blob_viewers/openapi_viewer_spec.js
new file mode 100644
index 00000000000..21994d04076
--- /dev/null
+++ b/spec/frontend/repository/components/blob_viewers/openapi_viewer_spec.js
@@ -0,0 +1,30 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import OpenapiViewer from '~/repository/components/blob_viewers/openapi_viewer.vue';
+import renderOpenApi from '~/blob/openapi';
+
+jest.mock('~/blob/openapi');
+
+describe('OpenAPI Viewer', () => {
+ let wrapper;
+
+ const DEFAULT_BLOB_DATA = { rawPath: 'some/openapi.yml' };
+
+ const createOpenApiViewer = () => {
+ wrapper = shallowMountExtended(OpenapiViewer, {
+ propsData: { blob: DEFAULT_BLOB_DATA },
+ });
+ };
+
+ const findOpenApiViewer = () => wrapper.findByTestId('openapi');
+
+ beforeEach(() => createOpenApiViewer());
+
+ it('calls the openapi render', () => {
+ expect(renderOpenApi).toHaveBeenCalledWith(wrapper.vm.$refs.viewer);
+ });
+
+ it('renders an openapi viewer', () => {
+ expect(findOpenApiViewer().exists()).toBe(true);
+ expect(findOpenApiViewer().attributes('data-endpoint')).toBe(DEFAULT_BLOB_DATA.rawPath);
+ });
+});
diff --git a/spec/frontend/repository/components/fork_info_spec.js b/spec/frontend/repository/components/fork_info_spec.js
new file mode 100644
index 00000000000..c23d5ae5823
--- /dev/null
+++ b/spec/frontend/repository/components/fork_info_spec.js
@@ -0,0 +1,122 @@
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import { GlSkeletonLoader, GlIcon, GlLink } from '@gitlab/ui';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { createAlert } from '~/flash';
+
+import ForkInfo, { i18n } from '~/repository/components/fork_info.vue';
+import forkDetailsQuery from '~/repository/queries/fork_details.query.graphql';
+import { propsForkInfo } from '../mock_data';
+
+jest.mock('~/flash');
+
+describe('ForkInfo component', () => {
+ let wrapper;
+ let mockResolver;
+ const forkInfoError = new Error('Something went wrong');
+
+ Vue.use(VueApollo);
+
+ const createCommitData = ({ ahead = 3, behind = 7 }) => {
+ return {
+ data: {
+ project: { id: '1', forkDetails: { ahead, behind, __typename: 'ForkDetails' } },
+ },
+ };
+ };
+
+ const createComponent = (props = {}, data = {}, isRequestFailed = false) => {
+ mockResolver = isRequestFailed
+ ? jest.fn().mockRejectedValue(forkInfoError)
+ : jest.fn().mockResolvedValue(createCommitData(data));
+
+ wrapper = shallowMountExtended(ForkInfo, {
+ apolloProvider: createMockApollo([[forkDetailsQuery, mockResolver]]),
+ propsData: { ...propsForkInfo, ...props },
+ });
+ return waitForPromises();
+ };
+
+ const findLink = () => wrapper.findComponent(GlLink);
+ const findSkeleton = () => wrapper.findComponent(GlSkeletonLoader);
+ const findIcon = () => wrapper.findComponent(GlIcon);
+ const findDivergenceMessage = () => wrapper.find('.gl-text-secondary');
+ const findInaccessibleMessage = () => wrapper.findByTestId('inaccessible-project');
+ it('displays a skeleton while loading data', async () => {
+ createComponent();
+ expect(findSkeleton().exists()).toBe(true);
+ });
+
+ it('does not display skeleton when data is loaded', async () => {
+ await createComponent();
+ expect(findSkeleton().exists()).toBe(false);
+ });
+
+ it('renders fork icon', async () => {
+ await createComponent();
+ expect(findIcon().exists()).toBe(true);
+ });
+
+ it('queries the data when sourceName is present', async () => {
+ await createComponent();
+ expect(mockResolver).toHaveBeenCalled();
+ });
+
+ it('does not query the data when sourceName is empty', async () => {
+ await createComponent({ sourceName: null });
+ expect(mockResolver).not.toHaveBeenCalled();
+ });
+
+ it('renders inaccessible message when fork source is not available', async () => {
+ await createComponent({ sourceName: '' });
+ const message = findInaccessibleMessage();
+ expect(message.exists()).toBe(true);
+ expect(message.text()).toBe(i18n.inaccessibleProject);
+ });
+
+ it('shows source project name with a link to a repo', async () => {
+ await createComponent();
+ const link = findLink();
+ expect(link.text()).toBe(propsForkInfo.sourceName);
+ expect(link.attributes('href')).toBe(propsForkInfo.sourcePath);
+ });
+
+ it('renders unknown divergence message when divergence is unknown', async () => {
+ await createComponent({}, { ahead: null, behind: null });
+ expect(findDivergenceMessage().text()).toBe(i18n.unknown);
+ });
+
+ it('shows correct divergence message when data is present', async () => {
+ await createComponent();
+ expect(findDivergenceMessage().text()).toMatchInterpolatedText(
+ '7 commits behind, 3 commits ahead of the upstream repository.',
+ );
+ });
+
+ it('renders up to date message when fork is not ahead or behind', async () => {
+ await createComponent({}, { ahead: 0, behind: 0 });
+ expect(findDivergenceMessage().text()).toBe(i18n.upToDate);
+ });
+
+ it('renders commits ahead message', async () => {
+ await createComponent({}, { behind: 0 });
+ expect(findDivergenceMessage().text()).toBe('3 commits ahead of the upstream repository.');
+ });
+
+ it('renders commits behind message', async () => {
+ await createComponent({}, { ahead: 0 });
+
+ expect(findDivergenceMessage().text()).toBe('7 commits behind the upstream repository.');
+ });
+
+ it('renders alert with error message when request fails', async () => {
+ await createComponent({}, {}, true);
+ expect(createAlert).toHaveBeenCalledWith({
+ message: i18n.error,
+ captureError: true,
+ error: forkInfoError,
+ });
+ });
+});
diff --git a/spec/frontend/repository/components/new_directory_modal_spec.js b/spec/frontend/repository/components/new_directory_modal_spec.js
index cf0d48280f4..4e5c9a685c4 100644
--- a/spec/frontend/repository/components/new_directory_modal_spec.js
+++ b/spec/frontend/repository/components/new_directory_modal_spec.js
@@ -5,7 +5,7 @@ import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/flash';
-import httpStatusCodes from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { visitUrl } from '~/lib/utils/url_utility';
import NewDirectoryModal from '~/repository/components/new_directory_modal.vue';
@@ -149,7 +149,7 @@ describe('NewDirectoryModal', () => {
originalBranch,
createNewMr,
} = defaultFormValue;
- mock.onPost(initialProps.path).reply(httpStatusCodes.OK, {});
+ mock.onPost(initialProps.path).reply(HTTP_STATUS_OK, {});
await fillForm();
await submitForm();
@@ -161,7 +161,7 @@ describe('NewDirectoryModal', () => {
});
it('does not submit "create_merge_request" formData if createNewMr is not checked', async () => {
- mock.onPost(initialProps.path).reply(httpStatusCodes.OK, {});
+ mock.onPost(initialProps.path).reply(HTTP_STATUS_OK, {});
await fillForm({ createNewMr: false });
await submitForm();
expect(mock.history.post[0].data.get('create_merge_request')).toBeNull();
@@ -169,7 +169,7 @@ describe('NewDirectoryModal', () => {
it('redirects to the new directory', async () => {
const response = { filePath: 'new-dir-path' };
- mock.onPost(initialProps.path).reply(httpStatusCodes.OK, response);
+ mock.onPost(initialProps.path).reply(HTTP_STATUS_OK, response);
await fillForm({ dirName: 'foo', branchName: 'master', commitMessage: 'foo' });
await submitForm();
diff --git a/spec/frontend/repository/components/tree_content_spec.js b/spec/frontend/repository/components/tree_content_spec.js
index 6eea66f1a7d..f694c8e9166 100644
--- a/spec/frontend/repository/components/tree_content_spec.js
+++ b/spec/frontend/repository/components/tree_content_spec.js
@@ -5,19 +5,25 @@ import FilePreview from '~/repository/components/preview/index.vue';
import FileTable from '~/repository/components/table/index.vue';
import TreeContent from 'jh_else_ce/repository/components/tree_content.vue';
import { loadCommits, isRequested, resetRequestedCommits } from '~/repository/commits_service';
+import waitForPromises from 'helpers/wait_for_promises';
+import { createAlert } from '~/flash';
+import { i18n } from '~/repository/constants';
+import { graphQLErrors } from '../mock_data';
jest.mock('~/repository/commits_service', () => ({
loadCommits: jest.fn(() => Promise.resolve()),
isRequested: jest.fn(),
resetRequestedCommits: jest.fn(),
}));
+jest.mock('~/flash');
let vm;
let $apollo;
+const mockResponse = jest.fn().mockReturnValue(Promise.resolve({ data: {} }));
-function factory(path, data = () => ({})) {
+function factory(path, apolloMockResponse = mockResponse) {
$apollo = {
- query: jest.fn().mockReturnValue(Promise.resolve({ data: data() })),
+ query: apolloMockResponse,
};
vm = shallowMount(TreeContent, {
@@ -222,4 +228,17 @@ describe('Repository table component', () => {
expect(loadCommits.mock.calls).toEqual([['', path, '', 0]]);
});
});
+
+ describe('error handling', () => {
+ const gitalyError = { graphQLErrors };
+ it.each`
+ error | message
+ ${gitalyError} | ${i18n.gitalyError}
+ ${'Error'} | ${i18n.generalError}
+ `('shows the expected error message', async ({ error, message }) => {
+ factory('/', jest.fn().mockRejectedValue(error));
+ await waitForPromises();
+ expect(createAlert).toHaveBeenCalledWith({ message, captureError: true });
+ });
+ });
});
diff --git a/spec/frontend/repository/components/upload_blob_modal_spec.js b/spec/frontend/repository/components/upload_blob_modal_spec.js
index 8db169b02b4..9de0666f27a 100644
--- a/spec/frontend/repository/components/upload_blob_modal_spec.js
+++ b/spec/frontend/repository/components/upload_blob_modal_spec.js
@@ -5,7 +5,7 @@ import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
import waitForPromises from 'helpers/wait_for_promises';
import { createAlert } from '~/flash';
-import httpStatusCodes from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { visitUrl } from '~/lib/utils/url_utility';
import UploadBlobModal from '~/repository/components/upload_blob_modal.vue';
import UploadDropzone from '~/vue_shared/components/upload_dropzone/upload_dropzone.vue';
@@ -158,7 +158,7 @@ describe('UploadBlobModal', () => {
describe('successful response', () => {
beforeEach(async () => {
mock = new MockAdapter(axios);
- mock.onPost(initialProps.path).reply(httpStatusCodes.OK, { filePath: 'blah' });
+ mock.onPost(initialProps.path).reply(HTTP_STATUS_OK, { filePath: 'blah' });
findModal().vm.$emit('primary', mockEvent);
diff --git a/spec/frontend/repository/mock_data.js b/spec/frontend/repository/mock_data.js
index cda47a5b0a5..d85434a9148 100644
--- a/spec/frontend/repository/mock_data.js
+++ b/spec/frontend/repository/mock_data.js
@@ -87,6 +87,8 @@ export const applicationInfoMock = { gitpodEnabled: true };
export const propsMock = { path: 'some_file.js', projectPath: 'some/path' };
export const refMock = 'default-ref';
+export const refWithSpecialCharMock = 'feat/selected-#-ref-#';
+export const encodedRefWithSpecialCharMock = 'feat/selected-%23-ref-%23';
export const blobControlsDataMock = {
id: '1234',
@@ -106,3 +108,19 @@ export const blobControlsDataMock = {
},
},
};
+
+export const graphQLErrors = [
+ {
+ message: '14:failed to connect to all addresses.',
+ locations: [{ line: 16, column: 7 }],
+ path: ['project', 'repository', 'paginatedTree'],
+ extensions: { code: 'unavailable', gitaly_code: 14, service: 'git' },
+ },
+];
+
+export const propsForkInfo = {
+ projectPath: 'nataliia/myGitLab',
+ selectedRef: 'main',
+ sourceName: 'gitLab',
+ sourcePath: 'gitlab-org/gitlab',
+};
diff --git a/spec/frontend/repository/utils/ref_switcher_utils_spec.js b/spec/frontend/repository/utils/ref_switcher_utils_spec.js
index 3335059554f..4d0250fffbf 100644
--- a/spec/frontend/repository/utils/ref_switcher_utils_spec.js
+++ b/spec/frontend/repository/utils/ref_switcher_utils_spec.js
@@ -1,5 +1,6 @@
import { generateRefDestinationPath } from '~/repository/utils/ref_switcher_utils';
import setWindowLocation from 'helpers/set_window_location_helper';
+import { refWithSpecialCharMock, encodedRefWithSpecialCharMock } from '../mock_data';
const projectRootPath = 'root/Project1';
const currentRef = 'main';
@@ -19,4 +20,10 @@ describe('generateRefDestinationPath', () => {
setWindowLocation(currentPath);
expect(generateRefDestinationPath(projectRootPath, selectedRef)).toBe(result);
});
+
+ it('encodes the selected ref', () => {
+ const result = `${projectRootPath}/-/tree/${encodedRefWithSpecialCharMock}`;
+
+ expect(generateRefDestinationPath(projectRootPath, refWithSpecialCharMock)).toBe(result);
+ });
});
diff --git a/spec/frontend/search/store/utils_spec.js b/spec/frontend/search/store/utils_spec.js
index 20d764190b1..487ed7bfe03 100644
--- a/spec/frontend/search/store/utils_spec.js
+++ b/spec/frontend/search/store/utils_spec.js
@@ -5,7 +5,10 @@ import {
setFrequentItemToLS,
mergeById,
isSidebarDirty,
+ formatSearchResultCount,
+ getAggregationsUrl,
} from '~/search/store/utils';
+import { useMockLocationHelper } from 'helpers/mock_window_location_helper';
import {
MOCK_LS_KEY,
MOCK_GROUPS,
@@ -241,4 +244,23 @@ describe('Global Search Store Utils', () => {
});
});
});
+ describe('formatSearchResultCount', () => {
+ it('returns zero as string if no count is provided', () => {
+ expect(formatSearchResultCount()).toStrictEqual('0');
+ });
+ it('returns 10K string for 10000 integer', () => {
+ expect(formatSearchResultCount(10000)).toStrictEqual('10K');
+ });
+ it('returns 23K string for "23,000+" string', () => {
+ expect(formatSearchResultCount('23,000+')).toStrictEqual('23K');
+ });
+ });
+
+ describe('getAggregationsUrl', () => {
+ useMockLocationHelper();
+ it('returns the aggregations URL based on the current location', () => {
+ const testURL = window.location.href;
+ expect(getAggregationsUrl()).toStrictEqual(`${testURL}search/aggregations`);
+ });
+ });
});
diff --git a/spec/frontend/self_monitor/store/actions_spec.js b/spec/frontend/self_monitor/store/actions_spec.js
index 65c9d2f5f01..4c266fabea6 100644
--- a/spec/frontend/self_monitor/store/actions_spec.js
+++ b/spec/frontend/self_monitor/store/actions_spec.js
@@ -1,7 +1,7 @@
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import testAction from 'helpers/vuex_action_helper';
-import statusCodes, { HTTP_STATUS_ACCEPTED } from '~/lib/utils/http_status';
+import { HTTP_STATUS_ACCEPTED, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import * as actions from '~/self_monitor/store/actions';
import * as types from '~/self_monitor/store/mutation_types';
import createState from '~/self_monitor/store/state';
@@ -47,7 +47,7 @@ describe('self-monitor actions', () => {
mock.onPost(state.createProjectEndpoint).reply(HTTP_STATUS_ACCEPTED, {
job_id: '123',
});
- mock.onGet(state.createProjectStatusEndpoint).reply(statusCodes.OK, {
+ mock.onGet(state.createProjectStatusEndpoint).reply(HTTP_STATUS_OK, {
project_full_path: '/self-monitor-url',
});
});
@@ -154,7 +154,7 @@ describe('self-monitor actions', () => {
mock.onDelete(state.deleteProjectEndpoint).reply(HTTP_STATUS_ACCEPTED, {
job_id: '456',
});
- mock.onGet(state.deleteProjectStatusEndpoint).reply(statusCodes.OK, {
+ mock.onGet(state.deleteProjectStatusEndpoint).reply(HTTP_STATUS_OK, {
status: 'success',
});
});
diff --git a/spec/frontend/set_status_modal/set_status_form_spec.js b/spec/frontend/set_status_modal/set_status_form_spec.js
index 486e06d2906..df740d4a431 100644
--- a/spec/frontend/set_status_modal/set_status_form_spec.js
+++ b/spec/frontend/set_status_modal/set_status_form_spec.js
@@ -1,12 +1,15 @@
import $ from 'jquery';
import { mountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import { useFakeDate } from 'helpers/fake_date';
import SetStatusForm from '~/set_status_modal/set_status_form.vue';
+import { NEVER_TIME_RANGE } from '~/set_status_modal/constants';
import EmojiPicker from '~/emoji/components/picker.vue';
import { timeRanges } from '~/vue_shared/constants';
-import { sprintf } from '~/locale';
import GfmAutoComplete from 'ee_else_ce/gfm_auto_complete';
+const [thirtyMinutes, , , oneDay] = timeRanges;
+
describe('SetStatusForm', () => {
let wrapper;
@@ -73,17 +76,71 @@ describe('SetStatusForm', () => {
});
});
- describe('when clear status after is set', () => {
- it('displays value in dropdown toggle button', async () => {
- const clearStatusAfter = timeRanges[0];
+ describe('clear status after dropdown toggle button text', () => {
+ useFakeDate(2022, 11, 5);
- await createComponent({
- propsData: {
- clearStatusAfter,
- },
+ describe('when clear status after has previously been set', () => {
+ describe('when date is today', () => {
+ it('displays time that status will clear', async () => {
+ await createComponent({
+ propsData: {
+ currentClearStatusAfter: '2022-12-05 11:00:00 UTC',
+ },
+ });
+
+ expect(wrapper.findByRole('button', { name: '11:00am' }).exists()).toBe(true);
+ });
});
- expect(wrapper.findByRole('button', { name: clearStatusAfter.label }).exists()).toBe(true);
+ describe('when date is not today', () => {
+ it('displays date and time that status will clear', async () => {
+ await createComponent({
+ propsData: {
+ currentClearStatusAfter: '2022-12-06 11:00:00 UTC',
+ },
+ });
+
+ expect(wrapper.findByRole('button', { name: 'Dec 6, 2022 11:00am' }).exists()).toBe(true);
+ });
+ });
+
+ describe('when a new option is chosen from the dropdown', () => {
+ describe('when chosen option is today', () => {
+ it('displays chosen option as time', async () => {
+ await createComponent({
+ propsData: {
+ clearStatusAfter: thirtyMinutes,
+ currentClearStatusAfter: '2022-12-05 11:00:00 UTC',
+ },
+ });
+
+ expect(wrapper.findByRole('button', { name: '12:30am' }).exists()).toBe(true);
+ });
+ });
+
+ describe('when chosen option is not today', () => {
+ it('displays chosen option as date and time', async () => {
+ await createComponent({
+ propsData: {
+ clearStatusAfter: oneDay,
+ currentClearStatusAfter: '2022-12-06 11:00:00 UTC',
+ },
+ });
+
+ expect(wrapper.findByRole('button', { name: 'Dec 6, 2022 12:00am' }).exists()).toBe(
+ true,
+ );
+ });
+ });
+ });
+ });
+
+ describe('when clear status after has not been set', () => {
+ it('displays `Never`', async () => {
+ await createComponent();
+
+ expect(wrapper.findByRole('button', { name: NEVER_TIME_RANGE.label }).exists()).toBe(true);
+ });
});
});
@@ -131,7 +188,7 @@ describe('SetStatusForm', () => {
await wrapper.findByTestId('thirtyMinutes').trigger('click');
- expect(wrapper.emitted('clear-status-after-click')).toEqual([[timeRanges[0]]]);
+ expect(wrapper.emitted('clear-status-after-click')).toEqual([[thirtyMinutes]]);
});
});
@@ -150,20 +207,4 @@ describe('SetStatusForm', () => {
expect(wrapper.findByTestId('no-emoji-placeholder').exists()).toBe(true);
});
});
-
- describe('when `currentClearStatusAfter` prop is set', () => {
- it('displays clear status message', async () => {
- const date = '2022-08-25 21:14:48 UTC';
-
- await createComponent({
- propsData: {
- currentClearStatusAfter: date,
- },
- });
-
- expect(
- wrapper.findByText(sprintf(SetStatusForm.i18n.clearStatusAfterMessage, { date })).exists(),
- ).toBe(true);
- });
- });
});
diff --git a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
index 53d2a9e0978..85cd8d51272 100644
--- a/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
+++ b/spec/frontend/set_status_modal/set_status_modal_wrapper_spec.js
@@ -1,6 +1,7 @@
import { GlModal, GlFormCheckbox } from '@gitlab/ui';
import { nextTick } from 'vue';
import { mountExtended } from 'helpers/vue_test_utils_helper';
+import { useFakeDate } from 'helpers/fake_date';
import { initEmojiMock, clearEmojiMock } from 'helpers/emoji';
import * as UserApi from '~/api/user_api';
import EmojiPicker from '~/emoji/components/picker.vue';
@@ -56,7 +57,6 @@ describe('SetStatusModalWrapper', () => {
wrapper.findByPlaceholderText(SetStatusForm.i18n.statusMessagePlaceholder);
const findClearStatusButton = () => wrapper.find('.js-clear-user-status-button');
const findAvailabilityCheckbox = () => wrapper.findComponent(GlFormCheckbox);
- const findClearStatusAtMessage = () => wrapper.find('[data-testid="clear-status-at-message"]');
const getEmojiPicker = () => wrapper.findComponent(EmojiPickerStub);
const initModal = async ({ mockOnUpdateSuccess = true, mockOnUpdateFailure = true } = {}) => {
@@ -103,10 +103,6 @@ describe('SetStatusModalWrapper', () => {
expect(wrapper.find('[data-testid="clear-status-at-dropdown"]').exists()).toBe(true);
});
- it('does not display the clear status at message', () => {
- expect(findClearStatusAtMessage().exists()).toBe(false);
- });
-
it('renders emoji picker dropdown with custom positioning', () => {
expect(getEmojiPicker().props()).toMatchObject({
right: false,
@@ -138,17 +134,16 @@ describe('SetStatusModalWrapper', () => {
});
describe('with currentClearStatusAfter set', () => {
+ useFakeDate(2022, 11, 5);
+
beforeEach(async () => {
await initEmojiMock();
- wrapper = createComponent({ currentClearStatusAfter: '2021-01-01 00:00:00 UTC' });
+ wrapper = createComponent({ currentClearStatusAfter: '2022-12-06 11:00:00 UTC' });
return initModal();
});
- it('displays the clear status at message', () => {
- const clearStatusAtMessage = findClearStatusAtMessage();
-
- expect(clearStatusAtMessage.exists()).toBe(true);
- expect(clearStatusAtMessage.text()).toBe('Your status resets on 2021-01-01 00:00:00 UTC.');
+ it('displays date and time that status will expire in dropdown toggle button', () => {
+ expect(wrapper.findByRole('button', { name: 'Dec 6, 2022 11:00am' }).exists()).toBe(true);
});
});
@@ -170,33 +165,33 @@ describe('SetStatusModalWrapper', () => {
});
it('clicking "setStatus" submits the user status', async () => {
- findModal().vm.$emit('primary');
- await nextTick();
-
// set the availability status
findAvailabilityCheckbox().vm.$emit('input', true);
// set the currentClearStatusAfter to 30 minutes
- wrapper.find('[data-testid="thirtyMinutes"]').trigger('click');
+ await wrapper.find('[data-testid="thirtyMinutes"]').trigger('click');
findModal().vm.$emit('primary');
await nextTick();
- const commonParams = {
+ expect(UserApi.updateUserStatus).toHaveBeenCalledWith({
+ availability: AVAILABILITY_STATUS.BUSY,
+ clearStatusAfter: '30_minutes',
emoji: defaultEmoji,
message: defaultMessage,
- };
-
- expect(UserApi.updateUserStatus).toHaveBeenCalledTimes(2);
- expect(UserApi.updateUserStatus).toHaveBeenNthCalledWith(1, {
- availability: AVAILABILITY_STATUS.NOT_SET,
- clearStatusAfter: null,
- ...commonParams,
});
- expect(UserApi.updateUserStatus).toHaveBeenNthCalledWith(2, {
- availability: AVAILABILITY_STATUS.BUSY,
- clearStatusAfter: '30_minutes',
- ...commonParams,
+ });
+
+ describe('when `Clear status after` field has not been set', () => {
+ it('does not include `clearStatusAfter` in API request', async () => {
+ findModal().vm.$emit('primary');
+ await nextTick();
+
+ expect(UserApi.updateUserStatus).toHaveBeenCalledWith({
+ availability: AVAILABILITY_STATUS.NOT_SET,
+ emoji: defaultEmoji,
+ message: defaultMessage,
+ });
});
});
diff --git a/spec/frontend/set_status_modal/user_profile_set_status_wrapper_spec.js b/spec/frontend/set_status_modal/user_profile_set_status_wrapper_spec.js
index eaee0e77311..a4a2a86dc73 100644
--- a/spec/frontend/set_status_modal/user_profile_set_status_wrapper_spec.js
+++ b/spec/frontend/set_status_modal/user_profile_set_status_wrapper_spec.js
@@ -1,8 +1,6 @@
import { nextTick } from 'vue';
import { cloneDeep } from 'lodash';
import { mountExtended } from 'helpers/vue_test_utils_helper';
-import { resetHTMLFixture } from 'helpers/fixtures';
-import { useFakeDate } from 'helpers/fake_date';
import UserProfileSetStatusWrapper from '~/set_status_modal/user_profile_set_status_wrapper.vue';
import SetStatusForm from '~/set_status_modal/set_status_form.vue';
import { TIME_RANGES_WITH_NEVER, NEVER_TIME_RANGE } from '~/set_status_modal/constants';
@@ -51,7 +49,7 @@ describe('UserProfileSetStatusWrapper', () => {
emoji: defaultProvide.fields.emoji.value,
message: defaultProvide.fields.message.value,
availability: true,
- clearStatusAfter: NEVER_TIME_RANGE,
+ clearStatusAfter: null,
currentClearStatusAfter: defaultProvide.fields.clearStatusAfter.value,
});
});
@@ -69,27 +67,41 @@ describe('UserProfileSetStatusWrapper', () => {
);
});
- describe('when clear status after dropdown is set to `Never`', () => {
- it('renders hidden clear status after input with value unset', () => {
- createComponent();
+ describe('when clear status after has previously been set', () => {
+ describe('when clear status after dropdown is not set', () => {
+ it('does not render hidden clear status after input', () => {
+ createComponent();
- expect(
- findInput(defaultProvide.fields.clearStatusAfter.name).attributes('value'),
- ).toBeUndefined();
+ expect(findInput(defaultProvide.fields.clearStatusAfter.name).exists()).toBe(false);
+ });
});
- });
- describe('when clear status after dropdown has a value selected', () => {
- it('renders hidden clear status after input with value set', async () => {
- createComponent();
+ describe('when clear status after dropdown is set to `Never`', () => {
+ it('renders hidden clear status after input with value unset', async () => {
+ createComponent();
- findSetStatusForm().vm.$emit('clear-status-after-click', TIME_RANGES_WITH_NEVER[1]);
+ findSetStatusForm().vm.$emit('clear-status-after-click', NEVER_TIME_RANGE);
- await nextTick();
+ await nextTick();
- expect(findInput(defaultProvide.fields.clearStatusAfter.name).attributes('value')).toBe(
- TIME_RANGES_WITH_NEVER[1].shortcut,
- );
+ expect(
+ findInput(defaultProvide.fields.clearStatusAfter.name).attributes('value'),
+ ).toBeUndefined();
+ });
+ });
+
+ describe('when clear status after dropdown is set to a time range', () => {
+ it('renders hidden clear status after input with value set', async () => {
+ createComponent();
+
+ findSetStatusForm().vm.$emit('clear-status-after-click', TIME_RANGES_WITH_NEVER[1]);
+
+ await nextTick();
+
+ expect(findInput(defaultProvide.fields.clearStatusAfter.name).attributes('value')).toBe(
+ TIME_RANGES_WITH_NEVER[1].shortcut,
+ );
+ });
});
});
@@ -120,37 +132,4 @@ describe('UserProfileSetStatusWrapper', () => {
expect(findInput(defaultProvide.fields.message.name).attributes('value')).toBe(newMessage);
});
});
-
- describe('when form is successfully submitted', () => {
- // 2022-09-02 00:00:00 UTC
- useFakeDate(2022, 8, 2);
-
- const form = document.createElement('form');
- form.classList.add('js-edit-user');
-
- beforeEach(async () => {
- document.body.appendChild(form);
- createComponent();
-
- const oneDay = TIME_RANGES_WITH_NEVER[4];
-
- findSetStatusForm().vm.$emit('clear-status-after-click', oneDay);
-
- await nextTick();
-
- form.dispatchEvent(new Event('ajax:success'));
- });
-
- afterEach(() => {
- resetHTMLFixture();
- });
-
- it('updates clear status after dropdown to `Never`', () => {
- expect(findSetStatusForm().props('clearStatusAfter')).toBe(NEVER_TIME_RANGE);
- });
-
- it('updates `currentClearStatusAfter` prop', () => {
- expect(findSetStatusForm().props('currentClearStatusAfter')).toBe('2022-09-03 00:00:00 UTC');
- });
- });
});
diff --git a/spec/frontend/set_status_modal/utils_spec.js b/spec/frontend/set_status_modal/utils_spec.js
index 1e918b75a98..a1c899be900 100644
--- a/spec/frontend/set_status_modal/utils_spec.js
+++ b/spec/frontend/set_status_modal/utils_spec.js
@@ -1,5 +1,8 @@
-import { isUserBusy } from '~/set_status_modal/utils';
-import { AVAILABILITY_STATUS } from '~/set_status_modal/constants';
+import { isUserBusy, computedClearStatusAfterValue } from '~/set_status_modal/utils';
+import { AVAILABILITY_STATUS, NEVER_TIME_RANGE } from '~/set_status_modal/constants';
+import { timeRanges } from '~/vue_shared/constants';
+
+const [thirtyMinutes] = timeRanges;
describe('Set status modal utils', () => {
describe('isUserBusy', () => {
@@ -13,4 +16,15 @@ describe('Set status modal utils', () => {
expect(isUserBusy(value)).toBe(result);
});
});
+
+ describe('computedClearStatusAfterValue', () => {
+ it.each`
+ value | expected
+ ${null} | ${null}
+ ${NEVER_TIME_RANGE} | ${null}
+ ${thirtyMinutes} | ${thirtyMinutes.shortcut}
+ `('with $value returns $expected', ({ value, expected }) => {
+ expect(computedClearStatusAfterValue(value)).toBe(expected);
+ });
+ });
});
diff --git a/spec/frontend/sidebar/components/assignees/assignees_spec.js b/spec/frontend/sidebar/components/assignees/assignees_spec.js
index 6971ae2f9ed..d422292ed9e 100644
--- a/spec/frontend/sidebar/components/assignees/assignees_spec.js
+++ b/spec/frontend/sidebar/components/assignees/assignees_spec.js
@@ -1,10 +1,10 @@
import { GlIcon } from '@gitlab/ui';
import { mount } from '@vue/test-utils';
-import { nextTick } from 'vue';
import { trimText } from 'helpers/text_helper';
import UsersMockHelper from 'helpers/user_mock_data_helper';
import Assignee from '~/sidebar/components/assignees/assignees.vue';
import AssigneeAvatarLink from '~/sidebar/components/assignees/assignee_avatar_link.vue';
+import CollapsedAssigneeList from '~/sidebar/components/assignees/collapsed_assignee_list.vue';
import UsersMock from '../../mock_data';
describe('Assignee component', () => {
@@ -66,10 +66,8 @@ describe('Assignee component', () => {
editable: true,
});
- jest.spyOn(wrapper.vm, '$emit');
- wrapper.find('[data-testid="assign-yourself"]').trigger('click');
+ await wrapper.find('[data-testid="assign-yourself"]').trigger('click');
- await nextTick();
expect(wrapper.emitted('assign-self')).toHaveLength(1);
});
});
@@ -166,7 +164,11 @@ describe('Assignee component', () => {
editable: true,
});
- expect(wrapper.vm.sortedAssigness[0].can_merge).toBe(true);
+ expect(wrapper.findComponent(CollapsedAssigneeList).props('users')[0]).toEqual(
+ expect.objectContaining({
+ can_merge: true,
+ }),
+ );
});
it('passes the sorted assignees to the uncollapsed-assignee-list', () => {
diff --git a/spec/frontend/sidebar/components/copy/sidebar_reference_widget_spec.js b/spec/frontend/sidebar/components/copy/sidebar_reference_widget_spec.js
index c5161a748a9..40f14d581dc 100644
--- a/spec/frontend/sidebar/components/copy/sidebar_reference_widget_spec.js
+++ b/spec/frontend/sidebar/components/copy/sidebar_reference_widget_spec.js
@@ -66,7 +66,7 @@ describe('Sidebar Reference Widget', () => {
});
describe('when error occurs', () => {
- it('calls createFlash with correct parameters', async () => {
+ it(`emits 'fetch-error' event with correct parameters`, async () => {
const mockError = new Error('mayday');
createComponent({
diff --git a/spec/frontend/super_sidebar/components/counter_spec.js b/spec/frontend/super_sidebar/components/counter_spec.js
new file mode 100644
index 00000000000..1150b0a3aa8
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/counter_spec.js
@@ -0,0 +1,56 @@
+import { GlIcon } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { __ } from '~/locale';
+import Counter from '~/super_sidebar/components/counter.vue';
+
+describe('Counter component', () => {
+ let wrapper;
+
+ const defaultPropsData = {
+ count: 3,
+ href: '',
+ icon: 'issues',
+ label: __('Issues'),
+ };
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const findButton = () => wrapper.find('button');
+ const findIcon = () => wrapper.getComponent(GlIcon);
+ const findLink = () => wrapper.find('a');
+
+ const createWrapper = (props = {}) => {
+ wrapper = shallowMountExtended(Counter, {
+ propsData: {
+ ...defaultPropsData,
+ ...props,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ describe('default', () => {
+ it('renders icon', () => {
+ expect(findIcon().props('name')).toBe('issues');
+ });
+
+ it('renders button', () => {
+ expect(findButton().attributes('aria-label')).toBe('Issues 3');
+ expect(findLink().exists()).toBe(false);
+ });
+ });
+
+ describe('link', () => {
+ it('renders link', () => {
+ createWrapper({ href: '/dashboard/todos' });
+ expect(findLink().attributes('aria-label')).toBe('Issues 3');
+ expect(findLink().attributes('href')).toBe('/dashboard/todos');
+ expect(findButton().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/super_sidebar_spec.js b/spec/frontend/super_sidebar/components/super_sidebar_spec.js
new file mode 100644
index 00000000000..d7d2f67dc8a
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/super_sidebar_spec.js
@@ -0,0 +1,33 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import SuperSidebar from '~/super_sidebar/components/super_sidebar.vue';
+import UserBar from '~/super_sidebar/components/user_bar.vue';
+import { sidebarData } from '../mock_data';
+
+describe('SuperSidebar component', () => {
+ let wrapper;
+
+ const findUserBar = () => wrapper.findComponent(UserBar);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const createWrapper = (props = {}) => {
+ wrapper = shallowMountExtended(SuperSidebar, {
+ propsData: {
+ sidebarData,
+ ...props,
+ },
+ });
+ };
+
+ describe('default', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('renders UserBar with sidebarData', () => {
+ expect(findUserBar().props('sidebarData')).toBe(sidebarData);
+ });
+ });
+});
diff --git a/spec/frontend/super_sidebar/components/user_bar_spec.js b/spec/frontend/super_sidebar/components/user_bar_spec.js
new file mode 100644
index 00000000000..6d0186a2749
--- /dev/null
+++ b/spec/frontend/super_sidebar/components/user_bar_spec.js
@@ -0,0 +1,46 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { __ } from '~/locale';
+import Counter from '~/super_sidebar/components/counter.vue';
+import UserBar from '~/super_sidebar/components/user_bar.vue';
+import { sidebarData } from '../mock_data';
+
+describe('UserBar component', () => {
+ let wrapper;
+
+ const findCounter = (at) => wrapper.findAllComponents(Counter).at(at);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ const createWrapper = (props = {}) => {
+ wrapper = shallowMountExtended(UserBar, {
+ propsData: {
+ sidebarData,
+ ...props,
+ },
+ provide: {
+ rootPath: '/',
+ toggleNewNavEndpoint: '/-/profile/preferences',
+ },
+ });
+ };
+
+ describe('default', () => {
+ beforeEach(() => {
+ createWrapper();
+ });
+
+ it('renders issues counter', () => {
+ expect(findCounter(0).props('count')).toBe(sidebarData.assigned_open_issues_count);
+ expect(findCounter(0).props('href')).toBe(sidebarData.issues_dashboard_path);
+ expect(findCounter(0).props('label')).toBe(__('Issues'));
+ });
+
+ it('renders todos counter', () => {
+ expect(findCounter(2).props('count')).toBe(sidebarData.todos_pending_count);
+ expect(findCounter(2).props('href')).toBe('/dashboard/todos');
+ expect(findCounter(2).props('label')).toBe(__('To-Do list'));
+ });
+ });
+});
diff --git a/spec/frontend/super_sidebar/mock_data.js b/spec/frontend/super_sidebar/mock_data.js
new file mode 100644
index 00000000000..7db0d0ea5cc
--- /dev/null
+++ b/spec/frontend/super_sidebar/mock_data.js
@@ -0,0 +1,9 @@
+export const sidebarData = {
+ name: 'Administrator',
+ username: 'root',
+ avatar_url: 'path/to/img_administrator',
+ assigned_open_issues_count: 1,
+ assigned_open_merge_requests_count: 2,
+ todos_pending_count: 3,
+ issues_dashboard_path: 'path/to/issues',
+};
diff --git a/spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js b/spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js
new file mode 100644
index 00000000000..3379af3f41c
--- /dev/null
+++ b/spec/frontend/usage_quotas/storage/components/project_storage_app_spec.js
@@ -0,0 +1,150 @@
+import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import ProjectStorageApp from '~/usage_quotas/storage/components/project_storage_app.vue';
+import UsageGraph from '~/usage_quotas/storage/components/usage_graph.vue';
+import { TOTAL_USAGE_DEFAULT_TEXT } from '~/usage_quotas/storage/constants';
+import getProjectStorageStatistics from '~/usage_quotas/storage/queries/project_storage.query.graphql';
+import {
+ projectData,
+ mockGetProjectStorageStatisticsGraphQLResponse,
+ mockEmptyResponse,
+ defaultProjectProvideValues,
+} from '../mock_data';
+
+Vue.use(VueApollo);
+
+describe('ProjectStorageApp', () => {
+ let wrapper;
+
+ const createMockApolloProvider = ({ reject = false, mockedValue } = {}) => {
+ let response;
+
+ if (reject) {
+ response = jest.fn().mockRejectedValue(mockedValue || new Error('GraphQL error'));
+ } else {
+ response = jest.fn().mockResolvedValue(mockedValue);
+ }
+
+ const requestHandlers = [[getProjectStorageStatistics, response]];
+
+ return createMockApollo(requestHandlers);
+ };
+
+ const createComponent = ({ provide = {}, mockApollo } = {}) => {
+ wrapper = extendedWrapper(
+ shallowMount(ProjectStorageApp, {
+ apolloProvider: mockApollo,
+ provide: {
+ ...defaultProjectProvideValues,
+ ...provide,
+ },
+ }),
+ );
+ };
+
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findUsagePercentage = () => wrapper.findByTestId('total-usage');
+ const findUsageQuotasHelpLink = () => wrapper.findByTestId('usage-quotas-help-link');
+ const findUsageGraph = () => wrapper.findComponent(UsageGraph);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('with apollo fetching successful', () => {
+ let mockApollo;
+
+ beforeEach(async () => {
+ mockApollo = createMockApolloProvider({
+ mockedValue: mockGetProjectStorageStatisticsGraphQLResponse,
+ });
+ createComponent({ mockApollo });
+ await waitForPromises();
+ });
+
+ it('renders correct total usage', () => {
+ expect(findUsagePercentage().text()).toBe(projectData.storage.totalUsage);
+ });
+
+ it('renders correct usage quotas help link', () => {
+ expect(findUsageQuotasHelpLink().attributes('href')).toBe(
+ defaultProjectProvideValues.helpLinks.usageQuotas,
+ );
+ });
+ });
+
+ describe('with apollo loading', () => {
+ let mockApollo;
+
+ beforeEach(() => {
+ mockApollo = createMockApolloProvider({
+ mockedValue: new Promise(() => {}),
+ });
+ createComponent({ mockApollo });
+ });
+
+ it('should show loading icon', () => {
+ expect(findLoadingIcon().exists()).toBe(true);
+ });
+ });
+
+ describe('with apollo returning empty data', () => {
+ let mockApollo;
+
+ beforeEach(async () => {
+ mockApollo = createMockApolloProvider({
+ mockedValue: mockEmptyResponse,
+ });
+ createComponent({ mockApollo });
+ await waitForPromises();
+ });
+
+ it('shows default text for total usage', () => {
+ expect(findUsagePercentage().text()).toBe(TOTAL_USAGE_DEFAULT_TEXT);
+ });
+ });
+
+ describe('with apollo fetching error', () => {
+ let mockApollo;
+
+ beforeEach(async () => {
+ mockApollo = createMockApolloProvider({ reject: true });
+ createComponent({ mockApollo });
+ await waitForPromises();
+ });
+
+ it('renders gl-alert', () => {
+ expect(findAlert().exists()).toBe(true);
+ });
+ });
+
+ describe('rendering <usage-graph />', () => {
+ let mockApollo;
+
+ beforeEach(async () => {
+ mockApollo = createMockApolloProvider({
+ mockedValue: mockGetProjectStorageStatisticsGraphQLResponse,
+ });
+ createComponent({ mockApollo });
+ await waitForPromises();
+ });
+
+ it('renders usage-graph component if project.statistics exists', () => {
+ expect(findUsageGraph().exists()).toBe(true);
+ });
+
+ it('passes project.statistics to usage-graph component', () => {
+ const {
+ __typename,
+ ...statistics
+ } = mockGetProjectStorageStatisticsGraphQLResponse.data.project.statistics;
+ expect(findUsageGraph().props('rootStorageStatistics')).toMatchObject(statistics);
+ });
+ });
+});
diff --git a/spec/frontend/usage_quotas/storage/components/project_storage_detail_spec.js b/spec/frontend/usage_quotas/storage/components/project_storage_detail_spec.js
new file mode 100644
index 00000000000..ce489f69cad
--- /dev/null
+++ b/spec/frontend/usage_quotas/storage/components/project_storage_detail_spec.js
@@ -0,0 +1,129 @@
+import { GlTableLite, GlPopover } from '@gitlab/ui';
+import { mount } from '@vue/test-utils';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import ProjectStorageDetail from '~/usage_quotas/storage/components/project_storage_detail.vue';
+import {
+ containerRegistryPopoverId,
+ containerRegistryId,
+ uploadsPopoverId,
+ uploadsId,
+} from '~/usage_quotas/storage/constants';
+import { numberToHumanSize } from '~/lib/utils/number_utils';
+import { projectData, projectHelpLinks } from '../mock_data';
+
+describe('ProjectStorageDetail', () => {
+ let wrapper;
+
+ const { storageTypes } = projectData.storage;
+ const defaultProps = { storageTypes };
+
+ const createComponent = (props = {}) => {
+ wrapper = extendedWrapper(
+ mount(ProjectStorageDetail, {
+ propsData: {
+ ...defaultProps,
+ ...props,
+ },
+ provide: {
+ containerRegistryPopoverContent: 'Sample popover message',
+ },
+ }),
+ );
+ };
+
+ const generateStorageType = (id = 'buildArtifactsSize') => {
+ return {
+ storageType: {
+ id,
+ name: 'Test Name',
+ description: 'Test Description',
+ helpPath: '/test-type',
+ },
+ value: 400000,
+ };
+ };
+
+ const findTable = () => wrapper.findComponent(GlTableLite);
+ const findPopoverById = (id) =>
+ wrapper.findAllComponents(GlPopover).filter((p) => p.attributes('data-testid') === id);
+ const findContainerRegistryPopover = () => findPopoverById(containerRegistryPopoverId);
+ const findUploadsPopover = () => findPopoverById(uploadsPopoverId);
+ const findContainerRegistryWarningIcon = () => wrapper.find(`#${containerRegistryPopoverId}`);
+ const findUploadsWarningIcon = () => wrapper.find(`#${uploadsPopoverId}`);
+
+ beforeEach(() => {
+ createComponent();
+ });
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('with storage types', () => {
+ it.each(storageTypes)(
+ 'renders table row correctly %o',
+ ({ storageType: { id, name, description } }) => {
+ expect(wrapper.findByTestId(`${id}-name`).text()).toBe(name);
+ expect(wrapper.findByTestId(`${id}-description`).text()).toBe(description);
+ expect(wrapper.findByTestId(`${id}-icon`).props('name')).toBe(id);
+ expect(wrapper.findByTestId(`${id}-help-link`).attributes('href')).toBe(
+ projectHelpLinks[id.replace(`Size`, ``)],
+ );
+ },
+ );
+
+ it('should render items in order from the biggest usage size to the smallest', () => {
+ const rows = findTable().find('tbody').findAll('tr');
+ // Clone the array so the source is not mutated
+ const sortedStorageTypes = [...storageTypes].sort((a, b) => b.value - a.value);
+
+ sortedStorageTypes.forEach((storageType, i) => {
+ const rowUsageAmount = rows.wrappers[i].find('td:last-child').text();
+ const expectedUsageAmount = numberToHumanSize(storageType.value, 1);
+ expect(rowUsageAmount).toBe(expectedUsageAmount);
+ });
+ });
+ });
+
+ describe('without storage types', () => {
+ beforeEach(() => {
+ createComponent({ storageTypes: [] });
+ });
+
+ it('should render the table header <th>', () => {
+ expect(findTable().find('th').exists()).toBe(true);
+ });
+
+ it('should not render any table data <td>', () => {
+ expect(findTable().find('td').exists()).toBe(false);
+ });
+ });
+
+ describe.each`
+ description | mockStorageTypes | rendersContainerRegistryPopover | rendersUploadsPopover
+ ${'without any storage type that has popover'} | ${[generateStorageType()]} | ${false} | ${false}
+ ${'with container registry storage type'} | ${[generateStorageType(containerRegistryId)]} | ${true} | ${false}
+ ${'with uploads storage type'} | ${[generateStorageType(uploadsId)]} | ${false} | ${true}
+ ${'with container registry and uploads storage types'} | ${[generateStorageType(containerRegistryId), generateStorageType(uploadsId)]} | ${true} | ${true}
+ `(
+ '$description',
+ ({ mockStorageTypes, rendersContainerRegistryPopover, rendersUploadsPopover }) => {
+ beforeEach(() => {
+ createComponent({ storageTypes: mockStorageTypes });
+ });
+
+ it(`does${
+ rendersContainerRegistryPopover ? '' : ' not'
+ } render container registry warning icon and popover`, () => {
+ expect(findContainerRegistryWarningIcon().exists()).toBe(rendersContainerRegistryPopover);
+ expect(findContainerRegistryPopover().exists()).toBe(rendersContainerRegistryPopover);
+ });
+
+ it(`does${
+ rendersUploadsPopover ? '' : ' not'
+ } render uploads warning icon and popover`, () => {
+ expect(findUploadsWarningIcon().exists()).toBe(rendersUploadsPopover);
+ expect(findUploadsPopover().exists()).toBe(rendersUploadsPopover);
+ });
+ },
+ );
+});
diff --git a/spec/frontend/usage_quotas/storage/components/storage_type_icon_spec.js b/spec/frontend/usage_quotas/storage/components/storage_type_icon_spec.js
new file mode 100644
index 00000000000..1eb3386bfb8
--- /dev/null
+++ b/spec/frontend/usage_quotas/storage/components/storage_type_icon_spec.js
@@ -0,0 +1,41 @@
+import { mount } from '@vue/test-utils';
+import { GlIcon } from '@gitlab/ui';
+import StorageTypeIcon from '~/usage_quotas/storage/components/storage_type_icon.vue';
+
+describe('StorageTypeIcon', () => {
+ let wrapper;
+
+ const createComponent = (props = {}) => {
+ wrapper = mount(StorageTypeIcon, {
+ propsData: {
+ ...props,
+ },
+ });
+ };
+
+ const findGlIcon = () => wrapper.findComponent(GlIcon);
+
+ describe('rendering icon', () => {
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it.each`
+ expected | provided
+ ${'doc-image'} | ${'lfsObjectsSize'}
+ ${'snippet'} | ${'snippetsSize'}
+ ${'infrastructure-registry'} | ${'repositorySize'}
+ ${'package'} | ${'packagesSize'}
+ ${'upload'} | ${'uploadsSize'}
+ ${'disk'} | ${'wikiSize'}
+ ${'disk'} | ${'anything-else'}
+ `(
+ 'renders icon with name of $expected when name prop is $provided',
+ ({ expected, provided }) => {
+ createComponent({ name: provided });
+
+ expect(findGlIcon().props('name')).toBe(expected);
+ },
+ );
+ });
+});
diff --git a/spec/frontend/usage_quotas/storage/components/usage_graph_spec.js b/spec/frontend/usage_quotas/storage/components/usage_graph_spec.js
new file mode 100644
index 00000000000..75b970d937a
--- /dev/null
+++ b/spec/frontend/usage_quotas/storage/components/usage_graph_spec.js
@@ -0,0 +1,144 @@
+import { shallowMount } from '@vue/test-utils';
+import { numberToHumanSize } from '~/lib/utils/number_utils';
+import UsageGraph from '~/usage_quotas/storage/components/usage_graph.vue';
+
+let data;
+let wrapper;
+
+function mountComponent({ rootStorageStatistics, limit }) {
+ wrapper = shallowMount(UsageGraph, {
+ propsData: {
+ rootStorageStatistics,
+ limit,
+ },
+ });
+}
+function findStorageTypeUsagesSerialized() {
+ return wrapper
+ .findAll('[data-testid="storage-type-usage"]')
+ .wrappers.map((wp) => wp.element.style.flex);
+}
+
+describe('Storage Counter usage graph component', () => {
+ beforeEach(() => {
+ data = {
+ rootStorageStatistics: {
+ wikiSize: 5000,
+ repositorySize: 4000,
+ packagesSize: 3000,
+ containerRegistrySize: 2500,
+ lfsObjectsSize: 2000,
+ buildArtifactsSize: 500,
+ pipelineArtifactsSize: 500,
+ snippetsSize: 2000,
+ storageSize: 17000,
+ uploadsSize: 1000,
+ },
+ limit: 2000,
+ };
+ mountComponent(data);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ it('renders the legend in order', () => {
+ const types = wrapper.findAll('[data-testid="storage-type-legend"]');
+
+ const {
+ buildArtifactsSize,
+ pipelineArtifactsSize,
+ lfsObjectsSize,
+ packagesSize,
+ containerRegistrySize,
+ repositorySize,
+ wikiSize,
+ snippetsSize,
+ uploadsSize,
+ } = data.rootStorageStatistics;
+
+ expect(types.at(0).text()).toMatchInterpolatedText(`Wiki ${numberToHumanSize(wikiSize)}`);
+ expect(types.at(1).text()).toMatchInterpolatedText(
+ `Repository ${numberToHumanSize(repositorySize)}`,
+ );
+ expect(types.at(2).text()).toMatchInterpolatedText(
+ `Packages ${numberToHumanSize(packagesSize)}`,
+ );
+ expect(types.at(3).text()).toMatchInterpolatedText(
+ `Container Registry ${numberToHumanSize(containerRegistrySize)}`,
+ );
+ expect(types.at(4).text()).toMatchInterpolatedText(
+ `LFS storage ${numberToHumanSize(lfsObjectsSize)}`,
+ );
+ expect(types.at(5).text()).toMatchInterpolatedText(
+ `Snippets ${numberToHumanSize(snippetsSize)}`,
+ );
+ expect(types.at(6).text()).toMatchInterpolatedText(
+ `Artifacts ${numberToHumanSize(buildArtifactsSize + pipelineArtifactsSize)}`,
+ );
+ expect(types.at(7).text()).toMatchInterpolatedText(`Uploads ${numberToHumanSize(uploadsSize)}`);
+ });
+
+ describe('when storage type is not used', () => {
+ beforeEach(() => {
+ data.rootStorageStatistics.wikiSize = 0;
+ mountComponent(data);
+ });
+
+ it('filters the storage type', () => {
+ expect(wrapper.text()).not.toContain('Wikis');
+ });
+ });
+
+ describe('when there is no storage usage', () => {
+ beforeEach(() => {
+ data.rootStorageStatistics.storageSize = 0;
+ mountComponent(data);
+ });
+
+ it('does not render', () => {
+ expect(wrapper.html()).toEqual('');
+ });
+ });
+
+ describe('when limit is 0', () => {
+ beforeEach(() => {
+ data.limit = 0;
+ mountComponent(data);
+ });
+
+ it('sets correct flex values', () => {
+ expect(findStorageTypeUsagesSerialized()).toStrictEqual([
+ '0.29411764705882354',
+ '0.23529411764705882',
+ '0.17647058823529413',
+ '0.14705882352941177',
+ '0.11764705882352941',
+ '0.11764705882352941',
+ '0.058823529411764705',
+ '0.058823529411764705',
+ ]);
+ });
+ });
+
+ describe('when storage exceeds limit', () => {
+ beforeEach(() => {
+ data.limit = data.rootStorageStatistics.storageSize - 1;
+ mountComponent(data);
+ });
+
+ it('renders correctly', () => {
+ expect(findStorageTypeUsagesSerialized()).toStrictEqual([
+ '0.29411764705882354',
+ '0.23529411764705882',
+ '0.17647058823529413',
+ '0.14705882352941177',
+ '0.11764705882352941',
+ '0.11764705882352941',
+ '0.058823529411764705',
+ '0.058823529411764705',
+ ]);
+ });
+ });
+});
diff --git a/spec/frontend/usage_quotas/storage/mock_data.js b/spec/frontend/usage_quotas/storage/mock_data.js
new file mode 100644
index 00000000000..b1c6be10d80
--- /dev/null
+++ b/spec/frontend/usage_quotas/storage/mock_data.js
@@ -0,0 +1,101 @@
+import mockGetProjectStorageStatisticsGraphQLResponse from 'test_fixtures/graphql/usage_quotas/storage/project_storage.query.graphql.json';
+
+export { mockGetProjectStorageStatisticsGraphQLResponse };
+export const mockEmptyResponse = { data: { project: null } };
+
+export const projectData = {
+ storage: {
+ totalUsage: '13.8 MiB',
+ storageTypes: [
+ {
+ storageType: {
+ id: 'containerRegistrySize',
+ name: 'Container Registry',
+ description: 'Gitlab-integrated Docker Container Registry for storing Docker Images.',
+ helpPath: '/container_registry',
+ },
+ value: 3_900_000,
+ },
+ {
+ storageType: {
+ id: 'buildArtifactsSize',
+ name: 'Artifacts',
+ description: 'Pipeline artifacts and job artifacts, created with CI/CD.',
+ helpPath: '/build-artifacts',
+ },
+ value: 400000,
+ },
+ {
+ storageType: {
+ id: 'lfsObjectsSize',
+ name: 'LFS storage',
+ description: 'Audio samples, videos, datasets, and graphics.',
+ helpPath: '/lsf-objects',
+ },
+ value: 4800000,
+ },
+ {
+ storageType: {
+ id: 'packagesSize',
+ name: 'Packages',
+ description: 'Code packages and container images.',
+ helpPath: '/packages',
+ },
+ value: 3800000,
+ },
+ {
+ storageType: {
+ id: 'repositorySize',
+ name: 'Repository',
+ description: 'Git repository.',
+ helpPath: '/repository',
+ },
+ value: 3900000,
+ },
+ {
+ storageType: {
+ id: 'snippetsSize',
+ name: 'Snippets',
+ description: 'Shared bits of code and text.',
+ helpPath: '/snippets',
+ },
+ value: 0,
+ },
+ {
+ storageType: {
+ id: 'uploadsSize',
+ name: 'Uploads',
+ description: 'File attachments and smaller design graphics.',
+ helpPath: '/uploads',
+ },
+ value: 900000,
+ },
+ {
+ storageType: {
+ id: 'wikiSize',
+ name: 'Wiki',
+ description: 'Wiki content.',
+ helpPath: '/wiki',
+ },
+ value: 300000,
+ },
+ ],
+ },
+};
+
+export const projectHelpLinks = {
+ containerRegistry: '/container_registry',
+ usageQuotas: '/usage-quotas',
+ buildArtifacts: '/build-artifacts',
+ lfsObjects: '/lsf-objects',
+ packages: '/packages',
+ repository: '/repository',
+ snippets: '/snippets',
+ uploads: '/uploads',
+ wiki: '/wiki',
+};
+
+export const defaultProjectProvideValues = {
+ projectPath: '/project-path',
+ helpLinks: projectHelpLinks,
+};
diff --git a/spec/frontend/usage_quotas/storage/utils_spec.js b/spec/frontend/usage_quotas/storage/utils_spec.js
new file mode 100644
index 00000000000..8fdd307c008
--- /dev/null
+++ b/spec/frontend/usage_quotas/storage/utils_spec.js
@@ -0,0 +1,88 @@
+import cloneDeep from 'lodash/cloneDeep';
+import { PROJECT_STORAGE_TYPES } from '~/usage_quotas/storage/constants';
+import {
+ parseGetProjectStorageResults,
+ getStorageTypesFromProjectStatistics,
+ descendingStorageUsageSort,
+} from '~/usage_quotas/storage/utils';
+import {
+ mockGetProjectStorageStatisticsGraphQLResponse,
+ defaultProjectProvideValues,
+ projectData,
+} from './mock_data';
+
+describe('getStorageTypesFromProjectStatistics', () => {
+ const projectStatistics = mockGetProjectStorageStatisticsGraphQLResponse.data.project.statistics;
+
+ describe('matches project statistics value with matching storage type', () => {
+ const typesWithStats = getStorageTypesFromProjectStatistics(projectStatistics);
+
+ it.each(PROJECT_STORAGE_TYPES)('storage type: $id', ({ id }) => {
+ expect(typesWithStats).toContainEqual({
+ storageType: expect.objectContaining({
+ id,
+ }),
+ value: projectStatistics[id],
+ });
+ });
+ });
+
+ it('adds helpPath to a relevant type', () => {
+ const trimTypeId = (id) => id.replace('Size', '');
+ const helpLinks = PROJECT_STORAGE_TYPES.reduce((acc, { id }) => {
+ const key = trimTypeId(id);
+ return {
+ ...acc,
+ [key]: `url://${id}`,
+ };
+ }, {});
+
+ const typesWithStats = getStorageTypesFromProjectStatistics(projectStatistics, helpLinks);
+
+ typesWithStats.forEach((type) => {
+ const key = trimTypeId(type.storageType.id);
+ expect(type.storageType.helpPath).toBe(helpLinks[key]);
+ });
+ });
+});
+describe('parseGetProjectStorageResults', () => {
+ it('parses project statistics correctly', () => {
+ expect(
+ parseGetProjectStorageResults(
+ mockGetProjectStorageStatisticsGraphQLResponse.data,
+ defaultProjectProvideValues.helpLinks,
+ ),
+ ).toMatchObject(projectData);
+ });
+
+ it('includes storage type with size of 0 in returned value', () => {
+ const mockedResponse = cloneDeep(mockGetProjectStorageStatisticsGraphQLResponse.data);
+ // ensuring a specific storage type item has size of 0
+ mockedResponse.project.statistics.repositorySize = 0;
+
+ const response = parseGetProjectStorageResults(
+ mockedResponse,
+ defaultProjectProvideValues.helpLinks,
+ );
+
+ expect(response.storage.storageTypes).toEqual(
+ expect.arrayContaining([
+ {
+ storageType: expect.any(Object),
+ value: 0,
+ },
+ ]),
+ );
+ });
+});
+
+describe('descendingStorageUsageSort', () => {
+ it('sorts items by a given key in descending order', () => {
+ const items = [{ k: 1 }, { k: 3 }, { k: 2 }];
+
+ const sorted = [...items].sort(descendingStorageUsageSort('k'));
+
+ const expectedSorted = [{ k: 3 }, { k: 2 }, { k: 1 }];
+ expect(sorted).toEqual(expectedSorted);
+ });
+});
diff --git a/spec/frontend/users/profile/components/report_abuse_button_spec.js b/spec/frontend/users/profile/components/report_abuse_button_spec.js
new file mode 100644
index 00000000000..7ad28566f49
--- /dev/null
+++ b/spec/frontend/users/profile/components/report_abuse_button_spec.js
@@ -0,0 +1,79 @@
+import { GlButton } from '@gitlab/ui';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { BV_HIDE_TOOLTIP } from '~/lib/utils/constants';
+
+import ReportAbuseButton from '~/users/profile/components/report_abuse_button.vue';
+import AbuseCategorySelector from '~/abuse_reports/components/abuse_category_selector.vue';
+
+describe('ReportAbuseButton', () => {
+ let wrapper;
+
+ const ACTION_PATH = '/abuse_reports/add_category';
+ const USER_ID = '1';
+ const REPORTED_FROM_URL = 'http://example.com';
+
+ const createComponent = (props) => {
+ wrapper = shallowMountExtended(ReportAbuseButton, {
+ propsData: {
+ ...props,
+ },
+ provide: {
+ reportAbusePath: ACTION_PATH,
+ reportedUserId: USER_ID,
+ reportedFromUrl: REPORTED_FROM_URL,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ const findReportAbuseButton = () => wrapper.findComponent(GlButton);
+ const findAbuseCategorySelector = () => wrapper.findComponent(AbuseCategorySelector);
+
+ it('renders report abuse button', () => {
+ expect(findReportAbuseButton().exists()).toBe(true);
+
+ expect(findReportAbuseButton().props()).toMatchObject({
+ category: 'primary',
+ icon: 'error',
+ });
+
+ expect(findReportAbuseButton().attributes('aria-label')).toBe(
+ wrapper.vm.$options.i18n.reportAbuse,
+ );
+ });
+
+ it('renders abuse category selector with the drawer initially closed', () => {
+ expect(findAbuseCategorySelector().exists()).toBe(true);
+
+ expect(findAbuseCategorySelector().props('showDrawer')).toBe(false);
+ });
+
+ describe('when button is clicked', () => {
+ beforeEach(async () => {
+ await findReportAbuseButton().vm.$emit('click');
+ });
+
+ it('opens the abuse category selector', () => {
+ expect(findAbuseCategorySelector().props('showDrawer')).toBe(true);
+ });
+
+ it('closes the abuse category selector', async () => {
+ await findAbuseCategorySelector().vm.$emit('close-drawer');
+
+ expect(findAbuseCategorySelector().props('showDrawer')).toBe(false);
+ });
+ });
+
+ describe('when user hovers out of the button', () => {
+ it(`should emit ${BV_HIDE_TOOLTIP} to close the tooltip`, () => {
+ jest.spyOn(wrapper.vm.$root, '$emit');
+
+ findReportAbuseButton().vm.$emit('mouseout');
+
+ expect(wrapper.vm.$root.$emit).toHaveBeenCalledWith(BV_HIDE_TOOLTIP);
+ });
+ });
+});
diff --git a/spec/frontend/vue_merge_request_widget/components/mr_collapsible_extension_spec.js b/spec/frontend/vue_merge_request_widget/components/mr_collapsible_extension_spec.js
index c253dc63f23..81f266d8070 100644
--- a/spec/frontend/vue_merge_request_widget/components/mr_collapsible_extension_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/mr_collapsible_extension_spec.js
@@ -42,8 +42,8 @@ describe('Merge Request Collapsible Extension', () => {
expect(wrapper.find('[data-testid="collapsed-header"]').text()).toBe('hello there');
});
- it('renders chevron-lg-right icon', () => {
- expect(findIcon().props('name')).toBe('chevron-lg-right');
+ it('renders chevron-right icon', () => {
+ expect(findIcon().props('name')).toBe('chevron-right');
});
describe('onClick', () => {
@@ -60,8 +60,8 @@ describe('Merge Request Collapsible Extension', () => {
expect(findTitle().text()).toBe('Collapse');
});
- it('renders chevron-lg-down icon', () => {
- expect(findIcon().props('name')).toBe('chevron-lg-down');
+ it('renders chevron-down icon', () => {
+ expect(findIcon().props('name')).toBe('chevron-down');
});
});
});
diff --git a/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap b/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
deleted file mode 100644
index 4077564486c..00000000000
--- a/spec/frontend/vue_merge_request_widget/components/states/__snapshots__/mr_widget_auto_merge_enabled_spec.js.snap
+++ /dev/null
@@ -1,163 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`MRWidgetAutoMergeEnabled template should have correct elements 1`] = `
-<div
- class="mr-widget-body media gl-display-flex gl-align-items-center"
->
- <div
- class="gl-w-6 gl-h-6 gl-display-flex gl-align-self-start gl-mr-3"
- >
- <div
- class="gl-display-flex gl-m-auto"
- >
- <div
- class="gl-mr-3 gl-p-2 gl-m-0! gl-text-blue-500 gl-w-6 gl-p-2"
- >
- <div
- class="gl-rounded-full gl-relative gl-display-flex mr-widget-extension-icon"
- >
- <div
- class="gl-absolute gl-top-half gl-left-50p gl-translate-x-n50 gl-display-flex gl-m-auto"
- >
- <div
- class="gl-display-flex gl-m-auto gl-translate-y-n50"
- >
- <svg
- aria-label="Scheduled "
- class="gl-display-block gl-icon s12"
- data-qa-selector="status_scheduled_icon"
- data-testid="status-scheduled-icon"
- role="img"
- >
- <use
- href="#status-scheduled"
- />
- </svg>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
-
- <div
- class="gl-display-flex gl-w-full"
- >
- <div
- class="media-body gl-display-flex gl-align-items-center"
- >
-
- <h4
- class="gl-mr-3"
- data-testid="statusText"
- >
- Set by to be merged automatically when the pipeline succeeds
- </h4>
-
- <div
- class="gl-display-flex gl-font-size-0 gl-ml-auto gl-gap-3"
- >
- <div
- class="gl-display-flex gl-align-items-flex-start"
- >
- <div
- class="dropdown b-dropdown gl-dropdown gl-display-block gl-md-display-none! btn-group"
- lazy=""
- no-caret=""
- title="Options"
- >
- <!---->
- <button
- aria-expanded="false"
- aria-haspopup="true"
- class="btn dropdown-toggle btn-default btn-sm gl-p-2! gl-button gl-dropdown-toggle btn-default-tertiary dropdown-icon-only dropdown-toggle-no-caret"
- type="button"
- >
- <!---->
-
- <svg
- aria-hidden="true"
- class="dropdown-icon gl-icon s16"
- data-testid="ellipsis_v-icon"
- role="img"
- >
- <use
- href="#ellipsis_v"
- />
- </svg>
-
- <span
- class="gl-dropdown-button-text gl-sr-only"
- >
-
- </span>
-
- <svg
- aria-hidden="true"
- class="gl-button-icon dropdown-chevron gl-icon s16"
- data-testid="chevron-down-icon"
- role="img"
- >
- <use
- href="#chevron-down"
- />
- </svg>
- </button>
- <ul
- class="dropdown-menu dropdown-menu-right"
- role="menu"
- tabindex="-1"
- >
- <!---->
- </ul>
- </div>
-
- <button
- class="btn gl-display-none gl-md-display-block gl-float-left btn-confirm btn-sm gl-button btn-confirm-tertiary js-cancel-auto-merge"
- data-qa-selector="cancel_auto_merge_button"
- data-testid="cancelAutomaticMergeButton"
- type="button"
- >
- <!---->
-
- <!---->
-
- <span
- class="gl-button-text"
- >
-
- Cancel auto-merge
-
- </span>
- </button>
- </div>
- </div>
- </div>
-
- <div
- class="gl-md-display-none gl-border-l-1 gl-border-l-solid gl-border-gray-100 gl-ml-3 gl-pl-3 gl-h-6 gl-mt-1"
- >
- <button
- class="btn gl-vertical-align-top btn-default btn-sm gl-button btn-default-tertiary btn-icon"
- title="Collapse merge details"
- type="button"
- >
- <!---->
-
- <svg
- aria-hidden="true"
- class="gl-button-icon gl-icon s16"
- data-testid="chevron-lg-up-icon"
- role="img"
- >
- <use
- href="#chevron-lg-up"
- />
- </svg>
-
- <!---->
- </button>
- </div>
- </div>
-</div>
-`;
diff --git a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled_spec.js b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled_spec.js
index 5b9f30dfb86..fef5fee5f19 100644
--- a/spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/states/mr_widget_auto_merge_enabled_spec.js
@@ -128,14 +128,6 @@ describe('MRWidgetAutoMergeEnabled', () => {
});
describe('template', () => {
- it('should have correct elements', () => {
- factory({
- ...defaultMrProps(),
- });
-
- expect(wrapper.element).toMatchSnapshot();
- });
-
it('should disable cancel auto merge button when the action is in progress', async () => {
factory({
...defaultMrProps(),
diff --git a/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js b/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js
index 4c93c88de16..7e941c5ceaa 100644
--- a/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js
+++ b/spec/frontend/vue_merge_request_widget/components/widget/widget_spec.js
@@ -1,6 +1,6 @@
import { nextTick } from 'vue';
import * as Sentry from '@sentry/browser';
-import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
import HelpPopover from '~/vue_shared/components/help_popover.vue';
import waitForPromises from 'helpers/wait_for_promises';
import StatusIcon from '~/vue_merge_request_widget/components/extensions/status_icon.vue';
@@ -26,8 +26,8 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
const findHelpPopover = () => wrapper.findComponent(HelpPopover);
const findDynamicScroller = () => wrapper.findByTestId('dynamic-content-scroller');
- const createComponent = ({ propsData, slots } = {}) => {
- wrapper = shallowMountExtended(Widget, {
+ const createComponent = ({ propsData, slots, mountFn = shallowMountExtended } = {}) => {
+ wrapper = mountFn(Widget, {
propsData: {
isCollapsible: false,
loadingText: 'Loading widget',
@@ -73,6 +73,13 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
expect(findStatusIcon().props()).toMatchObject({ iconName: 'failed', isLoading: false });
});
+ it('displays the error text when :has-error is true', () => {
+ createComponent({
+ propsData: { hasError: true, errorText: 'API error' },
+ });
+ expect(wrapper.findByText('API error').exists()).toBe(true);
+ });
+
it('displays loading icon until request is made and then displays status icon when the request is complete', async () => {
const fetchCollapsedData = jest
.fn()
@@ -425,6 +432,7 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
beforeEach(() => {
createComponent({
+ mountFn: mountExtended,
propsData: {
isCollapsible: true,
content,
@@ -437,5 +445,11 @@ describe('~/vue_merge_request_widget/components/widget/widget.vue', () => {
await waitForPromises();
expect(findDynamicScroller().props('items')).toEqual(content);
});
+
+ it('renders the dynamic content inside the dynamic scroller', async () => {
+ findToggleButton().vm.$emit('click');
+ await waitForPromises();
+ expect(wrapper.findByText('Main text for the row').exists()).toBe(true);
+ });
});
});
diff --git a/spec/frontend/vue_merge_request_widget/extensions/security_reports/mock_data.js b/spec/frontend/vue_merge_request_widget/extensions/security_reports/mock_data.js
new file mode 100644
index 00000000000..c7354483e8b
--- /dev/null
+++ b/spec/frontend/vue_merge_request_widget/extensions/security_reports/mock_data.js
@@ -0,0 +1,141 @@
+export const mockArtifacts = () => ({
+ data: {
+ project: {
+ id: 'gid://gitlab/Project/9',
+ mergeRequest: {
+ id: 'gid://gitlab/MergeRequest/1',
+ headPipeline: {
+ id: 'gid://gitlab/Ci::Pipeline/1',
+ jobs: {
+ nodes: [
+ {
+ id: 'gid://gitlab/Ci::Build/14',
+ name: 'sam_scan',
+ artifacts: {
+ nodes: [
+ {
+ downloadPath:
+ '/root/security-reports/-/jobs/14/artifacts/download?file_type=trace',
+ fileType: 'TRACE',
+ __typename: 'CiJobArtifact',
+ },
+ {
+ downloadPath:
+ '/root/security-reports/-/jobs/14/artifacts/download?file_type=sast',
+ fileType: 'SAST',
+ __typename: 'CiJobArtifact',
+ },
+ ],
+ __typename: 'CiJobArtifactConnection',
+ },
+ __typename: 'CiJob',
+ },
+ {
+ id: 'gid://gitlab/Ci::Build/11',
+ name: 'sast-spotbugs',
+ artifacts: {
+ nodes: [
+ {
+ downloadPath:
+ '/root/security-reports/-/jobs/11/artifacts/download?file_type=trace',
+ fileType: 'TRACE',
+ __typename: 'CiJobArtifact',
+ },
+ {
+ downloadPath:
+ '/root/security-reports/-/jobs/11/artifacts/download?file_type=sast',
+ fileType: 'SAST',
+ __typename: 'CiJobArtifact',
+ },
+ ],
+ __typename: 'CiJobArtifactConnection',
+ },
+ __typename: 'CiJob',
+ },
+ {
+ id: 'gid://gitlab/Ci::Build/10',
+ name: 'sast-sobelow',
+ artifacts: {
+ nodes: [
+ {
+ downloadPath:
+ '/root/security-reports/-/jobs/10/artifacts/download?file_type=trace',
+ fileType: 'TRACE',
+ __typename: 'CiJobArtifact',
+ },
+ ],
+ __typename: 'CiJobArtifactConnection',
+ },
+ __typename: 'CiJob',
+ },
+ {
+ id: 'gid://gitlab/Ci::Build/9',
+ name: 'sast-pmd-apex',
+ artifacts: {
+ nodes: [
+ {
+ downloadPath:
+ '/root/security-reports/-/jobs/9/artifacts/download?file_type=trace',
+ fileType: 'TRACE',
+ __typename: 'CiJobArtifact',
+ },
+ ],
+ __typename: 'CiJobArtifactConnection',
+ },
+ __typename: 'CiJob',
+ },
+ {
+ id: 'gid://gitlab/Ci::Build/8',
+ name: 'sast-eslint',
+ artifacts: {
+ nodes: [
+ {
+ downloadPath:
+ '/root/security-reports/-/jobs/8/artifacts/download?file_type=trace',
+ fileType: 'TRACE',
+ __typename: 'CiJobArtifact',
+ },
+ {
+ downloadPath:
+ '/root/security-reports/-/jobs/8/artifacts/download?file_type=sast',
+ fileType: 'SAST',
+ __typename: 'CiJobArtifact',
+ },
+ ],
+ __typename: 'CiJobArtifactConnection',
+ },
+ __typename: 'CiJob',
+ },
+ {
+ id: 'gid://gitlab/Ci::Build/7',
+ name: 'secrets',
+ artifacts: {
+ nodes: [
+ {
+ downloadPath:
+ '/root/security-reports/-/jobs/7/artifacts/download?file_type=trace',
+ fileType: 'TRACE',
+ __typename: 'CiJobArtifact',
+ },
+ {
+ downloadPath:
+ '/root/security-reports/-/jobs/7/artifacts/download?file_type=secret_detection',
+ fileType: 'SECRET_DETECTION',
+ __typename: 'CiJobArtifact',
+ },
+ ],
+ __typename: 'CiJobArtifactConnection',
+ },
+ __typename: 'CiJob',
+ },
+ ],
+ __typename: 'CiJobConnection',
+ },
+ __typename: 'Pipeline',
+ },
+ __typename: 'MergeRequest',
+ },
+ __typename: 'Project',
+ },
+ },
+});
diff --git a/spec/frontend/vue_merge_request_widget/extensions/security_reports/mr_widget_security_reports_spec.js b/spec/frontend/vue_merge_request_widget/extensions/security_reports/mr_widget_security_reports_spec.js
new file mode 100644
index 00000000000..16c2adaffaf
--- /dev/null
+++ b/spec/frontend/vue_merge_request_widget/extensions/security_reports/mr_widget_security_reports_spec.js
@@ -0,0 +1,93 @@
+import Vue from 'vue';
+import { GlDropdown } from '@gitlab/ui';
+import VueApollo from 'vue-apollo';
+import MRSecurityWidget from '~/vue_merge_request_widget/extensions/security_reports/mr_widget_security_reports.vue';
+import Widget from '~/vue_merge_request_widget/components/widget/widget.vue';
+import securityReportMergeRequestDownloadPathsQuery from '~/vue_merge_request_widget/extensions/security_reports/graphql/security_report_merge_request_download_paths.query.graphql';
+import { mountExtended } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { mockArtifacts } from './mock_data';
+
+Vue.use(VueApollo);
+
+describe('vue_merge_request_widget/extensions/security_reports/mr_widget_security_reports.vue', () => {
+ let wrapper;
+
+ const createComponent = ({ propsData, mockResponse = mockArtifacts() } = {}) => {
+ wrapper = mountExtended(MRSecurityWidget, {
+ apolloProvider: createMockApollo([
+ [securityReportMergeRequestDownloadPathsQuery, jest.fn().mockResolvedValue(mockResponse)],
+ ]),
+ propsData: {
+ ...propsData,
+ mr: {},
+ },
+ });
+ };
+
+ const findWidget = () => wrapper.findComponent(Widget);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdownItem = (name) => wrapper.findByTestId(name);
+
+ describe('with data', () => {
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('displays the correct message', () => {
+ expect(wrapper.findByText('Security scans have run').exists()).toBe(true);
+ });
+
+ it('displays the help popover', () => {
+ expect(findWidget().props('helpPopover')).toEqual({
+ content: {
+ learnMorePath:
+ '/help/user/application_security/index#view-security-scan-information-in-merge-requests',
+ text:
+ 'New vulnerabilities are vulnerabilities that the security scan detects in the merge request that are different to existing vulnerabilities in the default branch.',
+ },
+ options: {
+ title: 'Security scan results',
+ },
+ });
+ });
+
+ it.each`
+ artifactName | exists | downloadPath
+ ${'sam_scan'} | ${true} | ${'/root/security-reports/-/jobs/14/artifacts/download?file_type=sast'}
+ ${'sast-spotbugs'} | ${true} | ${'/root/security-reports/-/jobs/11/artifacts/download?file_type=sast'}
+ ${'sast-sobelow'} | ${false} | ${''}
+ ${'sast-pmd-apex'} | ${false} | ${''}
+ ${'sast-eslint'} | ${true} | ${'/root/security-reports/-/jobs/8/artifacts/download?file_type=sast'}
+ ${'secrets'} | ${true} | ${'/root/security-reports/-/jobs/7/artifacts/download?file_type=secret_detection'}
+ `(
+ 'has a dropdown to download $artifactName artifacts',
+ ({ artifactName, exists, downloadPath }) => {
+ expect(findDropdown().exists()).toBe(true);
+ expect(wrapper.findByText(`Download ${artifactName}`).exists()).toBe(exists);
+
+ if (exists) {
+ const dropdownItem = findDropdownItem(`download-${artifactName}`);
+ expect(dropdownItem.attributes('download')).toBe('');
+ expect(dropdownItem.attributes('href')).toBe(downloadPath);
+ }
+ },
+ );
+ });
+
+ describe('without data', () => {
+ beforeEach(() => {
+ createComponent({ mockResponse: { data: { project: { id: 'project-id' } } } });
+ });
+
+ it('displays the correct message', () => {
+ expect(wrapper.findByText('Security scans have run').exists()).toBe(true);
+ });
+
+ it('should not display the artifacts dropdown', () => {
+ expect(findDropdown().exists()).toBe(false);
+ });
+ });
+});
diff --git a/spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js b/spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js
index baef247b649..548b68bc103 100644
--- a/spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js
+++ b/spec/frontend/vue_merge_request_widget/extensions/test_report/index_spec.js
@@ -8,7 +8,11 @@ import waitForPromises from 'helpers/wait_for_promises';
import axios from '~/lib/utils/axios_utils';
import extensionsContainer from '~/vue_merge_request_widget/components/extensions/container';
import { registerExtension } from '~/vue_merge_request_widget/components/extensions';
-import httpStatusCodes, { HTTP_STATUS_NO_CONTENT } from '~/lib/utils/http_status';
+import {
+ HTTP_STATUS_INTERNAL_SERVER_ERROR,
+ HTTP_STATUS_NO_CONTENT,
+ HTTP_STATUS_OK,
+} from '~/lib/utils/http_status';
import TestCaseDetails from '~/pipelines/components/test_reports/test_case_details.vue';
import { failedReport } from 'jest/ci/reports/mock_data/mock_data';
@@ -57,7 +61,7 @@ describe('Test report extension', () => {
};
const createExpandedWidgetWithData = async (data = mixedResultsTestReports) => {
- mockApi(httpStatusCodes.OK, data);
+ mockApi(HTTP_STATUS_OK, data);
createComponent();
await waitForPromises();
findToggleCollapsedButton().trigger('click');
@@ -75,7 +79,7 @@ describe('Test report extension', () => {
describe('summary', () => {
it('displays loading state initially', () => {
- mockApi(httpStatusCodes.OK);
+ mockApi(HTTP_STATUS_OK);
createComponent();
expect(wrapper.text()).toContain(i18n.loading);
@@ -91,7 +95,7 @@ describe('Test report extension', () => {
});
it('with an error response, displays failed to load text', async () => {
- mockApi(httpStatusCodes.INTERNAL_SERVER_ERROR);
+ mockApi(HTTP_STATUS_INTERNAL_SERVER_ERROR);
createComponent();
await waitForPromises();
@@ -107,7 +111,7 @@ describe('Test report extension', () => {
${'failed test results'} | ${newFailedTestReports} | ${'Test summary: 2 failed, 11 total tests'}
${'resolved failures'} | ${resolvedFailures} | ${'Test summary: 4 fixed test results, 11 total tests'}
`('displays summary text for $description', async ({ mockData, expectedResult }) => {
- mockApi(httpStatusCodes.OK, mockData);
+ mockApi(HTTP_STATUS_OK, mockData);
createComponent();
await waitForPromises();
@@ -116,7 +120,7 @@ describe('Test report extension', () => {
});
it('displays report level recently failed count', async () => {
- mockApi(httpStatusCodes.OK, recentFailures);
+ mockApi(HTTP_STATUS_OK, recentFailures);
createComponent();
await waitForPromises();
@@ -127,7 +131,7 @@ describe('Test report extension', () => {
});
it('displays a link to the full report', async () => {
- mockApi(httpStatusCodes.OK);
+ mockApi(HTTP_STATUS_OK);
createComponent();
await waitForPromises();
@@ -137,7 +141,7 @@ describe('Test report extension', () => {
});
it('hides copy failed tests button when there are no failing tests', async () => {
- mockApi(httpStatusCodes.OK);
+ mockApi(HTTP_STATUS_OK);
createComponent();
await waitForPromises();
@@ -146,7 +150,7 @@ describe('Test report extension', () => {
});
it('displays copy failed tests button when there are failing tests', async () => {
- mockApi(httpStatusCodes.OK, newFailedTestReports);
+ mockApi(HTTP_STATUS_OK, newFailedTestReports);
createComponent();
await waitForPromises();
@@ -159,7 +163,7 @@ describe('Test report extension', () => {
});
it('hides copy failed tests button when endpoint returns null files', async () => {
- mockApi(httpStatusCodes.OK, newFailedTestWithNullFilesReport);
+ mockApi(HTTP_STATUS_OK, newFailedTestWithNullFilesReport);
createComponent();
await waitForPromises();
@@ -168,7 +172,7 @@ describe('Test report extension', () => {
});
it('copy failed tests button updates tooltip text when clicked', async () => {
- mockApi(httpStatusCodes.OK, newFailedTestReports);
+ mockApi(HTTP_STATUS_OK, newFailedTestReports);
createComponent();
await waitForPromises();
@@ -195,7 +199,7 @@ describe('Test report extension', () => {
});
it('shows an error when a suite has a parsing error', async () => {
- mockApi(httpStatusCodes.OK, reportWithParsingErrors);
+ mockApi(HTTP_STATUS_OK, reportWithParsingErrors);
createComponent();
await waitForPromises();
diff --git a/spec/frontend/vue_merge_request_widget/extentions/accessibility/index_spec.js b/spec/frontend/vue_merge_request_widget/extentions/accessibility/index_spec.js
index a06ad930abe..01049e54a7f 100644
--- a/spec/frontend/vue_merge_request_widget/extentions/accessibility/index_spec.js
+++ b/spec/frontend/vue_merge_request_widget/extentions/accessibility/index_spec.js
@@ -6,7 +6,7 @@ import axios from '~/lib/utils/axios_utils';
import extensionsContainer from '~/vue_merge_request_widget/components/extensions/container';
import { registerExtension } from '~/vue_merge_request_widget/components/extensions';
import accessibilityExtension from '~/vue_merge_request_widget/extensions/accessibility';
-import httpStatusCodes from '~/lib/utils/http_status';
+import { HTTP_STATUS_INTERNAL_SERVER_ERROR, HTTP_STATUS_OK } from '~/lib/utils/http_status';
import { accessibilityReportResponseErrors, accessibilityReportResponseSuccess } from './mock_data';
describe('Accessibility extension', () => {
@@ -45,7 +45,7 @@ describe('Accessibility extension', () => {
describe('summary', () => {
it('displays loading text', () => {
- mockApi(httpStatusCodes.OK, accessibilityReportResponseErrors);
+ mockApi(HTTP_STATUS_OK, accessibilityReportResponseErrors);
createComponent();
@@ -53,7 +53,7 @@ describe('Accessibility extension', () => {
});
it('displays failed loading text', async () => {
- mockApi(httpStatusCodes.INTERNAL_SERVER_ERROR);
+ mockApi(HTTP_STATUS_INTERNAL_SERVER_ERROR);
createComponent();
@@ -63,7 +63,7 @@ describe('Accessibility extension', () => {
});
it('displays detected errors and is expandable', async () => {
- mockApi(httpStatusCodes.OK, accessibilityReportResponseErrors);
+ mockApi(HTTP_STATUS_OK, accessibilityReportResponseErrors);
createComponent();
@@ -76,7 +76,7 @@ describe('Accessibility extension', () => {
});
it('displays no detected errors and is not expandable', async () => {
- mockApi(httpStatusCodes.OK, accessibilityReportResponseSuccess);
+ mockApi(HTTP_STATUS_OK, accessibilityReportResponseSuccess);
createComponent();
@@ -91,7 +91,7 @@ describe('Accessibility extension', () => {
describe('expanded data', () => {
beforeEach(async () => {
- mockApi(httpStatusCodes.OK, accessibilityReportResponseErrors);
+ mockApi(HTTP_STATUS_OK, accessibilityReportResponseErrors);
createComponent();
diff --git a/spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js b/spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js
index f0ebbb1a82e..67b327217ef 100644
--- a/spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js
+++ b/spec/frontend/vue_merge_request_widget/extentions/code_quality/index_spec.js
@@ -7,10 +7,18 @@ import axios from '~/lib/utils/axios_utils';
import extensionsContainer from '~/vue_merge_request_widget/components/extensions/container';
import { registerExtension } from '~/vue_merge_request_widget/components/extensions';
import codeQualityExtension from '~/vue_merge_request_widget/extensions/code_quality';
-import httpStatusCodes, { HTTP_STATUS_NO_CONTENT } from '~/lib/utils/http_status';
-import { i18n } from '~/vue_merge_request_widget/extensions/code_quality/constants';
+import {
+ HTTP_STATUS_INTERNAL_SERVER_ERROR,
+ HTTP_STATUS_NO_CONTENT,
+ HTTP_STATUS_OK,
+} from '~/lib/utils/http_status';
+import {
+ i18n,
+ codeQualityPrefixes,
+} from '~/vue_merge_request_widget/extensions/code_quality/constants';
import {
codeQualityResponseNewErrors,
+ codeQualityResponseResolvedErrors,
codeQualityResponseResolvedAndNewErrors,
codeQualityResponseNoErrors,
} from './mock_data';
@@ -29,6 +37,10 @@ describe('Code Quality extension', () => {
const findToggleCollapsedButton = () => wrapper.findByTestId('toggle-button');
const findAllExtensionListItems = () => wrapper.findAllByTestId('extension-list-item');
+ const isCollapsable = () => wrapper.findByTestId('toggle-button').exists();
+ const getNeutralIcon = () => wrapper.findByTestId('status-neutral-icon').exists();
+ const getAlertIcon = () => wrapper.findByTestId('status-alert-icon').exists();
+ const getSuccessIcon = () => wrapper.findByTestId('status-success-icon').exists();
const createComponent = () => {
wrapper = mountExtended(extensionsContainer, {
@@ -55,7 +67,7 @@ describe('Code Quality extension', () => {
describe('summary', () => {
it('displays loading text', () => {
- mockApi(httpStatusCodes.OK, codeQualityResponseNewErrors);
+ mockApi(HTTP_STATUS_OK, codeQualityResponseNewErrors);
createComponent();
@@ -72,28 +84,57 @@ describe('Code Quality extension', () => {
});
it('displays failed loading text', async () => {
- mockApi(httpStatusCodes.INTERNAL_SERVER_ERROR);
+ mockApi(HTTP_STATUS_INTERNAL_SERVER_ERROR);
createComponent();
await waitForPromises();
+
expect(wrapper.text()).toBe(i18n.error);
+ expect(isCollapsable()).toBe(false);
});
- it('displays correct single Report', async () => {
- mockApi(httpStatusCodes.OK, codeQualityResponseNewErrors);
+ it('displays new Errors finding', async () => {
+ mockApi(HTTP_STATUS_OK, codeQualityResponseNewErrors);
createComponent();
await waitForPromises();
+ expect(wrapper.text()).toBe(
+ i18n
+ .singularCopy(
+ i18n.findings(codeQualityResponseNewErrors.new_errors, codeQualityPrefixes.new),
+ )
+ .replace(/%{strong_start}/g, '')
+ .replace(/%{strong_end}/g, ''),
+ );
+ expect(isCollapsable()).toBe(true);
+ expect(getAlertIcon()).toBe(true);
+ });
+
+ it('displays resolved Errors finding', async () => {
+ mockApi(HTTP_STATUS_OK, codeQualityResponseResolvedErrors);
+ createComponent();
+
+ await waitForPromises();
expect(wrapper.text()).toBe(
- i18n.degradedCopy(i18n.singularReport(codeQualityResponseNewErrors.new_errors)),
+ i18n
+ .singularCopy(
+ i18n.findings(
+ codeQualityResponseResolvedErrors.resolved_errors,
+ codeQualityPrefixes.fixed,
+ ),
+ )
+ .replace(/%{strong_start}/g, '')
+ .replace(/%{strong_end}/g, ''),
);
+ expect(isCollapsable()).toBe(true);
+ expect(getSuccessIcon()).toBe(true);
});
it('displays quality improvement and degradation', async () => {
- mockApi(httpStatusCodes.OK, codeQualityResponseResolvedAndNewErrors);
+ mockApi(HTTP_STATUS_OK, codeQualityResponseResolvedAndNewErrors);
createComponent();
await waitForPromises();
@@ -102,28 +143,38 @@ describe('Code Quality extension', () => {
expect(wrapper.text()).toBe(
i18n
.improvementAndDegradationCopy(
- i18n.pluralReport(codeQualityResponseResolvedAndNewErrors.resolved_errors),
- i18n.pluralReport(codeQualityResponseResolvedAndNewErrors.new_errors),
+ i18n.findings(
+ codeQualityResponseResolvedAndNewErrors.resolved_errors,
+ codeQualityPrefixes.fixed,
+ ),
+ i18n.findings(
+ codeQualityResponseResolvedAndNewErrors.new_errors,
+ codeQualityPrefixes.new,
+ ),
)
.replace(/%{strong_start}/g, '')
.replace(/%{strong_end}/g, ''),
);
+ expect(isCollapsable()).toBe(true);
+ expect(getAlertIcon()).toBe(true);
});
it('displays no detected errors', async () => {
- mockApi(httpStatusCodes.OK, codeQualityResponseNoErrors);
+ mockApi(HTTP_STATUS_OK, codeQualityResponseNoErrors);
createComponent();
await waitForPromises();
expect(wrapper.text()).toBe(i18n.noChanges);
+ expect(isCollapsable()).toBe(false);
+ expect(getNeutralIcon()).toBe(true);
});
});
describe('expanded data', () => {
beforeEach(async () => {
- mockApi(httpStatusCodes.OK, codeQualityResponseResolvedAndNewErrors);
+ mockApi(HTTP_STATUS_OK, codeQualityResponseResolvedAndNewErrors);
createComponent();
diff --git a/spec/frontend/vue_merge_request_widget/extentions/code_quality/mock_data.js b/spec/frontend/vue_merge_request_widget/extentions/code_quality/mock_data.js
index 2e8e70f25db..cb23b730a93 100644
--- a/spec/frontend/vue_merge_request_widget/extentions/code_quality/mock_data.js
+++ b/spec/frontend/vue_merge_request_widget/extentions/code_quality/mock_data.js
@@ -17,9 +17,34 @@ export const codeQualityResponseNewErrors = {
resolved_errors: [],
existing_errors: [],
summary: {
- total: 2,
+ total: 12235,
resolved: 0,
- errored: 2,
+ errored: 12235,
+ },
+};
+
+export const codeQualityResponseResolvedErrors = {
+ status: 'success',
+ new_errors: [],
+ resolved_errors: [
+ {
+ description: "Parsing error: 'return' outside of function",
+ severity: 'minor',
+ file_path: 'index.js',
+ line: 12,
+ },
+ {
+ description: 'TODO found',
+ severity: 'minor',
+ file_path: '.gitlab-ci.yml',
+ line: 73,
+ },
+ ],
+ existing_errors: [],
+ summary: {
+ total: 12235,
+ resolved: 0,
+ errored: 12235,
},
};
@@ -43,9 +68,9 @@ export const codeQualityResponseResolvedAndNewErrors = {
],
existing_errors: [],
summary: {
- total: 2,
+ total: 12233,
resolved: 1,
- errored: 1,
+ errored: 12233,
},
};
@@ -55,8 +80,8 @@ export const codeQualityResponseNoErrors = {
resolved_errors: [],
existing_errors: [],
summary: {
- total: 0,
+ total: 12234,
resolved: 0,
- errored: 0,
+ errored: 12234,
},
};
diff --git a/spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js b/spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js
index d038660e6d3..015d394312a 100644
--- a/spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js
+++ b/spec/frontend/vue_merge_request_widget/mr_widget_how_to_merge_modal_spec.js
@@ -34,7 +34,7 @@ describe('MRWidgetHowToMerge', () => {
});
it('renders a selection of markdown fields', () => {
- expect(findInstructionsFields().length).toBe(3);
+ expect(findInstructionsFields().length).toBe(2);
});
it('renders a tip including a link to docs when a valid link is present', () => {
@@ -48,23 +48,11 @@ describe('MRWidgetHowToMerge', () => {
it('should render different instructions based on if the user can merge', () => {
mountComponent({ props: { canMerge: true } });
- expect(findInstructionsFields().at(2).text()).toContain('git push origin');
- });
-
- it('should render different instructions based on if the merge is based off a fork', () => {
- mountComponent({ props: { isFork: true } });
- expect(findInstructionsFields().at(0).text()).toContain('FETCH_HEAD');
- });
-
- it('escapes the target branch name shell-secure', () => {
- mountComponent({ props: { targetBranch: '";echo$IFS"you_shouldnt_run_this' } });
-
- expect(findInstructionsFields().at(1).text()).toContain('\'";echo$IFS"you_shouldnt_run_this\'');
+ expect(findInstructionsFields().at(1).text()).toContain('git push origin');
});
it('escapes the source branch name shell-secure', () => {
mountComponent({ props: { sourceBranch: 'branch-of-$USER' } });
-
expect(findInstructionsFields().at(0).text()).toContain("'branch-of-$USER'");
});
});
diff --git a/spec/frontend/vue_shared/components/ci_badge_link_spec.js b/spec/frontend/vue_shared/components/ci_badge_link_spec.js
index 07cbfe1e79b..4f24ec2d015 100644
--- a/spec/frontend/vue_shared/components/ci_badge_link_spec.js
+++ b/spec/frontend/vue_shared/components/ci_badge_link_spec.js
@@ -1,6 +1,6 @@
import { GlLink } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import CiBadge from '~/vue_shared/components/ci_badge_link.vue';
+import CiBadgeLink from '~/vue_shared/components/ci_badge_link.vue';
import CiIcon from '~/vue_shared/components/ci_icon.vue';
jest.mock('~/lib/utils/url_utility', () => ({
@@ -79,7 +79,7 @@ describe('CI Badge Link Component', () => {
const findIcon = () => wrapper.findComponent(CiIcon);
const createComponent = (propsData) => {
- wrapper = shallowMount(CiBadge, { propsData });
+ wrapper = shallowMount(CiBadgeLink, { propsData });
};
afterEach(() => {
diff --git a/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/actions_spec.js b/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/actions_spec.js
index 66ef473f368..63c22aff3d5 100644
--- a/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/actions_spec.js
+++ b/spec/frontend/vue_shared/components/filtered_search_bar/store/modules/filters/actions_spec.js
@@ -4,7 +4,7 @@ import testAction from 'helpers/vuex_action_helper';
import { mockBranches } from 'jest/vue_shared/components/filtered_search_bar/mock_data';
import Api from '~/api';
import { createAlert } from '~/flash';
-import httpStatusCodes from '~/lib/utils/http_status';
+import { HTTP_STATUS_OK, HTTP_STATUS_SERVICE_UNAVAILABLE } from '~/lib/utils/http_status';
import * as actions from '~/vue_shared/components/filtered_search_bar/store/modules/filters/actions';
import * as types from '~/vue_shared/components/filtered_search_bar/store/modules/filters/mutation_types';
import initialState from '~/vue_shared/components/filtered_search_bar/store/modules/filters/state';
@@ -122,7 +122,7 @@ describe('Filters actions', () => {
':id',
encodeURIComponent(projectEndpoint),
);
- mock.onGet(url).replyOnce(httpStatusCodes.OK, mockBranches);
+ mock.onGet(url).replyOnce(HTTP_STATUS_OK, mockBranches);
});
it('dispatches RECEIVE_BRANCHES_SUCCESS with received data', () => {
@@ -143,7 +143,7 @@ describe('Filters actions', () => {
describe('error', () => {
beforeEach(() => {
- mock.onAny().replyOnce(httpStatusCodes.SERVICE_UNAVAILABLE);
+ mock.onAny().replyOnce(HTTP_STATUS_SERVICE_UNAVAILABLE);
});
it('dispatches RECEIVE_BRANCHES_ERROR', () => {
@@ -155,7 +155,7 @@ describe('Filters actions', () => {
{ type: types.REQUEST_BRANCHES },
{
type: types.RECEIVE_BRANCHES_ERROR,
- payload: httpStatusCodes.SERVICE_UNAVAILABLE,
+ payload: HTTP_STATUS_SERVICE_UNAVAILABLE,
},
],
[],
@@ -177,7 +177,7 @@ describe('Filters actions', () => {
describe('success', () => {
beforeEach(() => {
- mock.onAny().replyOnce(httpStatusCodes.OK, filterUsers);
+ mock.onAny().replyOnce(HTTP_STATUS_OK, filterUsers);
});
it('dispatches RECEIVE_AUTHORS_SUCCESS with received data and groupEndpoint set', () => {
@@ -215,7 +215,7 @@ describe('Filters actions', () => {
describe('error', () => {
beforeEach(() => {
- mock.onAny().replyOnce(httpStatusCodes.SERVICE_UNAVAILABLE);
+ mock.onAny().replyOnce(HTTP_STATUS_SERVICE_UNAVAILABLE);
});
it('dispatches RECEIVE_AUTHORS_ERROR and groupEndpoint set', () => {
@@ -227,7 +227,7 @@ describe('Filters actions', () => {
{ type: types.REQUEST_AUTHORS },
{
type: types.RECEIVE_AUTHORS_ERROR,
- payload: httpStatusCodes.SERVICE_UNAVAILABLE,
+ payload: HTTP_STATUS_SERVICE_UNAVAILABLE,
},
],
[],
@@ -246,7 +246,7 @@ describe('Filters actions', () => {
{ type: types.REQUEST_AUTHORS },
{
type: types.RECEIVE_AUTHORS_ERROR,
- payload: httpStatusCodes.SERVICE_UNAVAILABLE,
+ payload: HTTP_STATUS_SERVICE_UNAVAILABLE,
},
],
[],
@@ -261,7 +261,7 @@ describe('Filters actions', () => {
describe('fetchMilestones', () => {
describe('success', () => {
beforeEach(() => {
- mock.onGet(milestonesEndpoint).replyOnce(httpStatusCodes.OK, filterMilestones);
+ mock.onGet(milestonesEndpoint).replyOnce(HTTP_STATUS_OK, filterMilestones);
});
it('dispatches RECEIVE_MILESTONES_SUCCESS with received data', () => {
@@ -282,7 +282,7 @@ describe('Filters actions', () => {
describe('error', () => {
beforeEach(() => {
- mock.onAny().replyOnce(httpStatusCodes.SERVICE_UNAVAILABLE);
+ mock.onAny().replyOnce(HTTP_STATUS_SERVICE_UNAVAILABLE);
});
it('dispatches RECEIVE_MILESTONES_ERROR', () => {
@@ -294,7 +294,7 @@ describe('Filters actions', () => {
{ type: types.REQUEST_MILESTONES },
{
type: types.RECEIVE_MILESTONES_ERROR,
- payload: httpStatusCodes.SERVICE_UNAVAILABLE,
+ payload: HTTP_STATUS_SERVICE_UNAVAILABLE,
},
],
[],
@@ -307,7 +307,7 @@ describe('Filters actions', () => {
describe('success', () => {
let restoreVersion;
beforeEach(() => {
- mock.onAny().replyOnce(httpStatusCodes.OK, filterUsers);
+ mock.onAny().replyOnce(HTTP_STATUS_OK, filterUsers);
restoreVersion = gon.api_version;
gon.api_version = 'v1';
});
@@ -352,7 +352,7 @@ describe('Filters actions', () => {
describe('error', () => {
let restoreVersion;
beforeEach(() => {
- mock.onAny().replyOnce(httpStatusCodes.SERVICE_UNAVAILABLE);
+ mock.onAny().replyOnce(HTTP_STATUS_SERVICE_UNAVAILABLE);
restoreVersion = gon.api_version;
gon.api_version = 'v1';
});
@@ -370,7 +370,7 @@ describe('Filters actions', () => {
{ type: types.REQUEST_ASSIGNEES },
{
type: types.RECEIVE_ASSIGNEES_ERROR,
- payload: httpStatusCodes.SERVICE_UNAVAILABLE,
+ payload: HTTP_STATUS_SERVICE_UNAVAILABLE,
},
],
[],
@@ -389,7 +389,7 @@ describe('Filters actions', () => {
{ type: types.REQUEST_ASSIGNEES },
{
type: types.RECEIVE_ASSIGNEES_ERROR,
- payload: httpStatusCodes.SERVICE_UNAVAILABLE,
+ payload: HTTP_STATUS_SERVICE_UNAVAILABLE,
},
],
[],
@@ -404,7 +404,7 @@ describe('Filters actions', () => {
describe('fetchLabels', () => {
describe('success', () => {
beforeEach(() => {
- mock.onGet(labelsEndpoint).replyOnce(httpStatusCodes.OK, filterLabels);
+ mock.onGet(labelsEndpoint).replyOnce(HTTP_STATUS_OK, filterLabels);
});
it('dispatches RECEIVE_LABELS_SUCCESS with received data', () => {
@@ -425,7 +425,7 @@ describe('Filters actions', () => {
describe('error', () => {
beforeEach(() => {
- mock.onAny().replyOnce(httpStatusCodes.SERVICE_UNAVAILABLE);
+ mock.onAny().replyOnce(HTTP_STATUS_SERVICE_UNAVAILABLE);
});
it('dispatches RECEIVE_LABELS_ERROR', () => {
@@ -437,7 +437,7 @@ describe('Filters actions', () => {
{ type: types.REQUEST_LABELS },
{
type: types.RECEIVE_LABELS_ERROR,
- payload: httpStatusCodes.SERVICE_UNAVAILABLE,
+ payload: HTTP_STATUS_SERVICE_UNAVAILABLE,
},
],
[],
diff --git a/spec/frontend/vue_shared/components/group_select/group_select_spec.js b/spec/frontend/vue_shared/components/group_select/group_select_spec.js
index c10b32c6acc..87dd7795b98 100644
--- a/spec/frontend/vue_shared/components/group_select/group_select_spec.js
+++ b/spec/frontend/vue_shared/components/group_select/group_select_spec.js
@@ -1,20 +1,18 @@
import { nextTick } from 'vue';
-import { GlCollapsibleListbox } from '@gitlab/ui';
+import { GlFormGroup, GlCollapsibleListbox } from '@gitlab/ui';
import MockAdapter from 'axios-mock-adapter';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import axios from '~/lib/utils/axios_utils';
-import { createAlert } from '~/flash';
import GroupSelect from '~/vue_shared/components/group_select/group_select.vue';
import {
TOGGLE_TEXT,
+ RESET_LABEL,
FETCH_GROUPS_ERROR,
FETCH_GROUP_ERROR,
QUERY_TOO_SHORT_MESSAGE,
} from '~/vue_shared/components/group_select/constants';
import waitForPromises from 'helpers/wait_for_promises';
-jest.mock('~/flash');
-
describe('GroupSelect', () => {
let wrapper;
let mock;
@@ -26,22 +24,34 @@ describe('GroupSelect', () => {
};
const groupEndpoint = `/api/undefined/groups/${groupMock.id}`;
+ // Stubs
+ const GlAlert = {
+ template: '<div><slot /></div>',
+ };
+
// Props
+ const label = 'label';
const inputName = 'inputName';
const inputId = 'inputId';
// Finders
+ const findFormGroup = () => wrapper.findComponent(GlFormGroup);
const findListbox = () => wrapper.findComponent(GlCollapsibleListbox);
const findInput = () => wrapper.findByTestId('input');
+ const findAlert = () => wrapper.findComponent(GlAlert);
// Helpers
const createComponent = ({ props = {} } = {}) => {
wrapper = shallowMountExtended(GroupSelect, {
propsData: {
+ label,
inputName,
inputId,
...props,
},
+ stubs: {
+ GlAlert,
+ },
});
};
const openListbox = () => findListbox().vm.$emit('shown');
@@ -65,6 +75,12 @@ describe('GroupSelect', () => {
mock.restore();
});
+ it('passes the label to GlFormGroup', () => {
+ createComponent();
+
+ expect(findFormGroup().attributes('label')).toBe(label);
+ });
+
describe('on mount', () => {
it('fetches groups when the listbox is opened', async () => {
createComponent();
@@ -94,13 +110,13 @@ describe('GroupSelect', () => {
.reply(200, [{ full_name: 'notTheSelectedGroup', id: '2' }]);
mock.onGet(groupEndpoint).reply(500);
createComponent({ props: { initialSelection: groupMock.id } });
+
+ expect(findAlert().exists()).toBe(false);
+
await waitForPromises();
- expect(createAlert).toHaveBeenCalledWith({
- message: FETCH_GROUP_ERROR,
- error: expect.any(Error),
- parent: wrapper.vm.$el,
- });
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toBe(FETCH_GROUP_ERROR);
});
});
});
@@ -109,13 +125,12 @@ describe('GroupSelect', () => {
mock.onGet('/api/undefined/groups.json').reply(500);
createComponent();
openListbox();
+ expect(findAlert().exists()).toBe(false);
+
await waitForPromises();
- expect(createAlert).toHaveBeenCalledWith({
- message: FETCH_GROUPS_ERROR,
- error: expect.any(Error),
- parent: wrapper.vm.$el,
- });
+ expect(findAlert().exists()).toBe(true);
+ expect(findAlert().text()).toBe(FETCH_GROUPS_ERROR);
});
describe('selection', () => {
@@ -186,7 +201,11 @@ describe('GroupSelect', () => {
await waitForPromises();
expect(mock.history.get).toHaveLength(2);
- expect(mock.history.get[1].params).toStrictEqual({ search: searchString });
+ expect(mock.history.get[1].params).toStrictEqual({
+ page: 1,
+ per_page: 20,
+ search: searchString,
+ });
});
it('shows a notice if the search query is too short', async () => {
@@ -199,4 +218,105 @@ describe('GroupSelect', () => {
expect(findListbox().props('noResultsText')).toBe(QUERY_TOO_SHORT_MESSAGE);
});
});
+
+ describe('pagination', () => {
+ const searchString = 'searchString';
+
+ beforeEach(async () => {
+ let requestCount = 0;
+ mock.onGet('/api/undefined/groups.json').reply(({ params }) => {
+ requestCount += 1;
+ return [
+ 200,
+ [
+ {
+ full_name: `Group [page: ${params.page} - search: ${params.search}]`,
+ id: requestCount,
+ },
+ ],
+ {
+ page: params.page,
+ 'x-total-pages': 3,
+ },
+ ];
+ });
+ createComponent();
+ openListbox();
+ findListbox().vm.$emit('bottom-reached');
+ return waitForPromises();
+ });
+
+ it('fetches the next page when bottom is reached', async () => {
+ expect(mock.history.get).toHaveLength(2);
+ expect(mock.history.get[1].params).toStrictEqual({
+ page: 2,
+ per_page: 20,
+ search: '',
+ });
+ });
+
+ it('fetches the first page when the search query changes', async () => {
+ search(searchString);
+ await waitForPromises();
+
+ expect(mock.history.get).toHaveLength(3);
+ expect(mock.history.get[2].params).toStrictEqual({
+ page: 1,
+ per_page: 20,
+ search: searchString,
+ });
+ });
+
+ it('retains the search query when infinite scrolling', async () => {
+ search(searchString);
+ await waitForPromises();
+ findListbox().vm.$emit('bottom-reached');
+ await waitForPromises();
+
+ expect(mock.history.get).toHaveLength(4);
+ expect(mock.history.get[3].params).toStrictEqual({
+ page: 2,
+ per_page: 20,
+ search: searchString,
+ });
+ });
+
+ it('pauses infinite scroll after fetching the last page', async () => {
+ expect(findListbox().props('infiniteScroll')).toBe(true);
+
+ findListbox().vm.$emit('bottom-reached');
+ await waitForPromises();
+
+ expect(findListbox().props('infiniteScroll')).toBe(false);
+ });
+
+ it('resumes infinite scroll when search query changes', async () => {
+ findListbox().vm.$emit('bottom-reached');
+ await waitForPromises();
+
+ expect(findListbox().props('infiniteScroll')).toBe(false);
+
+ search(searchString);
+ await waitForPromises();
+
+ expect(findListbox().props('infiniteScroll')).toBe(true);
+ });
+ });
+
+ it.each`
+ description | clearable | expectedLabel
+ ${'passes'} | ${true} | ${RESET_LABEL}
+ ${'does not pass'} | ${false} | ${''}
+ `(
+ '$description the reset button label to the listbox when clearable is $clearable',
+ ({ clearable, expectedLabel }) => {
+ createComponent({
+ props: {
+ clearable,
+ },
+ });
+
+ expect(findListbox().props('resetButtonLabel')).toBe(expectedLabel);
+ },
+ );
});
diff --git a/spec/frontend/vue_shared/components/header_ci_component_spec.js b/spec/frontend/vue_shared/components/header_ci_component_spec.js
index aea76f164f0..94e1ece8c6b 100644
--- a/spec/frontend/vue_shared/components/header_ci_component_spec.js
+++ b/spec/frontend/vue_shared/components/header_ci_component_spec.js
@@ -84,11 +84,12 @@ describe('Header CI Component', () => {
expect(findUserLink().text()).toContain(defaultProps.user.username);
});
- it('has the correct data attributes', () => {
+ it('has the correct HTML attributes', () => {
expect(findUserLink().attributes()).toMatchObject({
'data-user-id': defaultProps.user.id.toString(),
'data-username': defaultProps.user.username,
'data-name': defaultProps.user.name,
+ href: defaultProps.user.web_url,
});
});
diff --git a/spec/frontend/vue_shared/components/listbox_input/listbox_input_spec.js b/spec/frontend/vue_shared/components/listbox_input/listbox_input_spec.js
index cb7262b15e3..7ed6a59c844 100644
--- a/spec/frontend/vue_shared/components/listbox_input/listbox_input_spec.js
+++ b/spec/frontend/vue_shared/components/listbox_input/listbox_input_spec.js
@@ -1,11 +1,13 @@
import { shallowMount } from '@vue/test-utils';
-import { GlListbox } from '@gitlab/ui';
+import { GlFormGroup, GlListbox } from '@gitlab/ui';
import ListboxInput from '~/vue_shared/components/listbox_input/listbox_input.vue';
describe('ListboxInput', () => {
let wrapper;
// Props
+ const label = 'label';
+ const description = 'description';
const name = 'name';
const defaultToggleText = 'defaultToggleText';
const items = [
@@ -21,30 +23,70 @@ describe('ListboxInput', () => {
options: [{ text: 'Item 3', value: '3' }],
},
];
+ const id = 'id';
// Finders
+ const findGlFormGroup = () => wrapper.findComponent(GlFormGroup);
const findGlListbox = () => wrapper.findComponent(GlListbox);
const findInput = () => wrapper.find('input');
const createComponent = (propsData) => {
wrapper = shallowMount(ListboxInput, {
propsData: {
+ label,
+ description,
name,
defaultToggleText,
items,
...propsData,
},
+ attrs: {
+ id,
+ },
});
};
- describe('input attributes', () => {
+ describe('wrapper', () => {
+ it.each`
+ description | labelProp | descriptionProp | rendersGlFormGroup
+ ${'does not render'} | ${''} | ${''} | ${false}
+ ${'renders'} | ${'labelProp'} | ${''} | ${true}
+ ${'renders'} | ${''} | ${'descriptionProp'} | ${true}
+ ${'renders'} | ${'labelProp'} | ${'descriptionProp'} | ${true}
+ `(
+ "$description a GlFormGroup when label is '$labelProp' and description is '$descriptionProp'",
+ ({ labelProp, descriptionProp, rendersGlFormGroup }) => {
+ createComponent({ label: labelProp, description: descriptionProp });
+
+ expect(findGlFormGroup().exists()).toBe(rendersGlFormGroup);
+ },
+ );
+ });
+
+ describe('options', () => {
beforeEach(() => {
createComponent();
});
+ it('passes the label to the form group', () => {
+ expect(findGlFormGroup().attributes('label')).toBe(label);
+ });
+
+ it('passes the description to the form group', () => {
+ expect(findGlFormGroup().attributes('description')).toBe(description);
+ });
+
it('sets the input name', () => {
expect(findInput().attributes('name')).toBe(name);
});
+
+ it('is not filterable with few items', () => {
+ expect(findGlListbox().props('searchable')).toBe(false);
+ });
+
+ it('passes attributes to the root element', () => {
+ expect(findGlFormGroup().attributes('id')).toBe(id);
+ });
});
describe('toggle text', () => {
@@ -91,12 +133,29 @@ describe('ListboxInput', () => {
});
describe('search', () => {
- beforeEach(() => {
- createComponent();
+ it('is searchable when there are more than 10 items', () => {
+ createComponent({
+ items: [
+ {
+ text: 'Group 1',
+ options: [...Array(10).keys()].map((index) => ({
+ text: index + 1,
+ value: String(index + 1),
+ })),
+ },
+ {
+ text: 'Group 2',
+ options: [{ text: 'Item 11', value: '11' }],
+ },
+ ],
+ });
+
+ expect(findGlListbox().props('searchable')).toBe(true);
});
it('passes all items to GlListbox by default', () => {
createComponent();
+
expect(findGlListbox().props('items')).toStrictEqual(items);
});
diff --git a/spec/frontend/vue_shared/components/markdown/editor_mode_dropdown_spec.js b/spec/frontend/vue_shared/components/markdown/editor_mode_dropdown_spec.js
new file mode 100644
index 00000000000..34071775b9c
--- /dev/null
+++ b/spec/frontend/vue_shared/components/markdown/editor_mode_dropdown_spec.js
@@ -0,0 +1,58 @@
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import EditorModeDropdown from '~/vue_shared/components/markdown/editor_mode_dropdown.vue';
+
+describe('vue_shared/component/markdown/editor_mode_dropdown', () => {
+ let wrapper;
+
+ const createComponent = ({ value, size } = {}) => {
+ wrapper = shallowMount(EditorModeDropdown, {
+ propsData: {
+ value,
+ size,
+ },
+ });
+ };
+
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findDropdownItem = (text) =>
+ wrapper
+ .findAllComponents(GlDropdownItem)
+ .filter((item) => item.text().startsWith(text))
+ .at(0);
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe.each`
+ modeText | value | dropdownText | otherMode
+ ${'Rich text'} | ${'richText'} | ${'View markdown'} | ${'Markdown'}
+ ${'Markdown'} | ${'markdown'} | ${'View rich text'} | ${'Rich text'}
+ `('$modeText', ({ modeText, value, dropdownText, otherMode }) => {
+ beforeEach(() => {
+ createComponent({ value });
+ });
+
+ it('shows correct dropdown label', () => {
+ expect(findDropdown().props('text')).toEqual(dropdownText);
+ });
+
+ it('checks correct checked dropdown item', () => {
+ expect(findDropdownItem(modeText).props().isChecked).toBe(true);
+ expect(findDropdownItem(otherMode).props().isChecked).toBe(false);
+ });
+
+ it('emits event on click', () => {
+ findDropdownItem(modeText).vm.$emit('click');
+
+ expect(wrapper.emitted().input).toEqual([[value]]);
+ });
+ });
+
+ it('passes size to dropdown', () => {
+ createComponent({ size: 'small', value: 'markdown' });
+
+ expect(findDropdown().props('size')).toEqual('small');
+ });
+});
diff --git a/spec/frontend/vue_shared/components/markdown/field_spec.js b/spec/frontend/vue_shared/components/markdown/field_spec.js
index 285ea10c813..3b8e78bbadd 100644
--- a/spec/frontend/vue_shared/components/markdown/field_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/field_spec.js
@@ -37,7 +37,7 @@ describe('Markdown field component', () => {
axiosMock.restore();
});
- function createSubject({ lines = [], enablePreview = true } = {}) {
+ function createSubject({ lines = [], enablePreview = true, showContentEditorSwitcher } = {}) {
// We actually mount a wrapper component so that we can force Vue to rerender classes in order to test a regression
// caused by mixing Vanilla JS and Vue.
subject = mountExtended(
@@ -68,6 +68,7 @@ describe('Markdown field component', () => {
lines,
enablePreview,
restrictedToolBarItems,
+ showContentEditorSwitcher,
},
},
);
@@ -191,6 +192,7 @@ describe('Markdown field component', () => {
markdownDocsPath,
quickActionsDocsPath: '',
showCommentToolBar: true,
+ showContentEditorSwitcher: false,
});
});
});
@@ -342,4 +344,18 @@ describe('Markdown field component', () => {
restrictedToolBarItems,
);
});
+
+ describe('showContentEditorSwitcher', () => {
+ it('defaults to false', () => {
+ createSubject();
+
+ expect(findMarkdownToolbar().props('showContentEditorSwitcher')).toBe(false);
+ });
+
+ it('passes showContentEditorSwitcher', () => {
+ createSubject({ showContentEditorSwitcher: true });
+
+ expect(findMarkdownToolbar().props('showContentEditorSwitcher')).toBe(true);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js b/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
index 5f416db2676..e3df2cde1c1 100644
--- a/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/markdown_editor_spec.js
@@ -1,4 +1,3 @@
-import { GlSegmentedControl } from '@gitlab/ui';
import axios from 'axios';
import MockAdapter from 'axios-mock-adapter';
import { nextTick } from 'vue';
@@ -49,7 +48,6 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
},
});
};
- const findSegmentedControl = () => wrapper.findComponent(GlSegmentedControl);
const findMarkdownField = () => wrapper.findComponent(MarkdownField);
const findTextarea = () => wrapper.find('textarea');
const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
@@ -97,36 +95,28 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
expect(findTextarea().element.value).toBe(value);
});
- it('renders switch segmented control', () => {
+ it(`emits ${EDITING_MODE_CONTENT_EDITOR} event when enableContentEditor is emitted from the markdown field`, async () => {
buildWrapper();
- expect(findSegmentedControl().props()).toEqual({
- checked: EDITING_MODE_MARKDOWN_FIELD,
- options: [
- {
- text: expect.any(String),
- value: EDITING_MODE_MARKDOWN_FIELD,
- },
- {
- text: expect.any(String),
- value: EDITING_MODE_CONTENT_EDITOR,
- },
- ],
- });
- });
+ findMarkdownField().vm.$emit('enableContentEditor');
- describe.each`
- editingMode
- ${EDITING_MODE_CONTENT_EDITOR}
- ${EDITING_MODE_MARKDOWN_FIELD}
- `('when segmented control emits change event with $editingMode value', ({ editingMode }) => {
- it(`emits ${editingMode} event`, () => {
- buildWrapper();
+ await nextTick();
- findSegmentedControl().vm.$emit('change', editingMode);
+ expect(wrapper.emitted(EDITING_MODE_CONTENT_EDITOR)).toHaveLength(1);
+ });
- expect(wrapper.emitted(editingMode)).toHaveLength(1);
+ it(`emits ${EDITING_MODE_MARKDOWN_FIELD} event when enableMarkdownEditor is emitted from the content editor`, async () => {
+ buildWrapper({
+ stubs: { ContentEditor: stubComponent(ContentEditor) },
});
+
+ findMarkdownField().vm.$emit('enableContentEditor');
+
+ await nextTick();
+
+ findContentEditor().vm.$emit('enableMarkdownEditor');
+
+ expect(wrapper.emitted(EDITING_MODE_MARKDOWN_FIELD)).toHaveLength(1);
});
describe(`when editingMode is ${EDITING_MODE_MARKDOWN_FIELD}`, () => {
@@ -159,11 +149,10 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
expect(wrapper.emitted('keydown')).toHaveLength(1);
});
- describe(`when segmented control triggers input event with ${EDITING_MODE_CONTENT_EDITOR} value`, () => {
+ describe(`when markdown field triggers enableContentEditor event`, () => {
beforeEach(() => {
buildWrapper();
- findSegmentedControl().vm.$emit('input', EDITING_MODE_CONTENT_EDITOR);
- findSegmentedControl().vm.$emit('change', EDITING_MODE_CONTENT_EDITOR);
+ findMarkdownField().vm.$emit('enableContentEditor');
});
it('displays the content editor', () => {
@@ -202,7 +191,7 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
describe(`when editingMode is ${EDITING_MODE_CONTENT_EDITOR}`, () => {
beforeEach(() => {
buildWrapper();
- findSegmentedControl().vm.$emit('input', EDITING_MODE_CONTENT_EDITOR);
+ findMarkdownField().vm.$emit('enableContentEditor');
});
describe('when autofocus is true', () => {
@@ -234,9 +223,9 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
expect(wrapper.emitted('keydown')).toEqual([[event]]);
});
- describe(`when segmented control triggers input event with ${EDITING_MODE_MARKDOWN_FIELD} value`, () => {
+ describe(`when rich text editor triggers enableMarkdownEditor event`, () => {
beforeEach(() => {
- findSegmentedControl().vm.$emit('input', EDITING_MODE_MARKDOWN_FIELD);
+ findContentEditor().vm.$emit('enableMarkdownEditor');
});
it('hides the content editor', () => {
@@ -251,29 +240,5 @@ describe('vue_shared/component/markdown/markdown_editor', () => {
expect(findLocalStorageSync().props().value).toBe(EDITING_MODE_MARKDOWN_FIELD);
});
});
-
- describe('when content editor emits loading event', () => {
- beforeEach(() => {
- findContentEditor().vm.$emit('loading');
- });
-
- it('disables switch editing mode control', () => {
- // This is the only way that I found to check the segmented control is disabled
- expect(findSegmentedControl().find('input[disabled]').exists()).toBe(true);
- });
-
- describe.each`
- event
- ${'loadingSuccess'}
- ${'loadingError'}
- `('when content editor emits $event event', ({ event }) => {
- beforeEach(() => {
- findContentEditor().vm.$emit(event);
- });
- it('enables the switch editing mode control', () => {
- expect(findSegmentedControl().find('input[disabled]').exists()).toBe(false);
- });
- });
- });
});
});
diff --git a/spec/frontend/vue_shared/components/markdown/toolbar_spec.js b/spec/frontend/vue_shared/components/markdown/toolbar_spec.js
index f698794b951..b1a1dbbeb7a 100644
--- a/spec/frontend/vue_shared/components/markdown/toolbar_spec.js
+++ b/spec/frontend/vue_shared/components/markdown/toolbar_spec.js
@@ -1,5 +1,6 @@
import { mount } from '@vue/test-utils';
import Toolbar from '~/vue_shared/components/markdown/toolbar.vue';
+import EditorModeDropdown from '~/vue_shared/components/markdown/editor_mode_dropdown.vue';
describe('toolbar', () => {
let wrapper;
@@ -47,4 +48,18 @@ describe('toolbar', () => {
expect(wrapper.find('.comment-toolbar').exists()).toBe(true);
});
});
+
+ describe('with content editor switcher', () => {
+ beforeEach(() => {
+ createMountedWrapper({
+ showContentEditorSwitcher: true,
+ });
+ });
+
+ it('re-emits event from switcher', () => {
+ wrapper.findComponent(EditorModeDropdown).vm.$emit('input', 'richText');
+
+ expect(wrapper.emitted('enableContentEditor')).toEqual([[]]);
+ });
+ });
});
diff --git a/spec/frontend/vue_shared/components/runner_aws_deployments/__snapshots__/runner_aws_deployments_modal_spec.js.snap b/spec/frontend/vue_shared/components/runner_aws_deployments/__snapshots__/runner_aws_deployments_modal_spec.js.snap
deleted file mode 100644
index 2ea8985b16a..00000000000
--- a/spec/frontend/vue_shared/components/runner_aws_deployments/__snapshots__/runner_aws_deployments_modal_spec.js.snap
+++ /dev/null
@@ -1,177 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`RunnerAwsDeploymentsModal renders the modal 1`] = `
-<gl-modal-stub
- actionprimary="[object Object]"
- actionsecondary="[object Object]"
- arialabel=""
- dismisslabel="Close"
- modalclass=""
- modalid="runner-aws-deployments-modal"
- size="sm"
- title="Deploy GitLab Runner in AWS"
- titletag="h4"
->
- <p>
- Select your preferred option here. In the next step, you can choose the capacity for your runner in the AWS CloudFormation console.
- </p>
-
- <gl-form-radio-group-stub
- checked="[object Object]"
- disabledfield="disabled"
- htmlfield="html"
- label="Choose your preferred GitLab Runner"
- label-sr-only=""
- options=""
- textfield="text"
- valuefield="value"
- >
- <gl-form-radio-stub
- class="gl-py-5 gl-pl-8 gl-border-b"
- value="[object Object]"
- >
- <div
- class="gl-mt-n1 gl-pl-4 gl-pb-2 gl-font-weight-bold"
- >
-
- Amazon Linux 2 Docker HA with manual scaling and optional scheduling. Non-spot.
-
- <gl-accordion-stub
- class="gl-pt-3"
- headerlevel="3"
- >
- <gl-accordion-item-stub
- class="gl-font-weight-normal"
- headerclass=""
- title="More Details"
- titlevisible="Less Details"
- >
- <p
- class="gl-pt-2"
- >
- No spot. This is the default choice for Linux Docker executor.
- </p>
-
- <p
- class="gl-m-0"
- >
- A capacity of 1 enables warm HA through Auto Scaling group re-spawn. A capacity of 2 enables hot HA because the service is available even when a node is lost. A capacity of 3 or more enables hot HA and manual scaling of runner fleet.
- </p>
- </gl-accordion-item-stub>
- </gl-accordion-stub>
- </div>
- </gl-form-radio-stub>
- <gl-form-radio-stub
- class="gl-py-5 gl-pl-8 gl-border-b"
- value="[object Object]"
- >
- <div
- class="gl-mt-n1 gl-pl-4 gl-pb-2 gl-font-weight-bold"
- >
-
- Amazon Linux 2 Docker HA with manual scaling and optional scheduling. 100% spot.
-
- <gl-accordion-stub
- class="gl-pt-3"
- headerlevel="3"
- >
- <gl-accordion-item-stub
- class="gl-font-weight-normal"
- headerclass=""
- title="More Details"
- titlevisible="Less Details"
- >
- <p
- class="gl-pt-2"
- >
- 100% spot.
- </p>
-
- <p
- class="gl-m-0"
- >
- Capacity of 1 enables warm HA through Auto Scaling group re-spawn. Capacity of 2 enables hot HA because the service is available even when a node is lost. Capacity of 3 or more enables hot HA and manual scaling of runner fleet.
- </p>
- </gl-accordion-item-stub>
- </gl-accordion-stub>
- </div>
- </gl-form-radio-stub>
- <gl-form-radio-stub
- class="gl-py-5 gl-pl-8 gl-border-b"
- value="[object Object]"
- >
- <div
- class="gl-mt-n1 gl-pl-4 gl-pb-2 gl-font-weight-bold"
- >
-
- Windows 2019 Shell with manual scaling and optional scheduling. Non-spot.
-
- <gl-accordion-stub
- class="gl-pt-3"
- headerlevel="3"
- >
- <gl-accordion-item-stub
- class="gl-font-weight-normal"
- headerclass=""
- title="More Details"
- titlevisible="Less Details"
- >
- <p
- class="gl-pt-2"
- >
- No spot. Default choice for Windows Shell executor.
- </p>
-
- <p
- class="gl-m-0"
- >
- Capacity of 1 enables warm HA through Auto Scaling group re-spawn. Capacity of 2 enables hot HA because the service is available even when a node is lost. Capacity of 3 or more enables hot HA and manual scaling of runner fleet.
- </p>
- </gl-accordion-item-stub>
- </gl-accordion-stub>
- </div>
- </gl-form-radio-stub>
- <gl-form-radio-stub
- class="gl-py-5 gl-pl-8"
- value="[object Object]"
- >
- <div
- class="gl-mt-n1 gl-pl-4 gl-pb-2 gl-font-weight-bold"
- >
-
- Windows 2019 Shell with manual scaling and optional scheduling. 100% spot.
-
- <gl-accordion-stub
- class="gl-pt-3"
- headerlevel="3"
- >
- <gl-accordion-item-stub
- class="gl-font-weight-normal"
- headerclass=""
- title="More Details"
- titlevisible="Less Details"
- >
- <p
- class="gl-pt-2"
- >
- 100% spot.
- </p>
-
- <p
- class="gl-m-0"
- >
- Capacity of 1 enables warm HA through Auto Scaling group re-spawn. Capacity of 2 enables hot HA because the service is available even when a node is lost. Capacity of 3 or more enables hot HA and manual scaling of runner fleet.
- </p>
- </gl-accordion-item-stub>
- </gl-accordion-stub>
- </div>
- </gl-form-radio-stub>
- </gl-form-radio-group-stub>
-
- <p>
- <gl-sprintf-stub
- message="Don't see what you are looking for? See the full list of options, including a fully customizable option %{linkStart}here%{linkEnd}."
- />
- </p>
-</gl-modal-stub>
-`;
diff --git a/spec/frontend/vue_shared/components/runner_aws_deployments/runner_aws_deployments_modal_spec.js b/spec/frontend/vue_shared/components/runner_aws_deployments/runner_aws_deployments_modal_spec.js
index a9ba4946358..c8ca75787f1 100644
--- a/spec/frontend/vue_shared/components/runner_aws_deployments/runner_aws_deployments_modal_spec.js
+++ b/spec/frontend/vue_shared/components/runner_aws_deployments/runner_aws_deployments_modal_spec.js
@@ -1,30 +1,28 @@
-import { GlModal, GlFormRadio } from '@gitlab/ui';
+import { GlModal } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import { getBaseURL, visitUrl } from '~/lib/utils/url_utility';
-import { mockTracking } from 'helpers/tracking_helper';
-import {
- CF_BASE_URL,
- TEMPLATES_BASE_URL,
- EASY_BUTTONS,
-} from '~/vue_shared/components/runner_aws_deployments/constants';
+import { s__ } from '~/locale';
import RunnerAwsDeploymentsModal from '~/vue_shared/components/runner_aws_deployments/runner_aws_deployments_modal.vue';
+import RunnerAwsInstructions from '~/vue_shared/components/runner_instructions/instructions/runner_aws_instructions.vue';
jest.mock('~/lib/utils/url_utility', () => ({
...jest.requireActual('~/lib/utils/url_utility'),
visitUrl: jest.fn(),
}));
+const mockModalId = 'runner-aws-deployments-modal';
+
describe('RunnerAwsDeploymentsModal', () => {
let wrapper;
const findModal = () => wrapper.findComponent(GlModal);
- const findEasyButtons = () => wrapper.findAllComponents(GlFormRadio);
+ const findRunnerAwsInstructions = () => wrapper.findComponent(RunnerAwsInstructions);
- const createComponent = () => {
+ const createComponent = (options) => {
wrapper = shallowMount(RunnerAwsDeploymentsModal, {
propsData: {
- modalId: 'runner-aws-deployments-modal',
+ modalId: mockModalId,
},
+ ...options,
});
};
@@ -36,39 +34,39 @@ describe('RunnerAwsDeploymentsModal', () => {
wrapper.destroy();
});
- it('renders the modal', () => {
- expect(wrapper.element).toMatchSnapshot();
+ it('renders modal', () => {
+ expect(findModal().props()).toMatchObject({
+ size: 'sm',
+ modalId: mockModalId,
+ title: s__('Runners|Deploy GitLab Runner in AWS'),
+ });
+ expect(findModal().attributes()).toMatchObject({
+ 'hide-footer': '',
+ });
});
- it('should contain all easy buttons', () => {
- expect(findEasyButtons()).toHaveLength(EASY_BUTTONS.length);
+ it('renders modal contents', () => {
+ expect(findRunnerAwsInstructions().exists()).toBe(true);
});
- describe('first easy button', () => {
- it('should contain the correct description', () => {
- expect(findEasyButtons().at(0).text()).toContain(EASY_BUTTONS[0].description);
- });
-
- it('should contain the correct link', () => {
- const templateUrl = encodeURIComponent(TEMPLATES_BASE_URL + EASY_BUTTONS[0].templateName);
- const { stackName } = EASY_BUTTONS[0];
- const instanceUrl = encodeURIComponent(getBaseURL());
- const url = `${CF_BASE_URL}templateURL=${templateUrl}&stackName=${stackName}&param_3GITLABRunnerInstanceURL=${instanceUrl}`;
-
- findModal().vm.$emit('primary');
+  it('closes the modal when its contents trigger closing', () => {
+ const mockClose = jest.fn();
- expect(visitUrl).toHaveBeenCalledWith(url, true);
+ createComponent({
+ stubs: {
+ GlModal: {
+ template: '<div><slot/></div>',
+ methods: {
+ close: mockClose,
+ },
+ },
+ },
});
- it('should track an event when clicked', () => {
- const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ expect(mockClose).toHaveBeenCalledTimes(0);
- findModal().vm.$emit('primary');
+ findRunnerAwsInstructions().vm.$emit('close');
- expect(trackingSpy).toHaveBeenCalledTimes(1);
- expect(trackingSpy).toHaveBeenCalledWith(undefined, 'template_clicked', {
- label: EASY_BUTTONS[0].stackName,
- });
- });
+ expect(mockClose).toHaveBeenCalledTimes(1);
});
});
diff --git a/spec/frontend/vue_shared/components/runner_instructions/instructions/__snapshots__/runner_docker_instructions_spec.js.snap b/spec/frontend/vue_shared/components/runner_instructions/instructions/__snapshots__/runner_docker_instructions_spec.js.snap
new file mode 100644
index 00000000000..d14f66df8a1
--- /dev/null
+++ b/spec/frontend/vue_shared/components/runner_instructions/instructions/__snapshots__/runner_docker_instructions_spec.js.snap
@@ -0,0 +1,3 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`RunnerDockerInstructions renders contents 1`] = `"To install Runner in a container follow the instructions described in the GitLab documentation View installation instructions Close"`;
diff --git a/spec/frontend/vue_shared/components/runner_instructions/instructions/__snapshots__/runner_kubernetes_instructions_spec.js.snap b/spec/frontend/vue_shared/components/runner_instructions/instructions/__snapshots__/runner_kubernetes_instructions_spec.js.snap
new file mode 100644
index 00000000000..1172bf07dff
--- /dev/null
+++ b/spec/frontend/vue_shared/components/runner_instructions/instructions/__snapshots__/runner_kubernetes_instructions_spec.js.snap
@@ -0,0 +1,3 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`RunnerKubernetesInstructions renders contents 1`] = `"To install Runner in Kubernetes follow the instructions described in the GitLab documentation. View installation instructions Close"`;
diff --git a/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_aws_instructions_spec.js b/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_aws_instructions_spec.js
new file mode 100644
index 00000000000..4d566dbec0c
--- /dev/null
+++ b/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_aws_instructions_spec.js
@@ -0,0 +1,117 @@
+import {
+ GlAccordion,
+ GlAccordionItem,
+ GlButton,
+ GlFormRadio,
+ GlFormRadioGroup,
+ GlLink,
+ GlSprintf,
+} from '@gitlab/ui';
+import { getBaseURL, visitUrl } from '~/lib/utils/url_utility';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import { mockTracking } from 'helpers/tracking_helper';
+import {
+ AWS_README_URL,
+ AWS_CF_BASE_URL,
+ AWS_TEMPLATES_BASE_URL,
+ AWS_EASY_BUTTONS,
+} from '~/vue_shared/components/runner_instructions/constants';
+import RunnerAwsInstructions from '~/vue_shared/components/runner_instructions/instructions/runner_aws_instructions.vue';
+import { __ } from '~/locale';
+
+jest.mock('~/lib/utils/url_utility', () => ({
+ ...jest.requireActual('~/lib/utils/url_utility'),
+ visitUrl: jest.fn(),
+}));
+
+describe('RunnerAwsInstructions', () => {
+ let wrapper;
+
+ const findEasyButtonsRadioGroup = () => wrapper.findComponent(GlFormRadioGroup);
+ const findEasyButtons = () => wrapper.findAllComponents(GlFormRadio);
+ const findEasyButtonAt = (i) => findEasyButtons().at(i);
+ const findLink = () => wrapper.findComponent(GlLink);
+ const findOkButton = () =>
+ wrapper
+ .findAllComponents(GlButton)
+ .filter((w) => w.props('variant') === 'confirm')
+ .at(0);
+ const findCloseButton = () => wrapper.findByText(__('Close'));
+
+ const createComponent = () => {
+ wrapper = shallowMountExtended(RunnerAwsInstructions, {
+ stubs: {
+ GlSprintf,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should contain every button', () => {
+ expect(findEasyButtons()).toHaveLength(AWS_EASY_BUTTONS.length);
+ });
+
+ const AWS_EASY_BUTTONS_PARAMS = AWS_EASY_BUTTONS.map((val, idx) => ({ ...val, idx }));
+
+ describe.each(AWS_EASY_BUTTONS_PARAMS)(
+ 'easy button %#',
+ ({ idx, description, moreDetails1, moreDetails2, templateName, stackName }) => {
+ it('should contain button description', () => {
+ const text = findEasyButtonAt(idx).text();
+
+ expect(text).toContain(description);
+ expect(text).toContain(moreDetails1);
+ expect(text).toContain(moreDetails2);
+ });
+
+ it('should show more details', () => {
+ const accordion = findEasyButtonAt(idx).findComponent(GlAccordion);
+ const accordionItem = accordion.findComponent(GlAccordionItem);
+
+ expect(accordion.props('headerLevel')).toBe(3);
+ expect(accordionItem.props('title')).toBe(__('More Details'));
+ expect(accordionItem.props('titleVisible')).toBe(__('Less Details'));
+ });
+
+ describe('when clicked', () => {
+ let trackingSpy;
+
+ beforeEach(() => {
+ trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+
+ findEasyButtonsRadioGroup().vm.$emit('input', idx);
+ findOkButton().vm.$emit('click');
+ });
+
+ it('should contain the correct link', () => {
+ const templateUrl = encodeURIComponent(AWS_TEMPLATES_BASE_URL + templateName);
+ const instanceUrl = encodeURIComponent(getBaseURL());
+ const url = `${AWS_CF_BASE_URL}templateURL=${templateUrl}&stackName=${stackName}&param_3GITLABRunnerInstanceURL=${instanceUrl}`;
+
+ expect(visitUrl).toHaveBeenCalledTimes(1);
+ expect(visitUrl).toHaveBeenCalledWith(url, true);
+ });
+
+ it('should track an event when clicked', () => {
+ expect(trackingSpy).toHaveBeenCalledTimes(1);
+ expect(trackingSpy).toHaveBeenCalledWith(undefined, 'template_clicked', {
+ label: stackName,
+ });
+ });
+ });
+ },
+ );
+
+ it('displays link with more information', () => {
+ expect(findLink().attributes('href')).toBe(AWS_README_URL);
+ });
+
+ it('triggers the modal to close', () => {
+ findCloseButton().vm.$emit('click');
+
+ expect(wrapper.emitted('close')).toHaveLength(1);
+ });
+});
diff --git a/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_cli_instructions_spec.js b/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_cli_instructions_spec.js
new file mode 100644
index 00000000000..f9d700fe67f
--- /dev/null
+++ b/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_cli_instructions_spec.js
@@ -0,0 +1,169 @@
+import { GlAlert, GlLoadingIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { extendedWrapper } from 'helpers/vue_test_utils_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import getRunnerSetupInstructionsQuery from '~/vue_shared/components/runner_instructions/graphql/get_runner_setup.query.graphql';
+import RunnerCliInstructions from '~/vue_shared/components/runner_instructions/instructions/runner_cli_instructions.vue';
+
+import { mockRunnerPlatforms, mockInstructions, mockInstructionsWindows } from '../mock_data';
+
+Vue.use(VueApollo);
+
+jest.mock('@gitlab/ui/dist/utils');
+
+const mockPlatforms = mockRunnerPlatforms.data.runnerPlatforms.nodes.map(
+ ({ name, humanReadableName, architectures }) => ({
+ name,
+ humanReadableName,
+ architectures: architectures?.nodes || [],
+ }),
+);
+
+const [mockPlatform, mockPlatform2] = mockPlatforms;
+const mockArchitectures = mockPlatform.architectures;
+
+describe('RunnerCliInstructions component', () => {
+ let wrapper;
+ let fakeApollo;
+ let runnerSetupInstructionsHandler;
+
+ const findGlLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
+ const findAlert = () => wrapper.findComponent(GlAlert);
+ const findArchitectureDropdownItems = () => wrapper.findAllByTestId('architecture-dropdown-item');
+ const findBinaryDownloadButton = () => wrapper.findByTestId('binary-download-button');
+ const findBinaryInstructions = () => wrapper.findByTestId('binary-instructions');
+ const findRegisterCommand = () => wrapper.findByTestId('register-command');
+
+ const createComponent = ({ props, ...options } = {}) => {
+ const requestHandlers = [[getRunnerSetupInstructionsQuery, runnerSetupInstructionsHandler]];
+
+ fakeApollo = createMockApollo(requestHandlers);
+
+ wrapper = extendedWrapper(
+ shallowMount(RunnerCliInstructions, {
+ propsData: {
+ platform: mockPlatform,
+ registrationToken: 'MY_TOKEN',
+ ...props,
+ },
+ apolloProvider: fakeApollo,
+ ...options,
+ }),
+ );
+ };
+
+ beforeEach(() => {
+ runnerSetupInstructionsHandler = jest.fn().mockResolvedValue(mockInstructions);
+ });
+
+ afterEach(() => {
+ wrapper.destroy();
+ });
+
+ describe('when the instructions are shown', () => {
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
+ });
+
+    it('should not show alert', () => {
+ expect(findAlert().exists()).toBe(false);
+ });
+
+ it('should contain a number of dropdown items for the architecture options', () => {
+ expect(findArchitectureDropdownItems()).toHaveLength(
+ mockRunnerPlatforms.data.runnerPlatforms.nodes[0].architectures.nodes.length,
+ );
+ });
+
+ describe('should display instructions', () => {
+ const { installInstructions } = mockInstructions.data.runnerSetup;
+
+ it('runner instructions are requested', () => {
+ expect(runnerSetupInstructionsHandler).toHaveBeenCalledWith({
+ platform: 'linux',
+ architecture: 'amd64',
+ });
+ });
+
+ it('binary instructions are shown', async () => {
+ const instructions = findBinaryInstructions().text();
+
+ expect(instructions).toBe(installInstructions.trim());
+ });
+
+ it('register command is shown with a replaced token', async () => {
+ const command = findRegisterCommand().text();
+
+ expect(command).toBe(
+ 'sudo gitlab-runner register --url http://localhost/ --registration-token MY_TOKEN',
+ );
+ });
+
+ it('architecture download link is shown', () => {
+ expect(findBinaryDownloadButton().attributes('href')).toBe(
+ mockArchitectures[0].downloadLocation,
+ );
+ });
+ });
+
+ describe('after another platform and architecture are selected', () => {
+ beforeEach(async () => {
+ runnerSetupInstructionsHandler.mockResolvedValue(mockInstructionsWindows);
+
+ findArchitectureDropdownItems().at(1).vm.$emit('click');
+
+ wrapper.setProps({ platform: mockPlatform2 });
+ await waitForPromises();
+ });
+
+ it('runner instructions are requested', () => {
+ expect(runnerSetupInstructionsHandler).toHaveBeenLastCalledWith({
+ platform: mockPlatform2.name,
+ architecture: mockPlatform2.architectures[0].name,
+ });
+ });
+ });
+ });
+
+ describe('when a register token is not known', () => {
+ beforeEach(async () => {
+ createComponent({ props: { registrationToken: undefined } });
+ await waitForPromises();
+ });
+
+ it('register command is shown without a defined registration token', () => {
+ const instructions = findRegisterCommand().text();
+
+ expect(instructions).toBe(mockInstructions.data.runnerSetup.registerInstructions);
+ });
+ });
+
+ describe('when apollo is loading', () => {
+ it('should show a loading icon', async () => {
+ createComponent();
+
+ expect(findGlLoadingIcon().exists()).toBe(true);
+
+ await waitForPromises();
+
+ expect(findGlLoadingIcon().exists()).toBe(false);
+ });
+ });
+
+ describe('when instructions cannot be loaded', () => {
+ beforeEach(async () => {
+ runnerSetupInstructionsHandler.mockRejectedValue();
+
+ createComponent();
+ await waitForPromises();
+ });
+
+    it('should emit an error', () => {
+ expect(wrapper.emitted()).toEqual({ error: [[]] });
+ });
+ });
+});
diff --git a/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_docker_instructions_spec.js b/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_docker_instructions_spec.js
new file mode 100644
index 00000000000..2922d261b24
--- /dev/null
+++ b/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_docker_instructions_spec.js
@@ -0,0 +1,28 @@
+import { shallowMount } from '@vue/test-utils';
+
+import { GlButton } from '@gitlab/ui';
+import RunnerDockerInstructions from '~/vue_shared/components/runner_instructions/instructions/runner_docker_instructions.vue';
+
+describe('RunnerDockerInstructions', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(RunnerDockerInstructions, {});
+ };
+
+ const findButton = () => wrapper.findComponent(GlButton);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders contents', () => {
+ expect(wrapper.text().replace(/\s+/g, ' ')).toMatchSnapshot();
+ });
+
+ it('renders link', () => {
+ expect(findButton().attributes('href')).toBe(
+ 'https://docs.gitlab.com/runner/install/docker.html',
+ );
+ });
+});
diff --git a/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_kubernetes_instructions_spec.js b/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_kubernetes_instructions_spec.js
new file mode 100644
index 00000000000..0bfcc0e3d86
--- /dev/null
+++ b/spec/frontend/vue_shared/components/runner_instructions/instructions/runner_kubernetes_instructions_spec.js
@@ -0,0 +1,28 @@
+import { shallowMount } from '@vue/test-utils';
+
+import { GlButton } from '@gitlab/ui';
+import RunnerKubernetesInstructions from '~/vue_shared/components/runner_instructions/instructions/runner_kubernetes_instructions.vue';
+
+describe('RunnerKubernetesInstructions', () => {
+ let wrapper;
+
+ const createComponent = () => {
+ wrapper = shallowMount(RunnerKubernetesInstructions, {});
+ };
+
+ const findButton = () => wrapper.findComponent(GlButton);
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('renders contents', () => {
+ expect(wrapper.text().replace(/\s+/g, ' ')).toMatchSnapshot();
+ });
+
+ it('renders link', () => {
+ expect(findButton().attributes('href')).toBe(
+ 'https://docs.gitlab.com/runner/install/kubernetes.html',
+ );
+ });
+});
diff --git a/spec/frontend/vue_shared/components/runner_instructions/mock_data.js b/spec/frontend/vue_shared/components/runner_instructions/mock_data.js
index 79cacadd6af..add334f166c 100644
--- a/spec/frontend/vue_shared/components/runner_instructions/mock_data.js
+++ b/spec/frontend/vue_shared/components/runner_instructions/mock_data.js
@@ -1,5 +1,5 @@
-import mockGraphqlRunnerPlatforms from 'test_fixtures/graphql/runner_instructions/get_runner_platforms.query.graphql.json';
-import mockGraphqlInstructions from 'test_fixtures/graphql/runner_instructions/get_runner_setup.query.graphql.json';
-import mockGraphqlInstructionsWindows from 'test_fixtures/graphql/runner_instructions/get_runner_setup.query.graphql.windows.json';
+import mockRunnerPlatforms from 'test_fixtures/graphql/runner_instructions/get_runner_platforms.query.graphql.json';
+import mockInstructions from 'test_fixtures/graphql/runner_instructions/get_runner_setup.query.graphql.json';
+import mockInstructionsWindows from 'test_fixtures/graphql/runner_instructions/get_runner_setup.query.graphql.windows.json';
-export { mockGraphqlRunnerPlatforms, mockGraphqlInstructions, mockGraphqlInstructionsWindows };
+export { mockRunnerPlatforms, mockInstructions, mockInstructionsWindows };
diff --git a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
index ae9157591c5..19f2dd137ff 100644
--- a/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
+++ b/spec/frontend/vue_shared/components/runner_instructions/runner_instructions_modal_spec.js
@@ -1,4 +1,4 @@
-import { GlAlert, GlModal, GlButton, GlLoadingIcon, GlSkeletonLoader } from '@gitlab/ui';
+import { GlAlert, GlModal, GlButton, GlSkeletonLoader } from '@gitlab/ui';
import { GlBreakpointInstance as bp } from '@gitlab/ui/dist/utils';
import { shallowMount } from '@vue/test-utils';
import Vue, { nextTick } from 'vue';
@@ -6,15 +6,13 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import { extendedWrapper } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
-import getRunnerPlatformsQuery from '~/vue_shared/components/runner_instructions/graphql/queries/get_runner_platforms.query.graphql';
-import getRunnerSetupInstructionsQuery from '~/vue_shared/components/runner_instructions/graphql/queries/get_runner_setup.query.graphql';
+import getRunnerPlatformsQuery from '~/vue_shared/components/runner_instructions/graphql/get_runner_platforms.query.graphql';
import RunnerInstructionsModal from '~/vue_shared/components/runner_instructions/runner_instructions_modal.vue';
+import RunnerCliInstructions from '~/vue_shared/components/runner_instructions/instructions/runner_cli_instructions.vue';
+import RunnerDockerInstructions from '~/vue_shared/components/runner_instructions/instructions/runner_docker_instructions.vue';
+import RunnerKubernetesInstructions from '~/vue_shared/components/runner_instructions/instructions/runner_kubernetes_instructions.vue';
-import {
- mockGraphqlRunnerPlatforms,
- mockGraphqlInstructions,
- mockGraphqlInstructionsWindows,
-} from './mock_data';
+import { mockRunnerPlatforms } from './mock_data';
Vue.use(VueApollo);
@@ -40,24 +38,16 @@ describe('RunnerInstructionsModal component', () => {
let wrapper;
let fakeApollo;
let runnerPlatformsHandler;
- let runnerSetupInstructionsHandler;
const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
- const findGlLoadingIcon = () => wrapper.findComponent(GlLoadingIcon);
const findAlert = () => wrapper.findComponent(GlAlert);
const findModal = () => wrapper.findComponent(GlModal);
const findPlatformButtonGroup = () => wrapper.findByTestId('platform-buttons');
const findPlatformButtons = () => findPlatformButtonGroup().findAllComponents(GlButton);
- const findArchitectureDropdownItems = () => wrapper.findAllByTestId('architecture-dropdown-item');
- const findBinaryDownloadButton = () => wrapper.findByTestId('binary-download-button');
- const findBinaryInstructions = () => wrapper.findByTestId('binary-instructions');
- const findRegisterCommand = () => wrapper.findByTestId('register-command');
+ const findRunnerCliInstructions = () => wrapper.findComponent(RunnerCliInstructions);
const createComponent = ({ props, shown = true, ...options } = {}) => {
- const requestHandlers = [
- [getRunnerPlatformsQuery, runnerPlatformsHandler],
- [getRunnerSetupInstructionsQuery, runnerSetupInstructionsHandler],
- ];
+ const requestHandlers = [[getRunnerPlatformsQuery, runnerPlatformsHandler]];
fakeApollo = createMockApollo(requestHandlers);
@@ -80,8 +70,7 @@ describe('RunnerInstructionsModal component', () => {
};
beforeEach(() => {
- runnerPlatformsHandler = jest.fn().mockResolvedValue(mockGraphqlRunnerPlatforms);
- runnerSetupInstructionsHandler = jest.fn().mockResolvedValue(mockGraphqlInstructions);
+ runnerPlatformsHandler = jest.fn().mockResolvedValue(mockRunnerPlatforms);
});
afterEach(() => {
@@ -103,90 +92,15 @@ describe('RunnerInstructionsModal component', () => {
const buttons = findPlatformButtons();
- expect(buttons).toHaveLength(mockGraphqlRunnerPlatforms.data.runnerPlatforms.nodes.length);
+ expect(buttons).toHaveLength(mockRunnerPlatforms.data.runnerPlatforms.nodes.length);
});
- it('should contain a number of dropdown items for the architecture options', () => {
- expect(findArchitectureDropdownItems()).toHaveLength(
- mockGraphqlRunnerPlatforms.data.runnerPlatforms.nodes[0].architectures.nodes.length,
- );
- });
-
- describe('should display default instructions', () => {
- const { installInstructions } = mockGraphqlInstructions.data.runnerSetup;
-
- it('runner instructions are requested', () => {
- expect(runnerSetupInstructionsHandler).toHaveBeenCalledWith({
- platform: 'linux',
- architecture: 'amd64',
- });
- });
-
- it('binary instructions are shown', async () => {
- const instructions = findBinaryInstructions().text();
-
- expect(instructions).toBe(installInstructions.trim());
- });
-
- it('register command is shown with a replaced token', async () => {
- const command = findRegisterCommand().text();
-
- expect(command).toBe(
- 'sudo gitlab-runner register --url http://localhost/ --registration-token MY_TOKEN',
- );
- });
- });
-
- describe('after a platform and architecture are selected', () => {
- const windowsIndex = 2;
- const { installInstructions } = mockGraphqlInstructionsWindows.data.runnerSetup;
-
- beforeEach(async () => {
- runnerSetupInstructionsHandler.mockResolvedValue(mockGraphqlInstructionsWindows);
-
- findPlatformButtons().at(windowsIndex).vm.$emit('click');
- await waitForPromises();
- });
+ it('should display architecture options', () => {
+ const { architectures } = findRunnerCliInstructions().props('platform');
- it('runner instructions are requested', () => {
- expect(runnerSetupInstructionsHandler).toHaveBeenLastCalledWith({
- platform: 'windows',
- architecture: 'amd64',
- });
- });
-
- it('architecture download link is updated', () => {
- const architectures =
- mockGraphqlRunnerPlatforms.data.runnerPlatforms.nodes[windowsIndex].architectures.nodes;
-
- expect(findBinaryDownloadButton().attributes('href')).toBe(
- architectures[0].downloadLocation,
- );
- });
-
- it('other binary instructions are shown', () => {
- const instructions = findBinaryInstructions().text();
-
- expect(instructions).toBe(installInstructions.trim());
- });
-
- it('register command is shown', () => {
- const command = findRegisterCommand().text();
-
- expect(command).toBe(
- './gitlab-runner.exe register --url http://localhost/ --registration-token MY_TOKEN',
- );
- });
-
- it('runner instructions are requested with another architecture', async () => {
- findArchitectureDropdownItems().at(1).vm.$emit('click');
- await waitForPromises();
-
- expect(runnerSetupInstructionsHandler).toHaveBeenLastCalledWith({
- platform: 'windows',
- architecture: '386',
- });
- });
+ expect(architectures).toEqual(
+ mockRunnerPlatforms.data.runnerPlatforms.nodes[0].architectures.nodes,
+ );
});
describe('when the modal resizes', () => {
@@ -206,16 +120,14 @@ describe('RunnerInstructionsModal component', () => {
});
});
- describe('when a register token is not known', () => {
+ describe.each([null, 'DEFINED'])('when registration token is %p', (token) => {
beforeEach(async () => {
- createComponent({ props: { registrationToken: undefined } });
+ createComponent({ props: { registrationToken: token } });
await waitForPromises();
});
     it('passes the registration token to the CLI instructions', () => {
- const instructions = findRegisterCommand().text();
-
- expect(instructions).toBe(mockGraphqlInstructions.data.runnerSetup.registerInstructions);
+ expect(findRunnerCliInstructions().props('registrationToken')).toBe(token);
});
});
@@ -225,21 +137,33 @@ describe('RunnerInstructionsModal component', () => {
await waitForPromises();
});
- it('runner instructions for the default selected platform are requested', () => {
- expect(runnerSetupInstructionsHandler).toHaveBeenLastCalledWith({
- platform: 'osx',
- architecture: 'amd64',
- });
+ it('should preselect', () => {
+ const selected = findPlatformButtons()
+ .filter((btn) => btn.props('selected'))
+ .at(0);
+
+ expect(selected.text()).toBe('macOS');
});
- it('sets the focus on the default selected platform', () => {
- const findOsxPlatformButton = () => wrapper.findComponent({ ref: 'osx' });
+ it('runner instructions for the default selected platform are requested', () => {
+ const { name } = findRunnerCliInstructions().props('platform');
- findOsxPlatformButton().element.focus = jest.fn();
+ expect(name).toBe('osx');
+ });
+ });
- findModal().vm.$emit('shown');
+ describe.each`
+ platform | component
+ ${'docker'} | ${RunnerDockerInstructions}
+ ${'kubernetes'} | ${RunnerKubernetesInstructions}
+ `('with platform "$platform"', ({ platform, component }) => {
+ beforeEach(async () => {
+ createComponent({ props: { defaultPlatformName: platform } });
+ await waitForPromises();
+ });
- expect(findOsxPlatformButton().element.focus).toHaveBeenCalled();
+ it(`runner instructions for ${platform} are shown`, () => {
+ expect(wrapper.findComponent(component).exists()).toBe(true);
});
});
@@ -251,7 +175,6 @@ describe('RunnerInstructionsModal component', () => {
it('does not fetch instructions', () => {
expect(runnerPlatformsHandler).not.toHaveBeenCalled();
- expect(runnerSetupInstructionsHandler).not.toHaveBeenCalled();
});
});
@@ -259,43 +182,41 @@ describe('RunnerInstructionsModal component', () => {
it('should show a skeleton loader', async () => {
createComponent();
await nextTick();
- await nextTick();
expect(findSkeletonLoader().exists()).toBe(true);
- expect(findGlLoadingIcon().exists()).toBe(false);
-
- // wait on fetch of both `platforms` and `instructions`
- await nextTick();
- await nextTick();
-
- expect(findGlLoadingIcon().exists()).toBe(true);
});
it('once loaded, should not show a loading state', async () => {
createComponent();
-
await waitForPromises();
expect(findSkeletonLoader().exists()).toBe(false);
- expect(findGlLoadingIcon().exists()).toBe(false);
});
});
- describe('when instructions cannot be loaded', () => {
- beforeEach(async () => {
- runnerSetupInstructionsHandler.mockRejectedValue();
+ describe('errors', () => {
+ it('should show an alert when platforms cannot be loaded', async () => {
+ runnerPlatformsHandler.mockRejectedValue();
createComponent();
await waitForPromises();
- });
- it('should show alert', () => {
expect(findAlert().exists()).toBe(true);
});
- it('should not show instructions', () => {
- expect(findBinaryInstructions().exists()).toBe(false);
- expect(findRegisterCommand().exists()).toBe(false);
+ it('should show alert when instructions cannot be loaded', async () => {
+ createComponent();
+ await waitForPromises();
+
+ findRunnerCliInstructions().vm.$emit('error');
+ await waitForPromises();
+
+ expect(findAlert().exists()).toBe(true);
+
+ findAlert().vm.$emit('dismiss');
+ await nextTick();
+
+ expect(findAlert().exists()).toBe(false);
});
});
@@ -312,14 +233,16 @@ describe('RunnerInstructionsModal component', () => {
describe('show()', () => {
let mockShow;
+ let mockClose;
beforeEach(() => {
mockShow = jest.fn();
+ mockClose = jest.fn();
createComponent({
shown: false,
stubs: {
- GlModal: getGlModalStub({ show: mockShow }),
+ GlModal: getGlModalStub({ show: mockShow, close: mockClose }),
},
});
});
@@ -329,6 +252,12 @@ describe('RunnerInstructionsModal component', () => {
expect(mockShow).toHaveBeenCalledTimes(1);
});
+
+ it('delegates close()', () => {
+ wrapper.vm.close();
+
+ expect(mockClose).toHaveBeenCalledTimes(1);
+ });
});
});
});
diff --git a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
index 33f370efdfa..5461d38599d 100644
--- a/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
+++ b/spec/frontend/vue_shared/components/source_viewer/source_viewer_spec.js
@@ -90,6 +90,17 @@ describe('Source Viewer component', () => {
});
});
+ describe('legacy fallbacks', () => {
+ it('tracks a fallback event and emits an error when viewing python files', () => {
+ const fallbackLanguage = 'python';
+ const eventData = { label: EVENT_LABEL_FALLBACK, property: fallbackLanguage };
+ createComponent({ language: fallbackLanguage });
+
+ expect(Tracking.event).toHaveBeenCalledWith(undefined, EVENT_ACTION, eventData);
+ expect(wrapper.emitted('error')).toHaveLength(1);
+ });
+ });
+
describe('highlight.js', () => {
beforeEach(() => createComponent({ language: mappedLanguage }));
@@ -114,10 +125,10 @@ describe('Source Viewer component', () => {
});
it('correctly maps languages starting with uppercase', async () => {
- await createComponent({ language: 'Python3' });
- const languageDefinition = await import(`highlight.js/lib/languages/python`);
+ await createComponent({ language: 'Ruby' });
+ const languageDefinition = await import(`highlight.js/lib/languages/ruby`);
- expect(hljs.registerLanguage).toHaveBeenCalledWith('python', languageDefinition.default);
+ expect(hljs.registerLanguage).toHaveBeenCalledWith('ruby', languageDefinition.default);
});
it('highlights the first chunk', () => {
diff --git a/spec/frontend/vue_shared/components/timezone_dropdown/timezone_dropdown_spec.js b/spec/frontend/vue_shared/components/timezone_dropdown/timezone_dropdown_spec.js
index e5f56c63031..c8351ed61d7 100644
--- a/spec/frontend/vue_shared/components/timezone_dropdown/timezone_dropdown_spec.js
+++ b/spec/frontend/vue_shared/components/timezone_dropdown/timezone_dropdown_spec.js
@@ -1,4 +1,5 @@
import { GlDropdownItem, GlDropdown } from '@gitlab/ui';
+import { nextTick } from 'vue';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import TimezoneDropdown from '~/vue_shared/components/timezone_dropdown/timezone_dropdown.vue';
import { formatTimezone } from '~/lib/utils/datetime_utility';
@@ -105,7 +106,14 @@ describe('Deploy freeze timezone dropdown', () => {
});
it('renders selected time zone as dropdown label', () => {
- expect(wrapper.findComponent(GlDropdown).props().text).toBe('[UTC + 2] Berlin');
+ expect(wrapper.findComponent(GlDropdown).props().text).toBe('[UTC+2] Berlin');
+ });
+
+ it('adds a checkmark to the selected option', async () => {
+ const selectedTZOption = findAllDropdownItems().at(0);
+ selectedTZOption.vm.$emit('click');
+ await nextTick();
+ expect(selectedTZOption.attributes('ischecked')).toBe('true');
});
});
});
diff --git a/spec/frontend/vue_shared/components/web_ide_link_spec.js b/spec/frontend/vue_shared/components/web_ide_link_spec.js
index 3b0f0fe6e73..2a0d2089fe3 100644
--- a/spec/frontend/vue_shared/components/web_ide_link_spec.js
+++ b/spec/frontend/vue_shared/components/web_ide_link_spec.js
@@ -7,15 +7,19 @@ import WebIdeLink, {
i18n,
PREFERRED_EDITOR_RESET_KEY,
PREFERRED_EDITOR_KEY,
- KEY_WEB_IDE,
} from '~/vue_shared/components/web_ide_link.vue';
import ConfirmForkModal from '~/vue_shared/components/confirm_fork_modal.vue';
import UserCalloutDismisser from '~/vue_shared/components/user_callout_dismisser.vue';
+import { KEY_WEB_IDE } from '~/vue_shared/components/constants';
import { stubComponent } from 'helpers/stub_component';
import { shallowMountExtended, mountExtended } from 'helpers/vue_test_utils_helper';
import { useLocalStorageSpy } from 'helpers/local_storage_helper';
+import { visitUrl } from '~/lib/utils/url_utility';
+
+jest.mock('~/lib/utils/url_utility');
+
const TEST_EDIT_URL = '/gitlab-test/test/-/edit/main/';
const TEST_WEB_IDE_URL = '/-/ide/project/gitlab-test/test/edit/main/-/';
const TEST_GITPOD_URL = 'https://gitpod.test/';
@@ -52,6 +56,7 @@ const ACTION_WEB_IDE = {
'data-track-action': 'click_consolidated_edit_ide',
'data-track-label': 'web_ide',
},
+ handle: expect.any(Function),
};
const ACTION_WEB_IDE_CONFIRM_FORK = {
...ACTION_WEB_IDE,
@@ -258,6 +263,14 @@ describe('Web IDE link component', () => {
selectedKey: ACTION_PIPELINE_EDITOR.key,
});
});
+
+  it('opens the web IDE in a new tab when the button is clicked', async () => {
+ findActionsButton().props('actions')[1].handle({
+ preventDefault: jest.fn(),
+ });
+ await nextTick();
+ expect(visitUrl).toHaveBeenCalledWith(ACTION_WEB_IDE.href, true);
+ });
});
describe('with multiple actions', () => {
diff --git a/spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js b/spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js
index e5594b6d37e..159be4cd1ef 100644
--- a/spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js
+++ b/spec/frontend/vue_shared/issuable/show/components/issuable_edit_form_spec.js
@@ -5,9 +5,12 @@ import { nextTick } from 'vue';
import IssuableEditForm from '~/vue_shared/issuable/show/components/issuable_edit_form.vue';
import IssuableEventHub from '~/vue_shared/issuable/show/event_hub';
import MarkdownField from '~/vue_shared/components/markdown/field.vue';
+import Autosave from '~/autosave';
import { mockIssuableShowProps, mockIssuable } from '../mock_data';
+jest.mock('~/autosave');
+
const issuableEditFormProps = {
issuable: mockIssuable,
...mockIssuableShowProps,
@@ -36,10 +39,12 @@ describe('IssuableEditForm', () => {
beforeEach(() => {
wrapper = createComponent();
+ jest.spyOn(Autosave.prototype, 'reset');
});
afterEach(() => {
wrapper.destroy();
+ jest.resetAllMocks();
});
describe('watch', () => {
@@ -100,21 +105,18 @@ describe('IssuableEditForm', () => {
describe('methods', () => {
describe('initAutosave', () => {
- it('initializes `autosaveTitle` and `autosaveDescription` props', () => {
- expect(wrapper.vm.autosaveTitle).toBeDefined();
- expect(wrapper.vm.autosaveDescription).toBeDefined();
+ it('initializes autosave', () => {
+ expect(Autosave.mock.calls).toEqual([
+ [expect.any(Element), ['/', '', 'title']],
+ [expect.any(Element), ['/', '', 'description']],
+ ]);
});
});
describe('resetAutosave', () => {
- it('calls `reset` on `autosaveTitle` and `autosaveDescription` props', () => {
- jest.spyOn(wrapper.vm.autosaveTitle, 'reset').mockImplementation(jest.fn);
- jest.spyOn(wrapper.vm.autosaveDescription, 'reset').mockImplementation(jest.fn);
-
- wrapper.vm.resetAutosave();
-
- expect(wrapper.vm.autosaveTitle.reset).toHaveBeenCalled();
- expect(wrapper.vm.autosaveDescription.reset).toHaveBeenCalled();
+      it('resets title and description on the "update.issuable" event', () => {
+ IssuableEventHub.$emit('update.issuable');
+ expect(Autosave.prototype.reset.mock.calls).toEqual([[], []]);
});
});
});
diff --git a/spec/frontend/work_items/components/notes/__snapshots__/work_item_note_body_spec.js.snap b/spec/frontend/work_items/components/notes/__snapshots__/work_item_note_body_spec.js.snap
new file mode 100644
index 00000000000..52838dcd0bc
--- /dev/null
+++ b/spec/frontend/work_items/components/notes/__snapshots__/work_item_note_body_spec.js.snap
@@ -0,0 +1,9 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`Work Item Note Body should have the wrapper to show the note body 1`] = `
+"<div data-testid=\\"work-item-note-body\\" class=\\"note-text md\\">
+ <p dir=\\"auto\\" data-sourcepos=\\"1:1-1:76\\">
+ <gl-emoji data-unicode-version=\\"6.0\\" data-name=\\"wave\\" title=\\"waving hand sign\\">👋</gl-emoji> Hi <a title=\\"Sherie Nitzsche\\" class=\\"gfm gfm-project_member js-user-link\\" data-placement=\\"top\\" data-container=\\"body\\" data-user=\\"3\\" data-reference-type=\\"user\\" href=\\"/fredda.brekke\\">@fredda.brekke</a> How are you ? what do you think about this ? <gl-emoji data-unicode-version=\\"6.0\\" data-name=\\"pray\\" title=\\"person with folded hands\\">🙏</gl-emoji>
+ </p>
+</div>"
+`;
diff --git a/spec/frontend/work_items/components/notes/activity_filter_spec.js b/spec/frontend/work_items/components/notes/activity_filter_spec.js
new file mode 100644
index 00000000000..eb4bcbf942b
--- /dev/null
+++ b/spec/frontend/work_items/components/notes/activity_filter_spec.js
@@ -0,0 +1,74 @@
+import { GlDropdown, GlDropdownItem } from '@gitlab/ui';
+import { nextTick } from 'vue';
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import ActivityFilter from '~/work_items/components/notes/activity_filter.vue';
+import LocalStorageSync from '~/vue_shared/components/local_storage_sync.vue';
+import { ASC, DESC } from '~/notes/constants';
+
+import { mockTracking } from 'helpers/tracking_helper';
+import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
+
+describe('Activity Filter', () => {
+ let wrapper;
+
+ const findLocalStorageSync = () => wrapper.findComponent(LocalStorageSync);
+ const findDropdown = () => wrapper.findComponent(GlDropdown);
+ const findAllDropdownItems = () => wrapper.findAllComponents(GlDropdownItem);
+ const findNewestFirstItem = () => wrapper.findByTestId('js-newest-first');
+
+ const createComponent = ({ sortOrder = ASC, loading = false, workItemType = 'Task' } = {}) => {
+ wrapper = shallowMountExtended(ActivityFilter, {
+ propsData: {
+ sortOrder,
+ loading,
+ workItemType,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ describe('default', () => {
+ it('has a dropdown with 2 options', () => {
+ expect(findDropdown().exists()).toBe(true);
+ expect(findAllDropdownItems()).toHaveLength(ActivityFilter.SORT_OPTIONS.length);
+ });
+
+ it('has local storage sync with the correct props', () => {
+ expect(findLocalStorageSync().props('asString')).toBe(true);
+ });
+
+ it('emits `updateSavedSortOrder` event when update is emitted', async () => {
+ findLocalStorageSync().vm.$emit('input', ASC);
+
+ await nextTick();
+ expect(wrapper.emitted('updateSavedSortOrder')).toHaveLength(1);
+ expect(wrapper.emitted('updateSavedSortOrder')).toEqual([[ASC]]);
+ });
+ });
+
+ describe('when asc', () => {
+ describe('when the dropdown is clicked', () => {
+      it('emits the new sort order and tracks the change', async () => {
+ const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+ findNewestFirstItem().vm.$emit('click');
+ await nextTick();
+
+ expect(wrapper.emitted('changeSortOrder')).toHaveLength(1);
+ expect(wrapper.emitted('changeSortOrder')).toEqual([[DESC]]);
+
+ expect(trackingSpy).toHaveBeenCalledWith(
+ TRACKING_CATEGORY_SHOW,
+ 'notes_sort_order_changed',
+ {
+ category: TRACKING_CATEGORY_SHOW,
+ label: 'item_track_notes_sorting',
+ property: 'type_Task',
+ },
+ );
+ });
+ });
+ });
+});
diff --git a/spec/frontend/work_items/components/notes/work_item_note_body_spec.js b/spec/frontend/work_items/components/notes/work_item_note_body_spec.js
new file mode 100644
index 00000000000..4fcbcfcaf30
--- /dev/null
+++ b/spec/frontend/work_items/components/notes/work_item_note_body_spec.js
@@ -0,0 +1,32 @@
+import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
+import WorkItemNoteBody from '~/work_items/components/notes/work_item_note_body.vue';
+import NoteEditedText from '~/notes/components/note_edited_text.vue';
+import { mockWorkItemCommentNote } from 'jest/work_items/mock_data';
+
+describe('Work Item Note Body', () => {
+ let wrapper;
+
+ const findNoteBody = () => wrapper.findByTestId('work-item-note-body');
+ const findNoteEditedText = () => wrapper.findComponent(NoteEditedText);
+
+ const createComponent = ({ note = mockWorkItemCommentNote } = {}) => {
+ wrapper = shallowMountExtended(WorkItemNoteBody, {
+ propsData: {
+ note,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+ it('should have the wrapper to show the note body', () => {
+ expect(findNoteBody().exists()).toBe(true);
+ expect(findNoteBody().html()).toMatchSnapshot();
+ });
+
+ it('should not show the edited text when the value is not present', () => {
+ expect(findNoteEditedText().exists()).toBe(false);
+ });
+});
diff --git a/spec/frontend/work_items/components/notes/work_item_note_spec.js b/spec/frontend/work_items/components/notes/work_item_note_spec.js
new file mode 100644
index 00000000000..7257d5c8023
--- /dev/null
+++ b/spec/frontend/work_items/components/notes/work_item_note_spec.js
@@ -0,0 +1,53 @@
+import { GlAvatarLink, GlAvatar } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import TimelineEntryItem from '~/vue_shared/components/notes/timeline_entry_item.vue';
+import WorkItemNote from '~/work_items/components/notes/work_item_note.vue';
+import NoteBody from '~/work_items/components/notes/work_item_note_body.vue';
+import NoteHeader from '~/notes/components/note_header.vue';
+import { mockWorkItemCommentNote } from 'jest/work_items/mock_data';
+
+describe('Work Item Note', () => {
+ let wrapper;
+
+ const findTimelineEntryItem = () => wrapper.findComponent(TimelineEntryItem);
+ const findNoteHeader = () => wrapper.findComponent(NoteHeader);
+ const findNoteBody = () => wrapper.findComponent(NoteBody);
+ const findAvatarLink = () => wrapper.findComponent(GlAvatarLink);
+ const findAvatar = () => wrapper.findComponent(GlAvatar);
+
+ const createComponent = ({ note = mockWorkItemCommentNote } = {}) => {
+ wrapper = shallowMount(WorkItemNote, {
+ propsData: {
+ note,
+ },
+ });
+ };
+
+ beforeEach(() => {
+ createComponent();
+ });
+
+  it('should be wrapped inside the timeline entry item', () => {
+ expect(findTimelineEntryItem().exists()).toBe(true);
+ });
+
+ it('should have the author avatar of the work item note', () => {
+ expect(findAvatarLink().exists()).toBe(true);
+ expect(findAvatarLink().attributes('href')).toBe(mockWorkItemCommentNote.author.webUrl);
+
+ expect(findAvatar().exists()).toBe(true);
+ expect(findAvatar().props('src')).toBe(mockWorkItemCommentNote.author.avatarUrl);
+ expect(findAvatar().props('entityName')).toBe(mockWorkItemCommentNote.author.username);
+ });
+
+ it('has note header', () => {
+ expect(findNoteHeader().exists()).toBe(true);
+ expect(findNoteHeader().props('author')).toEqual(mockWorkItemCommentNote.author);
+ expect(findNoteHeader().props('createdAt')).toBe(mockWorkItemCommentNote.createdAt);
+ });
+
+ it('has note body', () => {
+ expect(findNoteBody().exists()).toBe(true);
+ expect(findNoteBody().props('note')).toEqual(mockWorkItemCommentNote);
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_comment_form_spec.js b/spec/frontend/work_items/components/work_item_comment_form_spec.js
new file mode 100644
index 00000000000..07c00119398
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_comment_form_spec.js
@@ -0,0 +1,205 @@
+import { GlButton } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import Vue from 'vue';
+import VueApollo from 'vue-apollo';
+import createMockApollo from 'helpers/mock_apollo_helper';
+import { mockTracking } from 'helpers/tracking_helper';
+import waitForPromises from 'helpers/wait_for_promises';
+import { updateDraft } from '~/lib/utils/autosave';
+import MarkdownField from '~/vue_shared/components/markdown/field.vue';
+import MarkdownEditor from '~/vue_shared/components/markdown/markdown_editor.vue';
+import WorkItemCommentForm from '~/work_items/components/work_item_comment_form.vue';
+import WorkItemCommentLocked from '~/work_items/components/work_item_comment_locked.vue';
+import createNoteMutation from '~/work_items/graphql/create_work_item_note.mutation.graphql';
+import { TRACKING_CATEGORY_SHOW } from '~/work_items/constants';
+import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
+import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
+import {
+ workItemResponseFactory,
+ workItemQueryResponse,
+ projectWorkItemResponse,
+ createWorkItemNoteResponse,
+} from '../mock_data';
+
+jest.mock('~/lib/utils/confirm_via_gl_modal/confirm_via_gl_modal');
+jest.mock('~/lib/utils/autosave');
+
+const workItemId = workItemQueryResponse.data.workItem.id;
+
+describe('WorkItemCommentForm', () => {
+ let wrapper;
+
+ Vue.use(VueApollo);
+
+ const mutationSuccessHandler = jest.fn().mockResolvedValue(createWorkItemNoteResponse);
+ const workItemByIidResponseHandler = jest.fn().mockResolvedValue(projectWorkItemResponse);
+ let workItemResponseHandler;
+
+ const findMarkdownEditor = () => wrapper.findComponent(MarkdownEditor);
+
+ const setText = (newText) => {
+ return findMarkdownEditor().vm.$emit('input', newText);
+ };
+
+ const clickSave = () =>
+ wrapper
+ .findAllComponents(GlButton)
+ .filter((button) => button.text().startsWith('Comment'))
+ .at(0)
+ .vm.$emit('click', {});
+
+ const createComponent = async ({
+ mutationHandler = mutationSuccessHandler,
+ canUpdate = true,
+ workItemResponse = workItemResponseFactory({ canUpdate }),
+ queryVariables = { id: workItemId },
+ fetchByIid = false,
+ signedIn = true,
+ isEditing = true,
+ } = {}) => {
+ workItemResponseHandler = jest.fn().mockResolvedValue(workItemResponse);
+
+ if (signedIn) {
+ window.gon.current_user_id = '1';
+ window.gon.current_user_avatar_url = 'avatar.png';
+ }
+
+ const { id } = workItemQueryResponse.data.workItem;
+ wrapper = shallowMount(WorkItemCommentForm, {
+ apolloProvider: createMockApollo([
+ [workItemQuery, workItemResponseHandler],
+ [createNoteMutation, mutationHandler],
+ [workItemByIidQuery, workItemByIidResponseHandler],
+ ]),
+ propsData: {
+ workItemId: id,
+ fullPath: 'test-project-path',
+ queryVariables,
+ fetchByIid,
+ },
+ stubs: {
+ MarkdownField,
+ WorkItemCommentLocked,
+ },
+ });
+
+ await waitForPromises();
+
+ if (isEditing) {
+ wrapper.findComponent(GlButton).vm.$emit('click');
+ }
+ };
+
+ describe('adding a comment', () => {
+    it('calls the create note mutation', async () => {
+ const noteText = 'updated desc';
+
+ await createComponent({
+ isEditing: true,
+ signedIn: true,
+ });
+
+ setText(noteText);
+
+ clickSave();
+
+ await waitForPromises();
+
+ expect(mutationSuccessHandler).toHaveBeenCalledWith({
+ input: {
+ noteableId: workItemId,
+ body: noteText,
+ },
+ });
+ });
+
+ it('tracks adding comment', async () => {
+ await createComponent();
+ const trackingSpy = mockTracking(undefined, wrapper.element, jest.spyOn);
+
+ setText('test');
+
+ clickSave();
+
+ await waitForPromises();
+
+ expect(trackingSpy).toHaveBeenCalledWith(TRACKING_CATEGORY_SHOW, 'add_work_item_comment', {
+ category: TRACKING_CATEGORY_SHOW,
+ label: 'item_comment',
+ property: 'type_Task',
+ });
+ });
+
+ it('emits error when mutation returns error', async () => {
+      const error = 'error';
+
+ await createComponent({
+ isEditing: true,
+ mutationHandler: jest.fn().mockResolvedValue({
+ data: {
+ createNote: {
+ note: null,
+ errors: [error],
+ },
+ },
+ }),
+ });
+
+ setText('updated desc');
+
+ clickSave();
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[error]]);
+ });
+
+ it('emits error when mutation fails', async () => {
+      const error = 'error';
+
+ await createComponent({
+ isEditing: true,
+ mutationHandler: jest.fn().mockRejectedValue(new Error(error)),
+ });
+
+ setText('updated desc');
+
+ clickSave();
+
+ await waitForPromises();
+
+ expect(wrapper.emitted('error')).toEqual([[error]]);
+ });
+
+ it('autosaves', async () => {
+ await createComponent({
+ isEditing: true,
+ });
+
+ setText('updated');
+
+ expect(updateDraft).toHaveBeenCalled();
+ });
+ });
+
+ it('calls the global ID work item query when `fetchByIid` prop is false', async () => {
+ createComponent({ fetchByIid: false });
+ await waitForPromises();
+
+ expect(workItemResponseHandler).toHaveBeenCalled();
+ expect(workItemByIidResponseHandler).not.toHaveBeenCalled();
+ });
+
+  it('calls the IID work item query when `fetchByIid` prop is true', async () => {
+ await createComponent({ fetchByIid: true, isEditing: false });
+
+ expect(workItemResponseHandler).not.toHaveBeenCalled();
+ expect(workItemByIidResponseHandler).toHaveBeenCalled();
+ });
+
+ it('skips calling the handlers when missing the needed queryVariables', async () => {
+ await createComponent({ queryVariables: {}, fetchByIid: false, isEditing: false });
+
+ expect(workItemResponseHandler).not.toHaveBeenCalled();
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_comment_locked_spec.js b/spec/frontend/work_items/components/work_item_comment_locked_spec.js
new file mode 100644
index 00000000000..58491c4b09c
--- /dev/null
+++ b/spec/frontend/work_items/components/work_item_comment_locked_spec.js
@@ -0,0 +1,41 @@
+import { GlLink, GlIcon } from '@gitlab/ui';
+import { shallowMount } from '@vue/test-utils';
+import WorkItemCommentLocked from '~/work_items/components/work_item_comment_locked.vue';
+
+const createComponent = ({ workItemType = 'Task', isProjectArchived = false } = {}) =>
+ shallowMount(WorkItemCommentLocked, {
+ propsData: {
+ workItemType,
+ isProjectArchived,
+ },
+ });
+
+describe('WorkItemCommentLocked', () => {
+ let wrapper;
+ const findLockedIcon = () => wrapper.findComponent(GlIcon);
+ const findLearnMoreLink = () => wrapper.findComponent(GlLink);
+
+ it('renders the locked icon', () => {
+ wrapper = createComponent();
+ expect(findLockedIcon().props('name')).toBe('lock');
+ });
+
+ it('has the learn more link', () => {
+ wrapper = createComponent();
+ expect(findLearnMoreLink().attributes('href')).toBe(
+ WorkItemCommentLocked.constantOptions.lockedIssueDocsPath,
+ );
+ });
+
+ describe('when the project is archived', () => {
+ beforeEach(() => {
+ wrapper = createComponent({ isProjectArchived: true });
+ });
+
+ it('learn more link is directed to archived project docs path', () => {
+ expect(findLearnMoreLink().attributes('href')).toBe(
+ WorkItemCommentLocked.constantOptions.archivedProjectDocsPath,
+ );
+ });
+ });
+});
diff --git a/spec/frontend/work_items/components/work_item_detail_modal_spec.js b/spec/frontend/work_items/components/work_item_detail_modal_spec.js
index 686641800b3..8976cd6e22b 100644
--- a/spec/frontend/work_items/components/work_item_detail_modal_spec.js
+++ b/spec/frontend/work_items/components/work_item_detail_modal_spec.js
@@ -4,10 +4,11 @@ import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import waitForPromises from 'helpers/wait_for_promises';
import createMockApollo from 'helpers/mock_apollo_helper';
-import WorkItemDetail from '~/work_items/components/work_item_detail.vue';
+import { stubComponent } from 'helpers/stub_component';
import WorkItemDetailModal from '~/work_items/components/work_item_detail_modal.vue';
import deleteWorkItemFromTaskMutation from '~/work_items/graphql/delete_task_from_work_item.mutation.graphql';
import deleteWorkItemMutation from '~/work_items/graphql/delete_work_item.mutation.graphql';
+import WorkItemDetail from '~/work_items/components/work_item_detail.vue';
import {
deleteWorkItemFromTaskMutationErrorResponse,
deleteWorkItemFromTaskMutationResponse,
@@ -69,8 +70,14 @@ describe('WorkItemDetailModal component', () => {
error,
};
},
+ provide: {
+ fullPath: 'group/project',
+ },
stubs: {
GlModal,
+ WorkItemDetail: stubComponent(WorkItemDetail, {
+ apollo: {},
+ }),
},
});
};
@@ -126,6 +133,15 @@ describe('WorkItemDetailModal component', () => {
expect(closeSpy).toHaveBeenCalled();
});
+ it('updates the work item when WorkItemDetail emits `update-modal` event', async () => {
+ createComponent();
+
+ findWorkItemDetail().vm.$emit('update-modal', null, 'updatedId');
+ await waitForPromises();
+
+ expect(findWorkItemDetail().props().workItemId).toEqual('updatedId');
+ });
+
describe('delete work item', () => {
describe('when there is task data', () => {
it('emits workItemDeleted and closes modal', async () => {
diff --git a/spec/frontend/work_items/components/work_item_detail_spec.js b/spec/frontend/work_items/components/work_item_detail_spec.js
index bbab45c7055..a50a48de921 100644
--- a/spec/frontend/work_items/components/work_item_detail_spec.js
+++ b/spec/frontend/work_items/components/work_item_detail_spec.js
@@ -12,6 +12,7 @@ import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import setWindowLocation from 'helpers/set_window_location_helper';
+import { stubComponent } from 'helpers/stub_component';
import WorkItemDetail from '~/work_items/components/work_item_detail.vue';
import WorkItemActions from '~/work_items/components/work_item_actions.vue';
import WorkItemDescription from '~/work_items/components/work_item_description.vue';
@@ -22,6 +23,8 @@ import WorkItemAssignees from '~/work_items/components/work_item_assignees.vue';
import WorkItemLabels from '~/work_items/components/work_item_labels.vue';
import WorkItemMilestone from '~/work_items/components/work_item_milestone.vue';
import WorkItemTree from '~/work_items/components/work_item_links/work_item_tree.vue';
+import WorkItemNotes from '~/work_items/components/work_item_notes.vue';
+import WorkItemDetailModal from '~/work_items/components/work_item_detail_modal.vue';
import { i18n } from '~/work_items/constants';
import workItemQuery from '~/work_items/graphql/work_item.query.graphql';
import workItemByIidQuery from '~/work_items/graphql/work_item_by_iid.query.graphql';
@@ -63,6 +66,7 @@ describe('WorkItemDetail component', () => {
const assigneesSubscriptionHandler = jest
.fn()
.mockResolvedValue(workItemAssigneesSubscriptionResponse);
+ const showModalHandler = jest.fn();
const findAlert = () => wrapper.findComponent(GlAlert);
const findEmptyState = () => wrapper.findComponent(GlEmptyState);
@@ -81,6 +85,8 @@ describe('WorkItemDetail component', () => {
const findCloseButton = () => wrapper.find('[data-testid="work-item-close"]');
const findWorkItemType = () => wrapper.find('[data-testid="work-item-type"]');
const findHierarchyTree = () => wrapper.findComponent(WorkItemTree);
+ const findNotesWidget = () => wrapper.findComponent(WorkItemNotes);
+ const findModal = () => wrapper.findComponent(WorkItemDetailModal);
const createComponent = ({
isModal = false,
@@ -129,6 +135,12 @@ describe('WorkItemDetail component', () => {
stubs: {
WorkItemWeight: true,
WorkItemIteration: true,
+ WorkItemHealthStatus: true,
+ WorkItemDetailModal: stubComponent(WorkItemDetailModal, {
+ methods: {
+ show: showModalHandler,
+ },
+ }),
},
});
};
@@ -652,15 +664,89 @@ describe('WorkItemDetail component', () => {
expect(findHierarchyTree().exists()).toBe(false);
});
- it('renders children tree when work item is an Objective', async () => {
+ describe('work item has children', () => {
const objectiveWorkItem = workItemResponseFactory({
workItemType: objectiveType,
+ confidential: true,
});
const handler = jest.fn().mockResolvedValue(objectiveWorkItem);
- createComponent({ handler });
+
+ it('renders children tree when work item is an Objective', async () => {
+ createComponent({ handler });
+ await waitForPromises();
+
+ expect(findHierarchyTree().exists()).toBe(true);
+ });
+
+ it('renders a modal', async () => {
+ createComponent({ handler });
+ await waitForPromises();
+
+ expect(findModal().exists()).toBe(true);
+ });
+
+ it('opens the modal with the child when `show-modal` is emitted', async () => {
+ createComponent({ handler });
+ await waitForPromises();
+
+ const event = {
+ preventDefault: jest.fn(),
+ };
+
+ findHierarchyTree().vm.$emit('show-modal', event, { id: 'childWorkItemId' });
+ await waitForPromises();
+
+ expect(wrapper.findComponent(WorkItemDetailModal).props().workItemId).toBe(
+ 'childWorkItemId',
+ );
+ expect(showModalHandler).toHaveBeenCalled();
+ });
+
+ describe('work item is rendered in a modal and has children', () => {
+ beforeEach(async () => {
+ createComponent({
+ isModal: true,
+ handler,
+ });
+
+ await waitForPromises();
+ });
+
+ it('does not render a new modal', () => {
+ expect(findModal().exists()).toBe(false);
+ });
+
+ it('emits `update-modal` when `show-modal` is emitted', async () => {
+ const event = {
+ preventDefault: jest.fn(),
+ };
+
+ findHierarchyTree().vm.$emit('show-modal', event, { id: 'childWorkItemId' });
+ await waitForPromises();
+
+ expect(wrapper.emitted('update-modal')).toBeDefined();
+ });
+ });
+ });
+ });
+
+ describe('notes widget', () => {
+ it('does not render notes by default', async () => {
+ createComponent();
+ await waitForPromises();
+
+ expect(findNotesWidget().exists()).toBe(false);
+ });
+
+ it('renders notes when the work_items_mvc flag is on', async () => {
+ const notesWorkItem = workItemResponseFactory({
+ notesWidgetPresent: true,
+ });
+ const handler = jest.fn().mockResolvedValue(notesWorkItem);
+ createComponent({ workItemsMvcEnabled: true, handler });
await waitForPromises();
- expect(findHierarchyTree().exists()).toBe(true);
+ expect(findNotesWidget().exists()).toBe(true);
});
});
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_link_child_metadata_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_link_child_metadata_spec.js
index 47489d4796b..e693ccfb156 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_link_child_metadata_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_link_child_metadata_spec.js
@@ -5,23 +5,22 @@ import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import ItemMilestone from '~/issuable/components/issue_milestone.vue';
import WorkItemLinkChildMetadata from '~/work_items/components/work_item_links/work_item_link_child_metadata.vue';
-import { mockMilestone, mockAssignees, mockLabels } from '../../mock_data';
+import { workItemObjectiveMetadataWidgets } from '../../mock_data';
describe('WorkItemLinkChildMetadata', () => {
+ const { MILESTONE, ASSIGNEES, LABELS } = workItemObjectiveMetadataWidgets;
+ const mockMilestone = MILESTONE.milestone;
+ const mockAssignees = ASSIGNEES.assignees.nodes;
+ const mockLabels = LABELS.labels.nodes;
let wrapper;
- const createComponent = ({
- allowsScopedLabels = true,
- milestone = mockMilestone,
- assignees = mockAssignees,
- labels = mockLabels,
- } = {}) => {
+ const createComponent = ({ metadataWidgets = workItemObjectiveMetadataWidgets } = {}) => {
wrapper = shallowMountExtended(WorkItemLinkChildMetadata, {
propsData: {
- allowsScopedLabels,
- milestone,
- assignees,
- labels,
+ metadataWidgets,
+ },
+ slots: {
+ default: `<div data-testid="default-slot">Foo</div>`,
},
});
};
@@ -30,7 +29,11 @@ describe('WorkItemLinkChildMetadata', () => {
createComponent();
});
- it('renders milestone link button', () => {
+ it('renders default slot contents', () => {
+ expect(wrapper.findByTestId('default-slot').text()).toBe('Foo');
+ });
+
+ it('renders item milestone', () => {
const milestoneLink = wrapper.findComponent(ItemMilestone);
expect(milestoneLink.exists()).toBe(true);
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js
index 73d498ad055..0470249d7ce 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_link_child_spec.js
@@ -5,11 +5,12 @@ import createMockApollo from 'helpers/mock_apollo_helper';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import waitForPromises from 'helpers/wait_for_promises';
+import WorkItemLinkChildMetadata from 'ee_else_ce/work_items/components/work_item_links/work_item_link_child_metadata.vue';
+
import { createAlert } from '~/flash';
import RichTimestampTooltip from '~/vue_shared/components/rich_timestamp_tooltip.vue';
import getWorkItemTreeQuery from '~/work_items/graphql/work_item_tree.query.graphql';
-import WorkItemLinkChildMetadata from '~/work_items/components/work_item_links/work_item_link_child_metadata.vue';
import WorkItemLinkChild from '~/work_items/components/work_item_links/work_item_link_child.vue';
import WorkItemLinksMenu from '~/work_items/components/work_item_links/work_item_links_menu.vue';
import WorkItemTreeChildren from '~/work_items/components/work_item_links/work_item_tree_children.vue';
@@ -25,11 +26,9 @@ import {
workItemObjectiveNoMetadata,
confidentialWorkItemTask,
closedWorkItemTask,
- mockMilestone,
- mockAssignees,
- mockLabels,
workItemHierarchyTreeResponse,
workItemHierarchyTreeFailureResponse,
+ workItemObjectiveMetadataWidgets,
} from '../../mock_data';
jest.mock('~/flash');
@@ -148,10 +147,7 @@ describe('WorkItemLinkChild', () => {
const metadataEl = findMetadataComponent();
expect(metadataEl.exists()).toBe(true);
expect(metadataEl.props()).toMatchObject({
- allowsScopedLabels: true,
- milestone: mockMilestone,
- assignees: mockAssignees,
- labels: mockLabels,
+ metadataWidgets: workItemObjectiveMetadataWidgets,
});
});
@@ -265,5 +261,14 @@ describe('WorkItemLinkChild', () => {
message: 'Something went wrong while fetching children.',
});
});
+
+ it('emits `click` event when a child is clicked', async () => {
+ findExpandButton().vm.$emit('click');
+ await waitForPromises();
+
+ findTreeChildren().vm.$emit('click', 'event');
+
+ expect(wrapper.emitted('click')).toEqual([['event']]);
+ });
});
});
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
index bbe460a55ba..5e1c46826cc 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_links_form_spec.js
@@ -1,11 +1,18 @@
import Vue from 'vue';
-import { GlForm, GlFormInput, GlTokenSelector } from '@gitlab/ui';
+import { GlForm, GlFormInput, GlFormCheckbox, GlTooltip, GlTokenSelector } from '@gitlab/ui';
import VueApollo from 'vue-apollo';
+import { sprintf, s__ } from '~/locale';
import { shallowMountExtended } from 'helpers/vue_test_utils_helper';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import WorkItemLinksForm from '~/work_items/components/work_item_links/work_item_links_form.vue';
-import { FORM_TYPES } from '~/work_items/constants';
+import {
+ FORM_TYPES,
+ WORK_ITEM_TYPE_ENUM_TASK,
+ WORK_ITEM_TYPE_VALUE_ISSUE,
+ I18N_WORK_ITEM_CONFIDENTIALITY_CHECKBOX_LABEL,
+ I18N_WORK_ITEM_CONFIDENTIALITY_CHECKBOX_TOOLTIP,
+} from '~/work_items/constants';
import projectWorkItemsQuery from '~/work_items/graphql/project_work_items.query.graphql';
import projectWorkItemTypesQuery from '~/work_items/graphql/project_work_item_types.query.graphql';
import createWorkItemMutation from '~/work_items/graphql/create_work_item.mutation.graphql';
@@ -36,6 +43,8 @@ describe('WorkItemLinksForm', () => {
workItemsMvcEnabled = false,
parentIteration = null,
formType = FORM_TYPES.create,
+ parentWorkItemType = WORK_ITEM_TYPE_VALUE_ISSUE,
+ childrenType = WORK_ITEM_TYPE_ENUM_TASK,
} = {}) => {
wrapper = shallowMountExtended(WorkItemLinksForm, {
apolloProvider: createMockApollo([
@@ -48,6 +57,8 @@ describe('WorkItemLinksForm', () => {
issuableGid: 'gid://gitlab/WorkItem/1',
parentConfidential,
parentIteration,
+ parentWorkItemType,
+ childrenType,
formType,
},
provide: {
@@ -65,6 +76,7 @@ describe('WorkItemLinksForm', () => {
const findForm = () => wrapper.findComponent(GlForm);
const findTokenSelector = () => wrapper.findComponent(GlTokenSelector);
const findInput = () => wrapper.findComponent(GlFormInput);
+ const findConfidentialCheckbox = () => wrapper.findComponent(GlFormCheckbox);
const findAddChildButton = () => wrapper.findByTestId('add-child-button');
afterEach(() => {
@@ -90,6 +102,7 @@ describe('WorkItemLinksForm', () => {
preventDefault: jest.fn(),
});
await waitForPromises();
+ expect(wrapper.vm.childWorkItemType).toEqual('gid://gitlab/WorkItems::Type/3');
expect(createMutationResolver).toHaveBeenCalledWith({
input: {
title: 'Create task test',
@@ -112,6 +125,7 @@ describe('WorkItemLinksForm', () => {
preventDefault: jest.fn(),
});
await waitForPromises();
+ expect(wrapper.vm.childWorkItemType).toEqual('gid://gitlab/WorkItems::Type/3');
expect(createMutationResolver).toHaveBeenCalledWith({
input: {
title: 'Create confidential task',
@@ -124,9 +138,50 @@ describe('WorkItemLinksForm', () => {
},
});
});
+
+ describe('confidentiality checkbox', () => {
+ it('renders confidentiality checkbox', () => {
+ const confidentialCheckbox = findConfidentialCheckbox();
+
+ expect(confidentialCheckbox.exists()).toBe(true);
+ expect(wrapper.findComponent(GlTooltip).exists()).toBe(false);
+ expect(confidentialCheckbox.text()).toBe(
+ sprintf(I18N_WORK_ITEM_CONFIDENTIALITY_CHECKBOX_LABEL, {
+ workItemType: WORK_ITEM_TYPE_ENUM_TASK.toLocaleLowerCase(),
+ }),
+ );
+ });
+
+ it('renders confidentiality tooltip with checkbox checked and disabled when parent is confidential', () => {
+ createComponent({ parentConfidential: true });
+
+ const confidentialCheckbox = findConfidentialCheckbox();
+ const confidentialTooltip = wrapper.findComponent(GlTooltip);
+
+ expect(confidentialCheckbox.attributes('disabled')).toBe('true');
+ expect(confidentialCheckbox.attributes('checked')).toBe('true');
+ expect(confidentialTooltip.exists()).toBe(true);
+ expect(confidentialTooltip.text()).toBe(
+ sprintf(I18N_WORK_ITEM_CONFIDENTIALITY_CHECKBOX_TOOLTIP, {
+ workItemType: WORK_ITEM_TYPE_ENUM_TASK.toLocaleLowerCase(),
+ parentWorkItemType: WORK_ITEM_TYPE_VALUE_ISSUE.toLocaleLowerCase(),
+ }),
+ );
+ });
+ });
});
describe('adding an existing work item', () => {
+ const selectAvailableWorkItemTokens = async () => {
+ findTokenSelector().vm.$emit(
+ 'input',
+ availableWorkItemsResponse.data.workspace.workItems.nodes,
+ );
+ findTokenSelector().vm.$emit('blur', new FocusEvent('blur', { relatedTarget: null }));
+
+ await waitForPromises();
+ };
+
beforeEach(async () => {
await createComponent({ formType: FORM_TYPES.add });
});
@@ -136,6 +191,7 @@ describe('WorkItemLinksForm', () => {
expect(findTokenSelector().exists()).toBe(true);
expect(findAddChildButton().text()).toBe('Add task');
expect(findInput().exists()).toBe(false);
+ expect(findConfidentialCheckbox().exists()).toBe(false);
});
it('searches for available work items as prop when typing in input', async () => {
@@ -147,13 +203,7 @@ describe('WorkItemLinksForm', () => {
});
it('selects and adds children', async () => {
- findTokenSelector().vm.$emit(
- 'input',
- availableWorkItemsResponse.data.workspace.workItems.nodes,
- );
- findTokenSelector().vm.$emit('blur', new FocusEvent({ relatedTarget: null }));
-
- await waitForPromises();
+ await selectAvailableWorkItemTokens();
expect(findAddChildButton().text()).toBe('Add tasks');
findForm().vm.$emit('submit', {
@@ -162,6 +212,31 @@ describe('WorkItemLinksForm', () => {
await waitForPromises();
expect(updateMutationResolver).toHaveBeenCalled();
});
+
+ it('shows a validation error when non-confidential child items are added to a confidential parent', async () => {
+ await createComponent({ formType: FORM_TYPES.add, parentConfidential: true });
+
+ await selectAvailableWorkItemTokens();
+
+ const validationEl = wrapper.findByTestId('work-items-invalid');
+ expect(validationEl.exists()).toBe(true);
+ expect(validationEl.text().trim()).toBe(
+ sprintf(
+ s__(
+ 'WorkItem|%{invalidWorkItemsList} cannot be added: Cannot assign a non-confidential %{childWorkItemType} to a confidential parent %{parentWorkItemType}. Make the selected %{childWorkItemType} confidential and try again.',
+ ),
+ {
+ // Only non-confidential work items are shown in the error message
+ invalidWorkItemsList: availableWorkItemsResponse.data.workspace.workItems.nodes
+ .filter((wi) => !wi.confidential)
+ .map((wi) => wi.title)
+ .join(', '),
+ childWorkItemType: 'Task',
+ parentWorkItemType: 'Issue',
+ },
+ ),
+ );
+ });
});
describe('associate iteration with task', () => {
diff --git a/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js b/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js
index 96211e12755..156f06a0d5e 100644
--- a/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js
+++ b/spec/frontend/work_items/components/work_item_links/work_item_tree_spec.js
@@ -34,6 +34,8 @@ describe('WorkItemTree', () => {
const createComponent = ({
workItemType = 'Objective',
+ parentWorkItemType = 'Objective',
+ confidential = false,
children = childrenWorkItems,
apolloProvider = null,
} = {}) => {
@@ -55,7 +57,9 @@ describe('WorkItemTree', () => {
apolloProvider || createMockApollo([[workItemQuery, getWorkItemQueryHandler]]),
propsData: {
workItemType,
+ parentWorkItemType,
workItemId: 'gid://gitlab/WorkItem/515',
+ confidential,
children,
projectPath: 'test/project',
},
@@ -90,7 +94,11 @@ describe('WorkItemTree', () => {
});
it('renders all hierarchy widget children', () => {
- expect(findWorkItemLinkChildItems()).toHaveLength(4);
+ const workItemLinkChildren = findWorkItemLinkChildItems();
+ expect(workItemLinkChildren).toHaveLength(4);
+ expect(workItemLinkChildren.at(0).props().childItem.confidential).toBe(
+ childrenWorkItems[0].confidential,
+ );
});
it('does not display form by default', () => {
@@ -110,8 +118,12 @@ describe('WorkItemTree', () => {
await nextTick();
expect(findForm().exists()).toBe(true);
- expect(findForm().props('formType')).toBe(formType);
- expect(findForm().props('childrenType')).toBe(childType);
+ expect(findForm().props()).toMatchObject({
+ formType,
+ childrenType: childType,
+ parentWorkItemType: 'Objective',
+ parentConfidential: false,
+ });
},
);
@@ -122,6 +134,17 @@ describe('WorkItemTree', () => {
expect(wrapper.emitted('removeChild')).toEqual([['gid://gitlab/WorkItem/2']]);
});
+ it('emits `show-modal` on `click` event', () => {
+ const firstChild = findWorkItemLinkChildItems().at(0);
+ const event = {
+ childItem: 'gid://gitlab/WorkItem/2',
+ };
+
+ firstChild.vm.$emit('click', event);
+
+ expect(wrapper.emitted('show-modal')).toEqual([[event, event.childItem]]);
+ });
+
it.each`
description | workItemType | prefetch
${'prefetches'} | ${'Issue'} | ${true}
diff --git a/spec/frontend/work_items/components/work_item_notes_spec.js b/spec/frontend/work_items/components/work_item_notes_spec.js
index ed68d214fc9..23dd2b6bacb 100644
--- a/spec/frontend/work_items/components/work_item_notes_spec.js
+++ b/spec/frontend/work_items/components/work_item_notes_spec.js
@@ -1,18 +1,22 @@
import { GlSkeletonLoader } from '@gitlab/ui';
import { shallowMount } from '@vue/test-utils';
-import Vue from 'vue';
+import Vue, { nextTick } from 'vue';
import VueApollo from 'vue-apollo';
import createMockApollo from 'helpers/mock_apollo_helper';
import waitForPromises from 'helpers/wait_for_promises';
import SystemNote from '~/work_items/components/notes/system_note.vue';
import WorkItemNotes from '~/work_items/components/work_item_notes.vue';
+import WorkItemCommentForm from '~/work_items/components/work_item_comment_form.vue';
+import ActivityFilter from '~/work_items/components/notes/activity_filter.vue';
import workItemNotesQuery from '~/work_items/graphql/work_item_notes.query.graphql';
import workItemNotesByIidQuery from '~/work_items/graphql/work_item_notes_by_iid.query.graphql';
-import { WIDGET_TYPE_NOTES } from '~/work_items/constants';
+import { DEFAULT_PAGE_SIZE_NOTES, WIDGET_TYPE_NOTES } from '~/work_items/constants';
+import { DESC } from '~/notes/constants';
import {
mockWorkItemNotesResponse,
workItemQueryResponse,
mockWorkItemNotesByIidResponse,
+ mockMoreWorkItemNotesResponse,
} from '../mock_data';
const mockWorkItemId = workItemQueryResponse.data.workItem.id;
@@ -24,6 +28,12 @@ const mockNotesByIidWidgetResponse = mockWorkItemNotesByIidResponse.data.workspa
(widget) => widget.type === WIDGET_TYPE_NOTES,
);
+const mockMoreNotesWidgetResponse = mockMoreWorkItemNotesResponse.data.workItem.widgets.find(
+ (widget) => widget.type === WIDGET_TYPE_NOTES,
+);
+
+const firstSystemNodeId = mockNotesWidgetResponse.discussions.nodes[0].notes.nodes[0].id;
+
describe('WorkItemNotes component', () => {
let wrapper;
@@ -31,16 +41,24 @@ describe('WorkItemNotes component', () => {
const findAllSystemNotes = () => wrapper.findAllComponents(SystemNote);
const findActivityLabel = () => wrapper.find('label');
+ const findWorkItemCommentForm = () => wrapper.findComponent(WorkItemCommentForm);
const findSkeletonLoader = () => wrapper.findComponent(GlSkeletonLoader);
+ const findSortingFilter = () => wrapper.findComponent(ActivityFilter);
+ const findSystemNoteAtIndex = (index) => findAllSystemNotes().at(index);
const workItemNotesQueryHandler = jest.fn().mockResolvedValue(mockWorkItemNotesResponse);
const workItemNotesByIidQueryHandler = jest
.fn()
.mockResolvedValue(mockWorkItemNotesByIidResponse);
+ const workItemMoreNotesQueryHandler = jest.fn().mockResolvedValue(mockMoreWorkItemNotesResponse);
- const createComponent = ({ workItemId = mockWorkItemId, fetchByIid = false } = {}) => {
+ const createComponent = ({
+ workItemId = mockWorkItemId,
+ fetchByIid = false,
+ defaultWorkItemNotesQueryHandler = workItemNotesQueryHandler,
+ } = {}) => {
wrapper = shallowMount(WorkItemNotes, {
apolloProvider: createMockApollo([
- [workItemNotesQuery, workItemNotesQueryHandler],
+ [workItemNotesQuery, defaultWorkItemNotesQueryHandler],
[workItemNotesByIidQuery, workItemNotesByIidQueryHandler],
]),
propsData: {
@@ -50,6 +68,7 @@ describe('WorkItemNotes component', () => {
},
fullPath: 'test-path',
fetchByIid,
+ workItemType: 'task',
},
provide: {
glFeatures: {
@@ -63,14 +82,17 @@ describe('WorkItemNotes component', () => {
createComponent();
});
- afterEach(() => {
- wrapper.destroy();
- });
-
it('renders activity label', () => {
expect(findActivityLabel().exists()).toBe(true);
});
+ it('passes correct props to comment form component', async () => {
+ createComponent({ workItemId: mockWorkItemId, fetchByIid: false });
+ await waitForPromises();
+
+ expect(findWorkItemCommentForm().props('fetchByIid')).toEqual(false);
+ });
+
describe('when notes are loading', () => {
it('renders skeleton loader', () => {
expect(findSkeletonLoader().exists()).toBe(true);
@@ -98,10 +120,65 @@ describe('WorkItemNotes component', () => {
await waitForPromises();
});
- it('shows the notes list', () => {
+ it('renders the same number of notes as in the response', () => {
expect(findAllSystemNotes()).toHaveLength(
mockNotesByIidWidgetResponse.discussions.nodes.length,
);
});
+
+ it('passes correct props to comment form component', () => {
+ expect(findWorkItemCommentForm().props('fetchByIid')).toEqual(true);
+ });
+ });
+
+ describe('Pagination', () => {
+ describe('when there is no next page', () => {
+ it('does not fetch more notes', async () => {
+ createComponent();
+ await nextTick();
+ expect(workItemMoreNotesQueryHandler).not.toHaveBeenCalled();
+ });
+ });
+
+ describe('when there is a next page', () => {
+ beforeEach(async () => {
+ createComponent({ defaultWorkItemNotesQueryHandler: workItemMoreNotesQueryHandler });
+ await waitForPromises();
+ });
+
+ it('fetches more notes', async () => {
+ expect(workItemMoreNotesQueryHandler).toHaveBeenCalledWith({
+ pageSize: DEFAULT_PAGE_SIZE_NOTES,
+ id: 'gid://gitlab/WorkItem/1',
+ });
+
+ await nextTick();
+
+ expect(workItemMoreNotesQueryHandler).toHaveBeenCalledWith({
+ pageSize: 45,
+ id: 'gid://gitlab/WorkItem/1',
+ after: mockMoreNotesWidgetResponse.discussions.pageInfo.endCursor,
+ });
+ });
+ });
+ });
+
+ describe('Sorting', () => {
+ beforeEach(async () => {
+ createComponent();
+ await waitForPromises();
+ });
+
+ it('renders the sorting filter', () => {
+ expect(findSortingFilter().exists()).toBe(true);
+ });
+
+ it('sorts the list when the `changeSortOrder` event is emitted', async () => {
+ expect(findSystemNoteAtIndex(0).props('note').id).toEqual(firstSystemNodeId);
+
+ await findSortingFilter().vm.$emit('changeSortOrder', DESC);
+
+ expect(findSystemNoteAtIndex(0).props('note').id).not.toEqual(firstSystemNodeId);
+ });
});
});
diff --git a/spec/frontend/work_items/mock_data.js b/spec/frontend/work_items/mock_data.js
index 850672b68d0..67b477b6eb0 100644
--- a/spec/frontend/work_items/mock_data.js
+++ b/spec/frontend/work_items/mock_data.js
@@ -62,6 +62,7 @@ export const workItemQueryResponse = {
__typename: 'Project',
id: '1',
fullPath: 'test-project-path',
+ archived: false,
},
workItemType: {
__typename: 'WorkItemType',
@@ -156,6 +157,7 @@ export const updateWorkItemMutationResponse = {
__typename: 'Project',
id: '1',
fullPath: 'test-project-path',
+ archived: false,
},
workItemType: {
__typename: 'WorkItemType',
@@ -268,6 +270,7 @@ export const workItemResponseFactory = ({
milestoneWidgetPresent = true,
iterationWidgetPresent = true,
healthStatusWidgetPresent = true,
+ notesWidgetPresent = true,
confidential = false,
canInviteMembers = false,
allowsScopedLabels = false,
@@ -292,6 +295,7 @@ export const workItemResponseFactory = ({
__typename: 'Project',
id: '1',
fullPath: 'test-project-path',
+ archived: false,
},
workItemType,
userPermissions: {
@@ -380,6 +384,23 @@ export const workItemResponseFactory = ({
healthStatus: 'onTrack',
}
: { type: 'MOCK TYPE' },
+ notesWidgetPresent
+ ? {
+ __typename: 'WorkItemWidgetNotes',
+ type: 'NOTES',
+ discussions: {
+ pageInfo: {
+ hasNextPage: true,
+ hasPreviousPage: false,
+ startCursor: null,
+ endCursor:
+ 'eyJjcmVhdGVkX2F0IjoiMjAyMi0xMS0xNCAwNDoxOTowMC4wOTkxMTcwMDAgKzAwMDAiLCJpZCI6IjQyNyIsIl9rZCI6Im4ifQ==',
+ __typename: 'PageInfo',
+ },
+ nodes: [],
+ },
+ }
+ : { type: 'MOCK TYPE' },
{
__typename: 'WorkItemWidgetHierarchy',
type: 'HIERARCHY',
@@ -409,6 +430,12 @@ export const workItemResponseFactory = ({
},
parent,
},
+ notesWidgetPresent
+ ? {
+ __typename: 'WorkItemWidgetNotes',
+ type: 'NOTES',
+ }
+ : { type: 'MOCK TYPE' },
],
},
},
@@ -448,6 +475,7 @@ export const createWorkItemMutationResponse = {
__typename: 'Project',
id: '1',
fullPath: 'test-project-path',
+ archived: false,
},
workItemType: {
__typename: 'WorkItemType',
@@ -485,6 +513,7 @@ export const createWorkItemFromTaskMutationResponse = {
__typename: 'Project',
id: '1',
fullPath: 'test-project-path',
+ archived: false,
},
workItemType: {
__typename: 'WorkItemType',
@@ -524,6 +553,7 @@ export const createWorkItemFromTaskMutationResponse = {
__typename: 'Project',
id: '1',
fullPath: 'test-project-path',
+ archived: false,
},
workItemType: {
__typename: 'WorkItemType',
@@ -698,6 +728,20 @@ export const workItemIterationSubscriptionResponse = {
},
};
+export const workItemHealthStatusSubscriptionResponse = {
+ data: {
+ issuableHealthStatusUpdated: {
+ id: 'gid://gitlab/WorkItem/1',
+ widgets: [
+ {
+ __typename: 'WorkItemWidgetHealthStatus',
+ healthStatus: 'needsAttention',
+ },
+ ],
+ },
+ },
+};
+
export const workItemMilestoneSubscriptionResponse = {
data: {
issuableMilestoneUpdated: {
@@ -734,6 +778,7 @@ export const workItemHierarchyEmptyResponse = {
__typename: 'Project',
id: '1',
fullPath: 'test-project-path',
+ archived: false,
},
userPermissions: {
deleteWorkItem: false,
@@ -780,6 +825,7 @@ export const workItemHierarchyNoUpdatePermissionResponse = {
__typename: 'Project',
id: '1',
fullPath: 'test-project-path',
+ archived: false,
},
confidential: false,
widgets: [
@@ -920,6 +966,7 @@ export const workItemHierarchyResponse = {
__typename: 'Project',
id: '1',
fullPath: 'test-project-path',
+ archived: false,
},
widgets: [
{
@@ -942,6 +989,43 @@ export const workItemHierarchyResponse = {
},
};
+export const workItemObjectiveMetadataWidgets = {
+ ASSIGNEES: {
+ type: 'ASSIGNEES',
+ __typename: 'WorkItemWidgetAssignees',
+ canInviteMembers: true,
+ allowsMultipleAssignees: true,
+ assignees: {
+ __typename: 'UserCoreConnection',
+ nodes: mockAssignees,
+ },
+ },
+ HEALTH_STATUS: {
+ type: 'HEALTH_STATUS',
+ __typename: 'WorkItemWidgetHealthStatus',
+ healthStatus: 'onTrack',
+ },
+ LABELS: {
+ type: 'LABELS',
+ __typename: 'WorkItemWidgetLabels',
+ allowsScopedLabels: true,
+ labels: {
+ __typename: 'LabelConnection',
+ nodes: mockLabels,
+ },
+ },
+ MILESTONE: {
+ type: 'MILESTONE',
+ __typename: 'WorkItemWidgetMilestone',
+ milestone: mockMilestone,
+ },
+ PROGRESS: {
+ type: 'PROGRESS',
+ __typename: 'WorkItemWidgetProgress',
+ progress: 10,
+ },
+};
+
export const workItemObjectiveWithChild = {
id: 'gid://gitlab/WorkItem/12',
iid: '12',
@@ -955,6 +1039,7 @@ export const workItemObjectiveWithChild = {
__typename: 'Project',
id: '1',
fullPath: 'test-project-path',
+ archived: false,
},
userPermissions: {
deleteWorkItem: true,
@@ -976,30 +1061,11 @@ export const workItemObjectiveWithChild = {
},
__typename: 'WorkItemWidgetHierarchy',
},
- {
- type: 'MILESTONE',
- __typename: 'WorkItemWidgetMilestone',
- milestone: mockMilestone,
- },
- {
- type: 'ASSIGNEES',
- __typename: 'WorkItemWidgetAssignees',
- canInviteMembers: true,
- allowsMultipleAssignees: true,
- assignees: {
- __typename: 'UserCoreConnection',
- nodes: mockAssignees,
- },
- },
- {
- type: 'LABELS',
- __typename: 'WorkItemWidgetLabels',
- allowsScopedLabels: true,
- labels: {
- __typename: 'LabelConnection',
- nodes: mockLabels,
- },
- },
+ workItemObjectiveMetadataWidgets.PROGRESS,
+ workItemObjectiveMetadataWidgets.HEALTH_STATUS,
+ workItemObjectiveMetadataWidgets.MILESTONE,
+ workItemObjectiveMetadataWidgets.ASSIGNEES,
+ workItemObjectiveMetadataWidgets.LABELS,
],
__typename: 'WorkItem',
};
@@ -1012,6 +1078,16 @@ export const workItemObjectiveNoMetadata = {
hasChildren: true,
__typename: 'WorkItemWidgetHierarchy',
},
+ {
+ __typename: 'WorkItemWidgetProgress',
+ type: 'PROGRESS',
+ progress: null,
+ },
+ {
+ __typename: 'WorkItemWidgetMilestone',
+ type: 'MILESTONE',
+ milestone: null,
+ },
],
};
@@ -1036,6 +1112,7 @@ export const workItemHierarchyTreeResponse = {
__typename: 'Project',
id: '1',
fullPath: 'test-project-path',
+ archived: false,
},
widgets: [
{
@@ -1118,6 +1195,7 @@ export const changeWorkItemParentMutationResponse = {
__typename: 'Project',
id: '1',
fullPath: 'test-project-path',
+ archived: false,
},
widgets: [
{
@@ -1149,6 +1227,7 @@ export const availableWorkItemsResponse = {
title: 'Task 1',
state: 'OPEN',
createdAt: '2022-08-03T12:41:54Z',
+ confidential: false,
__typename: 'WorkItem',
},
{
@@ -1156,6 +1235,15 @@ export const availableWorkItemsResponse = {
title: 'Task 2',
state: 'OPEN',
createdAt: '2022-08-03T12:41:54Z',
+ confidential: false,
+ __typename: 'WorkItem',
+ },
+ {
+ id: 'gid://gitlab/WorkItem/460',
+ title: 'Task 3',
+ state: 'OPEN',
+ createdAt: '2022-08-03T12:41:54Z',
+ confidential: true,
__typename: 'WorkItem',
},
],
@@ -1514,11 +1602,16 @@ export const mockWorkItemNotesResponse = {
nodes: [
{
id: 'gid://gitlab/Note/2428',
- body: 'added #31 as parent issue',
bodyHtml:
'<p data-sourcepos="1:1-1:25" dir="auto">added <a href="/flightjs/Flight/-/issues/31" data-reference-type="issue" data-original="#31" data-link="false" data-link-reference="false" data-project="6" data-issue="224" data-project-path="flightjs/Flight" data-iid="31" data-issue-type="issue" data-container=body data-placement="top" title="Perferendis est quae totam quia laborum tempore ut voluptatem." class="gfm gfm-issue">#31</a> as parent issue</p>',
systemNoteIconName: 'link',
createdAt: '2022-11-14T04:18:59Z',
+ system: true,
+ internal: false,
+ userPermissions: {
+ adminNote: false,
+ __typename: 'NotePermissions',
+ },
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
@@ -1541,12 +1634,17 @@ export const mockWorkItemNotesResponse = {
notes: {
nodes: [
{
- id: 'gid://gitlab/MilestoneNote/not-persisted',
- body: 'changed milestone to %5',
+ id: 'gid://gitlab/MilestoneNote/0f2f195ec0d1ef95ee9d5b10446b8e96a7d83864',
bodyHtml:
'<p data-sourcepos="1:1-1:23" dir="auto">changed milestone to <a href="/flightjs/Flight/-/milestones/5" data-reference-type="milestone" data-original="%5" data-link="false" data-link-reference="false" data-project="6" data-milestone="30" data-container=body data-placement="top" title="" class="gfm gfm-milestone has-tooltip">%v4.0</a></p>',
systemNoteIconName: 'clock',
createdAt: '2022-11-14T04:18:59Z',
+ system: true,
+ internal: false,
+ userPermissions: {
+ adminNote: false,
+ __typename: 'NotePermissions',
+ },
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
@@ -1569,11 +1667,16 @@ export const mockWorkItemNotesResponse = {
notes: {
nodes: [
{
- id: 'gid://gitlab/WeightNote/not-persisted',
- body: 'changed weight to 89',
+ id: 'gid://gitlab/WeightNote/0f2f195ec0d1ef95ee9d5b10446b8e96a9883864',
bodyHtml: '<p dir="auto">changed weight to <strong>89</strong></p>',
systemNoteIconName: 'weight',
createdAt: '2022-11-25T07:16:20Z',
+ system: true,
+ internal: false,
+ userPermissions: {
+ adminNote: false,
+ __typename: 'NotePermissions',
+ },
author: {
avatarUrl:
'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
@@ -1656,11 +1759,16 @@ export const mockWorkItemNotesByIidResponse = {
nodes: [
{
id: 'gid://gitlab/Note/2428',
- body: 'added #31 as parent issue',
bodyHtml:
'\u003cp data-sourcepos="1:1-1:25" dir="auto"\u003eadded \u003ca href="/flightjs/Flight/-/issues/31" data-reference-type="issue" data-original="#31" data-link="false" data-link-reference="false" data-project="6" data-issue="224" data-project-path="flightjs/Flight" data-iid="31" data-issue-type="issue" data-container="body" data-placement="top" title="Perferendis est quae totam quia laborum tempore ut voluptatem." class="gfm gfm-issue"\u003e#31\u003c/a\u003e as parent issue\u003c/p\u003e',
systemNoteIconName: 'link',
createdAt: '2022-11-14T04:18:59Z',
+ system: true,
+ internal: false,
+ userPermissions: {
+ adminNote: false,
+ __typename: 'NotePermissions',
+ },
author: {
id: 'gid://gitlab/User/1',
avatarUrl:
@@ -1685,11 +1793,16 @@ export const mockWorkItemNotesByIidResponse = {
{
id:
'gid://gitlab/MilestoneNote/7b08b89a728a5ceb7de8334246837ba1d07270dc',
- body: 'changed milestone to %5',
bodyHtml:
'\u003cp data-sourcepos="1:1-1:23" dir="auto"\u003echanged milestone to \u003ca href="/flightjs/Flight/-/milestones/5" data-reference-type="milestone" data-original="%5" data-link="false" data-link-reference="false" data-project="6" data-milestone="30" data-container="body" data-placement="top" title="" class="gfm gfm-milestone has-tooltip"\u003e%v4.0\u003c/a\u003e\u003c/p\u003e',
systemNoteIconName: 'clock',
createdAt: '2022-11-14T04:18:59Z',
+ system: true,
+ internal: false,
+ userPermissions: {
+ adminNote: false,
+ __typename: 'NotePermissions',
+ },
author: {
id: 'gid://gitlab/User/1',
avatarUrl:
@@ -1714,11 +1827,16 @@ export const mockWorkItemNotesByIidResponse = {
{
id:
'gid://gitlab/IterationNote/addbc177f7664699a135130ab05ffb78c57e4db3',
- body: 'changed iteration to *iteration:5352',
bodyHtml:
'\u003cp data-sourcepos="1:1-1:36" dir="auto"\u003echanged iteration to \u003ca href="/groups/flightjs/-/iterations/5352" data-reference-type="iteration" data-original="*iteration:5352" data-link="false" data-link-reference="false" data-project="6" data-iteration="5352" data-container="body" data-placement="top" title="Iteration" class="gfm gfm-iteration has-tooltip"\u003eEt autem debitis nam suscipit eos ut. Jul 13, 2022 - Jul 19, 2022\u003c/a\u003e\u003c/p\u003e',
systemNoteIconName: 'iteration',
createdAt: '2022-11-14T04:19:00Z',
+ system: true,
+ internal: false,
+ userPermissions: {
+ adminNote: false,
+ __typename: 'NotePermissions',
+ },
author: {
id: 'gid://gitlab/User/1',
avatarUrl:
@@ -1750,3 +1868,183 @@ export const mockWorkItemNotesByIidResponse = {
},
},
};
+export const mockMoreWorkItemNotesResponse = {
+ data: {
+ workItem: {
+ id: 'gid://gitlab/WorkItem/600',
+ iid: '60',
+ widgets: [
+ {
+ __typename: 'WorkItemWidgetIteration',
+ },
+ {
+ __typename: 'WorkItemWidgetWeight',
+ },
+ {
+ __typename: 'WorkItemWidgetAssignees',
+ },
+ {
+ __typename: 'WorkItemWidgetLabels',
+ },
+ {
+ __typename: 'WorkItemWidgetDescription',
+ },
+ {
+ __typename: 'WorkItemWidgetHierarchy',
+ },
+ {
+ __typename: 'WorkItemWidgetStartAndDueDate',
+ },
+ {
+ __typename: 'WorkItemWidgetMilestone',
+ },
+ {
+ type: 'NOTES',
+ discussions: {
+ pageInfo: {
+ hasNextPage: true,
+ hasPreviousPage: false,
+ startCursor: null,
+ endCursor: 'endCursor',
+ __typename: 'PageInfo',
+ },
+ nodes: [
+ {
+ id:
+ 'gid://gitlab/IndividualNoteDiscussion/8bbc4890b6ff0f2cde93a5a0947cd2b8a13d3b6e',
+ notes: {
+ nodes: [
+ {
+ id: 'gid://gitlab/Note/2428',
+ bodyHtml:
+ '<p data-sourcepos="1:1-1:25" dir="auto">added <a href="/flightjs/Flight/-/issues/31" data-reference-type="issue" data-original="#31" data-link="false" data-link-reference="false" data-project="6" data-issue="224" data-project-path="flightjs/Flight" data-iid="31" data-issue-type="issue" data-container=body data-placement="top" title="Perferendis est quae totam quia laborum tempore ut voluptatem." class="gfm gfm-issue">#31</a> as parent issue</p>',
+ systemNoteIconName: 'link',
+ createdAt: '2022-11-14T04:18:59Z',
+ system: true,
+ internal: false,
+ userPermissions: {
+ adminNote: false,
+ __typename: 'NotePermissions',
+ },
+ author: {
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ id: 'gid://gitlab/User/1',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'http://127.0.0.1:3000/root',
+ __typename: 'UserCore',
+ },
+ __typename: 'Note',
+ },
+ ],
+ __typename: 'NoteConnection',
+ },
+ __typename: 'Discussion',
+ },
+ {
+ id:
+ 'gid://gitlab/IndividualNoteDiscussion/7b08b89a728a5ceb7de8334246837ba1d07270dc',
+ notes: {
+ nodes: [
+ {
+ id: 'gid://gitlab/MilestoneNote/0f2f195ec0d1ef95ee9d5b10446b8e96a7d83823',
+ bodyHtml:
+ '<p data-sourcepos="1:1-1:23" dir="auto">changed milestone to <a href="/flightjs/Flight/-/milestones/5" data-reference-type="milestone" data-original="%5" data-link="false" data-link-reference="false" data-project="6" data-milestone="30" data-container=body data-placement="top" title="" class="gfm gfm-milestone has-tooltip">%v4.0</a></p>',
+ systemNoteIconName: 'clock',
+ createdAt: '2022-11-14T04:18:59Z',
+ system: true,
+ internal: false,
+ userPermissions: {
+ adminNote: false,
+ __typename: 'NotePermissions',
+ },
+ author: {
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ id: 'gid://gitlab/User/1',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'http://127.0.0.1:3000/root',
+ __typename: 'UserCore',
+ },
+ __typename: 'Note',
+ },
+ ],
+ __typename: 'NoteConnection',
+ },
+ __typename: 'Discussion',
+ },
+ {
+ id:
+ 'gid://gitlab/IndividualNoteDiscussion/0f2f195ec0d1ef95ee9d5b10446b8e96a7d83864',
+ notes: {
+ nodes: [
+ {
+ id: 'gid://gitlab/WeightNote/0f2f195ec0d1ef95ee9d5b10446b8e96a7d83864',
+ bodyHtml: '<p dir="auto">changed weight to <strong>89</strong></p>',
+ systemNoteIconName: 'weight',
+ createdAt: '2022-11-25T07:16:20Z',
+ system: true,
+ internal: false,
+ userPermissions: {
+ adminNote: false,
+ __typename: 'NotePermissions',
+ },
+ author: {
+ avatarUrl:
+ 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ id: 'gid://gitlab/User/1',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'http://127.0.0.1:3000/root',
+ __typename: 'UserCore',
+ },
+ __typename: 'Note',
+ },
+ ],
+ __typename: 'NoteConnection',
+ },
+ __typename: 'Discussion',
+ },
+ ],
+ __typename: 'DiscussionConnection',
+ },
+ __typename: 'WorkItemWidgetNotes',
+ },
+ ],
+ __typename: 'WorkItem',
+ },
+ },
+};
+
+export const createWorkItemNoteResponse = {
+ data: {
+ createNote: {
+ errors: [],
+ __typename: 'CreateNotePayload',
+ },
+ },
+};
+
+export const mockWorkItemCommentNote = {
+ id: 'gid://gitlab/Note/158',
+ bodyHtml:
+ '<p data-sourcepos="1:1-1:76" dir="auto"><gl-emoji title="waving hand sign" data-name="wave" data-unicode-version="6.0">👋</gl-emoji> Hi <a href="/fredda.brekke" data-reference-type="user" data-user="3" data-container="body" data-placement="top" class="gfm gfm-project_member js-user-link" title="Sherie Nitzsche">@fredda.brekke</a> How are you ? what do you think about this ? <gl-emoji title="person with folded hands" data-name="pray" data-unicode-version="6.0">🙏</gl-emoji></p>',
+ systemNoteIconName: false,
+ createdAt: '2022-11-25T07:16:20Z',
+ system: false,
+ internal: false,
+ userPermissions: {
+ adminNote: false,
+ __typename: 'NotePermissions',
+ },
+ author: {
+ avatarUrl: 'https://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=80&d=identicon',
+ id: 'gid://gitlab/User/1',
+ name: 'Administrator',
+ username: 'root',
+ webUrl: 'http://127.0.0.1:3000/root',
+ __typename: 'UserCore',
+ },
+};
diff --git a/spec/frontend/work_items/router_spec.js b/spec/frontend/work_items/router_spec.js
index b503d819435..ef9ae4a2eab 100644
--- a/spec/frontend/work_items/router_spec.js
+++ b/spec/frontend/work_items/router_spec.js
@@ -74,6 +74,7 @@ describe('Work items router', () => {
stubs: {
WorkItemWeight: true,
WorkItemIteration: true,
+ WorkItemHealthStatus: true,
},
});
};
diff --git a/spec/graphql/mutations/achievements/create_spec.rb b/spec/graphql/mutations/achievements/create_spec.rb
new file mode 100644
index 00000000000..4bad6164314
--- /dev/null
+++ b/spec/graphql/mutations/achievements/create_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Achievements::Create, feature_category: :users do
+ include GraphqlHelpers
+
+ let_it_be(:user) { create(:user) }
+
+ let(:group) { create(:group) }
+ let(:valid_params) do
+ attributes_for(:achievement, namespace: group)
+ end
+
+ describe '#resolve' do
+ subject(:resolve_mutation) do
+ described_class.new(object: nil, context: { current_user: user }, field: nil).resolve(
+ **valid_params,
+ namespace_id: group.to_global_id
+ )
+ end
+
+ context 'when the user does not have permission' do
+ before do
+ group.add_developer(user)
+ end
+
+ it 'raises an error' do
+ expect { resolve_mutation }.to raise_error(Gitlab::Graphql::Errors::ResourceNotAvailable)
+ .with_message(Gitlab::Graphql::Authorize::AuthorizeResource::RESOURCE_ACCESS_ERROR)
+ end
+ end
+
+ context 'when the user has permission' do
+ before do
+ group.add_maintainer(user)
+ end
+
+ context 'when the params are invalid' do
+ it 'returns the validation error' do
+ valid_params[:name] = nil
+
+ expect(resolve_mutation[:errors]).to match_array(["Name can't be blank"])
+ end
+ end
+
+ it 'creates an achievement with the correct values' do
+ expect(resolve_mutation[:achievement]).to have_attributes(valid_params)
+ end
+ end
+ end
+
+ specify { expect(described_class).to require_graphql_authorizations(:admin_achievement) }
+end
diff --git a/spec/graphql/resolvers/ci/jobs_resolver_spec.rb b/spec/graphql/resolvers/ci/jobs_resolver_spec.rb
index 80a70938dc4..581652a8cea 100644
--- a/spec/graphql/resolvers/ci/jobs_resolver_spec.rb
+++ b/spec/graphql/resolvers/ci/jobs_resolver_spec.rb
@@ -59,5 +59,18 @@ RSpec.describe Resolvers::Ci::JobsResolver do
)
end
end
+
+ context 'when a job is manual' do
+ before_all do
+ create(:ci_build, name: 'Manual job', pipeline: pipeline, when: 'manual')
+ end
+
+ it "returns jobs with when set to 'manual'" do
+ jobs = resolve(described_class, obj: pipeline, arg_style: :internal, args: { when_executed: ['manual'] })
+ expect(jobs).to contain_exactly(
+ have_attributes(name: 'Manual job')
+ )
+ end
+ end
end
end
diff --git a/spec/graphql/resolvers/timelog_resolver_spec.rb b/spec/graphql/resolvers/timelog_resolver_spec.rb
index da2747fdf72..cd52308d895 100644
--- a/spec/graphql/resolvers/timelog_resolver_spec.rb
+++ b/spec/graphql/resolvers/timelog_resolver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Resolvers::TimelogResolver do
+RSpec.describe Resolvers::TimelogResolver, feature_category: :team_planning do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
@@ -262,18 +262,6 @@ RSpec.describe Resolvers::TimelogResolver do
it_behaves_like 'with a user'
end
- context 'when > `default_max_page_size` records' do
- let(:object) { nil }
- let!(:timelog_list) { create_list(:timelog, 101, issue: issue) }
- let(:args) { { project_id: global_id_of(project) } }
- let(:extra_args) { {} }
-
- it 'pagination returns `default_max_page_size` and sets `has_next_page` true' do
- expect(timelogs.items.count).to be(100)
- expect(timelogs.has_next_page).to be(true)
- end
- end
-
context 'when no object or arguments provided' do
let(:object) { nil }
let(:args) { {} }
@@ -286,6 +274,21 @@ RSpec.describe Resolvers::TimelogResolver do
end
end
+ context 'when the sort argument is provided' do
+ let_it_be(:timelog_a) { create(:issue_timelog, time_spent: 7200, spent_at: 1.hour.ago, user: current_user) }
+ let_it_be(:timelog_b) { create(:issue_timelog, time_spent: 5400, spent_at: 2.hours.ago, user: current_user) }
+ let_it_be(:timelog_c) { create(:issue_timelog, time_spent: 1800, spent_at: 30.minutes.ago, user: current_user) }
+ let_it_be(:timelog_d) { create(:issue_timelog, time_spent: 3600, spent_at: 1.day.ago, user: current_user) }
+
+ let(:object) { current_user }
+ let(:args) { { sort: 'TIME_SPENT_ASC' } }
+ let(:extra_args) { {} }
+
+ it 'returns all the timelogs in the correct order' do
+ expect(timelogs.items).to eq([timelog_c, timelog_d, timelog_b, timelog_a])
+ end
+ end
+
def resolve_timelogs(user: current_user, obj: object, **args)
context = { current_user: user }
resolve(described_class, obj: obj, args: args.merge(extra_args), ctx: context)
diff --git a/spec/graphql/types/access_level_enum_spec.rb b/spec/graphql/types/access_level_enum_spec.rb
index 1b379c56ff9..6a8d2e26e65 100644
--- a/spec/graphql/types/access_level_enum_spec.rb
+++ b/spec/graphql/types/access_level_enum_spec.rb
@@ -6,6 +6,6 @@ RSpec.describe GitlabSchema.types['AccessLevelEnum'] do
specify { expect(described_class.graphql_name).to eq('AccessLevelEnum') }
it 'exposes all the existing access levels' do
- expect(described_class.values.keys).to match_array(%w[NO_ACCESS MINIMAL_ACCESS GUEST REPORTER DEVELOPER MAINTAINER OWNER])
+ expect(described_class.values.keys).to include(*%w[NO_ACCESS MINIMAL_ACCESS GUEST REPORTER DEVELOPER MAINTAINER OWNER])
end
end
diff --git a/spec/graphql/types/achievements/achievement_type_spec.rb b/spec/graphql/types/achievements/achievement_type_spec.rb
new file mode 100644
index 00000000000..5c98753ac66
--- /dev/null
+++ b/spec/graphql/types/achievements/achievement_type_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['Achievement'], feature_category: :users do
+ include GraphqlHelpers
+
+ let(:fields) do
+ %w[
+ id
+ namespace
+ name
+ avatar_url
+ description
+ revokeable
+ created_at
+ updated_at
+ ]
+ end
+
+ it { expect(described_class.graphql_name).to eq('Achievement') }
+ it { expect(described_class).to have_graphql_fields(fields) }
+ it { expect(described_class).to require_graphql_authorizations(:read_achievement) }
+
+ describe '#avatar_url' do
+ let(:object) { instance_double(Achievements::Achievement) }
+ let(:current_user) { instance_double(User) }
+
+ before do
+ allow(described_class).to receive(:authorized?).and_return(true)
+ end
+
+ it 'calls Achievement#avatar_url(only_path: false)' do
+ allow(object).to receive(:avatar_url).with(only_path: false)
+ resolve_field(:avatar_url, object, current_user: current_user)
+ expect(object).to have_received(:avatar_url).with(only_path: false).once
+ end
+ end
+end
diff --git a/spec/graphql/types/alert_management/alert_type_spec.rb b/spec/graphql/types/alert_management/alert_type_spec.rb
index c1df24ccb5c..4428fc0683a 100644
--- a/spec/graphql/types/alert_management/alert_type_spec.rb
+++ b/spec/graphql/types/alert_management/alert_type_spec.rb
@@ -38,6 +38,7 @@ RSpec.describe GitlabSchema.types['AlertManagementAlert'], feature_category: :in
prometheus_alert
environment
web_url
+ commenters
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/ci/runner_countable_connection_type_spec.rb b/spec/graphql/types/ci/runner_countable_connection_type_spec.rb
new file mode 100644
index 00000000000..49254ed0f93
--- /dev/null
+++ b/spec/graphql/types/ci/runner_countable_connection_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::Ci::RunnerCountableConnectionType, feature_category: :runner_fleet do
+ it 'contains attributes related to a runner connection' do
+ expected_fields = %w[count]
+
+ expect(described_class).to include_graphql_fields(*expected_fields)
+ end
+end
diff --git a/spec/graphql/types/description_version_type_spec.rb b/spec/graphql/types/description_version_type_spec.rb
new file mode 100644
index 00000000000..36bb1af7f7b
--- /dev/null
+++ b/spec/graphql/types/description_version_type_spec.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['DescriptionVersion'], feature_category: :team_planning do
+ it { expect(described_class).to have_graphql_field(:id) }
+ it { expect(described_class).to have_graphql_field(:description) }
+
+ specify { expect(described_class).to require_graphql_authorizations(:read_issuable) }
+end
diff --git a/spec/graphql/types/design_management/design_type_spec.rb b/spec/graphql/types/design_management/design_type_spec.rb
index 9c460e9058a..24b007a6b33 100644
--- a/spec/graphql/types/design_management/design_type_spec.rb
+++ b/spec/graphql/types/design_management/design_type_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe GitlabSchema.types['Design'] do
specify { expect(described_class.interfaces).to include(Types::TodoableInterface) }
it_behaves_like 'a GraphQL type with design fields' do
- let(:extra_design_fields) { %i[notes current_user_todos discussions versions web_url] }
+ let(:extra_design_fields) { %i[notes current_user_todos discussions versions web_url commenters] }
let_it_be(:design) { create(:design, :with_versions) }
let(:object_id) { GitlabSchema.id_from_object(design) }
let_it_be(:object_id_b) { GitlabSchema.id_from_object(create(:design, :with_versions)) }
diff --git a/spec/graphql/types/issue_type_spec.rb b/spec/graphql/types/issue_type_spec.rb
index dc444f90627..498625dc642 100644
--- a/spec/graphql/types/issue_type_spec.rb
+++ b/spec/graphql/types/issue_type_spec.rb
@@ -3,6 +3,9 @@
require 'spec_helper'
RSpec.describe GitlabSchema.types['Issue'] do
+ let_it_be_with_reload(:project) { create(:project, :public) }
+ let_it_be(:user) { create(:user) }
+
specify { expect(described_class).to expose_permissions_using(Types::PermissionTypes::Issue) }
specify { expect(described_class.graphql_name).to eq('Issue') }
@@ -26,8 +29,6 @@ RSpec.describe GitlabSchema.types['Issue'] do
end
describe 'pagination and count' do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :public) }
let_it_be(:now) { Time.now.change(usec: 0) }
let_it_be(:issues) { create_list(:issue, 10, project: project, created_at: now) }
@@ -130,8 +131,6 @@ RSpec.describe GitlabSchema.types['Issue'] do
end
describe "issue notes" do
- let(:user) { create(:user) }
- let(:project) { create(:project, :public) }
let(:issue) { create(:issue, project: project) }
let(:confidential_issue) { create(:issue, :confidential, project: project) }
let(:private_note_body) { "mentioned in issue #{confidential_issue.to_reference(project)}" }
@@ -211,8 +210,6 @@ RSpec.describe GitlabSchema.types['Issue'] do
describe 'hidden', :enable_admin_mode do
let_it_be(:admin) { create(:user, :admin) }
let_it_be(:banned_user) { create(:user, :banned) }
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :public) }
let_it_be(:hidden_issue) { create(:issue, project: project, author: banned_user) }
let_it_be(:visible_issue) { create(:issue, project: project, author: user) }
@@ -259,8 +256,6 @@ RSpec.describe GitlabSchema.types['Issue'] do
end
describe 'escalation_status' do
- let_it_be(:user) { create(:user) }
- let_it_be(:project) { create(:project, :public) }
let_it_be(:issue, reload: true) { create(:issue, project: project) }
let(:execute) { GitlabSchema.execute(query, context: { current_user: user }).as_json }
@@ -294,4 +289,44 @@ RSpec.describe GitlabSchema.types['Issue'] do
end
end
end
+
+ describe 'type' do
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ let(:query) do
+ %(
+ query {
+ issue(id: "#{issue.to_gid}") {
+ type
+ }
+ }
+ )
+ end
+
+ subject(:execute) { GitlabSchema.execute(query, context: { current_user: user }).as_json }
+
+ context 'when the issue_type_uses_work_item_types_table feature flag is enabled' do
+ it 'gets the type field from the work_item_types table' do
+ expect_next_instance_of(::IssuePresenter) do |presented_issue|
+ expect(presented_issue).to receive_message_chain(:work_item_type, :base_type)
+ end
+
+ execute
+ end
+ end
+
+ context 'when the issue_type_uses_work_item_types_table feature flag is disabled' do
+ before do
+ stub_feature_flags(issue_type_uses_work_item_types_table: false)
+ end
+
+ it 'does not get the type field from the work_item_types table' do
+ expect_next_instance_of(::IssuePresenter) do |presented_issue|
+ expect(presented_issue).not_to receive(:work_item_type)
+ end
+
+ execute
+ end
+ end
+ end
end
diff --git a/spec/graphql/types/member_access_level_enum_spec.rb b/spec/graphql/types/member_access_level_enum_spec.rb
new file mode 100644
index 00000000000..54aef667695
--- /dev/null
+++ b/spec/graphql/types/member_access_level_enum_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Types::MemberAccessLevelEnum, feature_category: :subgroups do
+ specify { expect(described_class.graphql_name).to eq('MemberAccessLevel') }
+
+ it 'exposes all the existing access levels' do
+ expect(described_class.values.keys).to include(*%w[GUEST REPORTER DEVELOPER MAINTAINER OWNER])
+ end
+end
diff --git a/spec/graphql/types/namespace_type_spec.rb b/spec/graphql/types/namespace_type_spec.rb
index 168a6ba4eaa..d80235023ef 100644
--- a/spec/graphql/types/namespace_type_spec.rb
+++ b/spec/graphql/types/namespace_type_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe GitlabSchema.types['Namespace'] do
expected_fields = %w[
id name path full_name full_path description description_html visibility
lfs_enabled request_access_enabled projects root_storage_statistics shared_runners_setting
- timelog_categories
+ timelog_categories achievements
]
expect(described_class).to include_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/notes/note_type_spec.rb b/spec/graphql/types/notes/note_type_spec.rb
index cbf7f086dbe..a9e45b29eea 100644
--- a/spec/graphql/types/notes/note_type_spec.rb
+++ b/spec/graphql/types/notes/note_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['Note'] do
+RSpec.describe GitlabSchema.types['Note'], feature_category: :team_planning do
it 'exposes the expected fields' do
expected_fields = %i[
author
@@ -24,6 +24,9 @@ RSpec.describe GitlabSchema.types['Note'] do
updated_at
user_permissions
url
+ last_edited_at
+ last_edited_by
+ system_note_metadata
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/notes/noteable_interface_spec.rb b/spec/graphql/types/notes/noteable_interface_spec.rb
index be2c30aac72..e11dece60b8 100644
--- a/spec/graphql/types/notes/noteable_interface_spec.rb
+++ b/spec/graphql/types/notes/noteable_interface_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Types::Notes::NoteableInterface do
expected_fields = %i[
discussions
notes
+ commenters
]
expect(described_class).to have_graphql_fields(*expected_fields)
diff --git a/spec/graphql/types/notes/system_note_metadata_type_spec.rb b/spec/graphql/types/notes/system_note_metadata_type_spec.rb
new file mode 100644
index 00000000000..d243e926ff5
--- /dev/null
+++ b/spec/graphql/types/notes/system_note_metadata_type_spec.rb
@@ -0,0 +1,11 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['SystemNoteMetadata'], feature_category: :team_planning do
+ it { expect(described_class).to have_graphql_field(:id) }
+ it { expect(described_class).to have_graphql_field(:action) }
+ it { expect(described_class).to have_graphql_field(:description_version) }
+
+ specify { expect(described_class).to require_graphql_authorizations(:read_note) }
+end
diff --git a/spec/graphql/types/query_type_spec.rb b/spec/graphql/types/query_type_spec.rb
index 514d24a209e..f06759e30c8 100644
--- a/spec/graphql/types/query_type_spec.rb
+++ b/spec/graphql/types/query_type_spec.rb
@@ -8,29 +8,40 @@ RSpec.describe GitlabSchema.types['Query'] do
end
it 'has the expected fields' do
- expected_fields = %i[
- project
- namespace
- group
- echo
- metadata
- current_user
- snippets
- design_management
- milestone
- user
- users
- issue
- merge_request
- usage_trends_measurements
- runner_platforms
- runner
- runners
- timelogs
- board_list
- topics
- gitpod_enabled
- ci_variables
+ expected_fields = [
+ :board_list,
+ :ci_application_settings,
+ :ci_config,
+ :ci_variables,
+ :container_repository,
+ :current_user,
+ :design_management,
+ :echo,
+ :gitpod_enabled,
+ :group,
+ :issue,
+ :issues,
+ :jobs,
+ :merge_request,
+ :metadata,
+ :milestone,
+ :namespace,
+ :package,
+ :project,
+ :projects,
+ :query_complexity,
+ :runner,
+ :runner_platforms,
+ :runner_setup,
+ :runners,
+ :snippets,
+ :timelogs,
+ :todo,
+ :topics,
+ :usage_trends_measurements,
+ :user,
+ :users,
+ :work_item
]
expect(described_class).to have_graphql_fields(*expected_fields).at_least
@@ -135,7 +146,7 @@ RSpec.describe GitlabSchema.types['Query'] do
subject { described_class.fields['timelogs'] }
it 'returns timelogs' do
- is_expected.to have_graphql_arguments(:startDate, :endDate, :startTime, :endTime, :username, :projectId, :groupId, :after, :before, :first, :last)
+ is_expected.to have_graphql_arguments(:startDate, :endDate, :startTime, :endTime, :username, :projectId, :groupId, :after, :before, :first, :last, :sort)
is_expected.to have_graphql_type(Types::TimelogType.connection_type)
is_expected.to have_graphql_resolver(Resolvers::TimelogResolver)
end
diff --git a/spec/graphql/types/repository/blob_type_spec.rb b/spec/graphql/types/repository/blob_type_spec.rb
index 787b5f4a311..9537fca7322 100644
--- a/spec/graphql/types/repository/blob_type_spec.rb
+++ b/spec/graphql/types/repository/blob_type_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe Types::Repository::BlobType do
+RSpec.describe Types::Repository::BlobType, feature_category: :source_code_management do
+ include GraphqlHelpers
+
specify { expect(described_class.graphql_name).to eq('RepositoryBlob') }
specify do
@@ -48,4 +50,13 @@ RSpec.describe Types::Repository::BlobType do
:language
).at_least
end
+
+ it 'handles blobs of huge size', :aggregate_failures do
+ huge_blob = Blob.new(double)
+ size = 10**10
+ allow(huge_blob).to receive_messages({ size: size, raw_size: size })
+
+ expect(resolve_field(:raw_size, huge_blob)).to eq(size)
+ expect(resolve_field(:size, huge_blob)).to eq(size)
+ end
end
diff --git a/spec/graphql/types/snippet_type_spec.rb b/spec/graphql/types/snippet_type_spec.rb
index f284d88180c..a46c51e0a27 100644
--- a/spec/graphql/types/snippet_type_spec.rb
+++ b/spec/graphql/types/snippet_type_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe GitlabSchema.types['Snippet'] do
:visibility_level, :created_at, :updated_at,
:web_url, :raw_url, :ssh_url_to_repo, :http_url_to_repo,
:notes, :discussions, :user_permissions,
- :description_html, :blobs]
+ :description_html, :blobs, :commenters]
expect(described_class).to have_graphql_fields(*expected_fields)
end
diff --git a/spec/graphql/types/time_tracking/timelog_connection_type_spec.rb b/spec/graphql/types/time_tracking/timelog_connection_type_spec.rb
new file mode 100644
index 00000000000..5cfe561b42c
--- /dev/null
+++ b/spec/graphql/types/time_tracking/timelog_connection_type_spec.rb
@@ -0,0 +1,44 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['TimelogConnection'], feature_category: :team_planning do
+ it 'has the expected fields' do
+ expected_fields = %i[count page_info edges nodes total_spent_time]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+
+ context 'for total_spent_time field' do
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :empty_repo, :public, group: group) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ let_it_be(:timelog1) { create(:issue_timelog, issue: issue, time_spent: 1000) }
+ let_it_be(:timelog2) { create(:issue_timelog, issue: issue, time_spent: 1500) }
+ let_it_be(:timelog3) { create(:issue_timelog, issue: issue, time_spent: 2564) }
+
+ let(:query) do
+ %(
+ {
+ project(fullPath: "#{project.full_path}") {
+ timelogs {
+ totalSpentTime
+ }
+ }
+ }
+ )
+ end
+
+ let(:total_spent_time) { subject.dig('data', 'project', 'timelogs', 'totalSpentTime') }
+
+ subject { GitlabSchema.execute(query, context: { current_user: current_user }).as_json }
+
+ context 'when requested' do
+ it 'returns the total spent time' do
+ expect(total_spent_time).to eq(5064)
+ end
+ end
+ end
+end
diff --git a/spec/graphql/types/time_tracking/timelog_sort_enum_spec.rb b/spec/graphql/types/time_tracking/timelog_sort_enum_spec.rb
new file mode 100644
index 00000000000..ecc11256c85
--- /dev/null
+++ b/spec/graphql/types/time_tracking/timelog_sort_enum_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['TimelogSort'], feature_category: :team_planning do
+ specify { expect(described_class.graphql_name).to eq('TimelogSort') }
+
+ it_behaves_like 'common sort values'
+
+ it 'exposes all the contact sort values' do
+ expect(described_class.values.keys).to include(
+ *%w[
+ SPENT_AT_ASC
+ SPENT_AT_DESC
+ TIME_SPENT_ASC
+ TIME_SPENT_DESC
+ ]
+ )
+ end
+end
diff --git a/spec/graphql/types/timelog_type_spec.rb b/spec/graphql/types/timelog_type_spec.rb
index 3a26ba89e04..59a0e373c5d 100644
--- a/spec/graphql/types/timelog_type_spec.rb
+++ b/spec/graphql/types/timelog_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['Timelog'] do
+RSpec.describe GitlabSchema.types['Timelog'], feature_category: :team_planning do
let_it_be(:fields) { %i[id spent_at time_spent user issue merge_request note summary userPermissions] }
it { expect(described_class.graphql_name).to eq('Timelog') }
diff --git a/spec/graphql/types/user_type_spec.rb b/spec/graphql/types/user_type_spec.rb
index dcf25ff0667..45cb960cf20 100644
--- a/spec/graphql/types/user_type_spec.rb
+++ b/spec/graphql/types/user_type_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe GitlabSchema.types['User'] do
+RSpec.describe GitlabSchema.types['User'], feature_category: :users do
specify { expect(described_class.graphql_name).to eq('User') }
specify do
@@ -20,7 +20,10 @@ RSpec.describe GitlabSchema.types['User'] do
name
username
email
+ emails
publicEmail
+ commitEmail
+ namespaceCommitEmails
avatarUrl
webUrl
webPath
@@ -226,4 +229,20 @@ RSpec.describe GitlabSchema.types['User'] do
is_expected.to have_graphql_type(Types::TimelogType.connection_type)
end
end
+
+ describe 'emails field' do
+ subject { described_class.fields['emails'] }
+
+ it 'returns user emails' do
+ is_expected.to have_graphql_type(Types::Users::EmailType.connection_type)
+ end
+ end
+
+ describe 'namespaceCommitEmails field' do
+ subject { described_class.fields['namespaceCommitEmails'] }
+
+ it 'returns user namespace_commit_emails' do
+ is_expected.to have_graphql_type(Types::Users::NamespaceCommitEmailType.connection_type)
+ end
+ end
end
diff --git a/spec/graphql/types/users/email_type_spec.rb b/spec/graphql/types/users/email_type_spec.rb
new file mode 100644
index 00000000000..fb484915428
--- /dev/null
+++ b/spec/graphql/types/users/email_type_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['Email'], feature_category: :users do
+ it 'has the correct fields' do
+ expected_fields = [
+ :id,
+ :email,
+ :confirmed_at,
+ :created_at,
+ :updated_at
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+
+ specify { expect(described_class).to require_graphql_authorizations(:read_user_email_address) }
+end
diff --git a/spec/graphql/types/users/namespace_commit_email_type_spec.rb b/spec/graphql/types/users/namespace_commit_email_type_spec.rb
new file mode 100644
index 00000000000..ccab881676e
--- /dev/null
+++ b/spec/graphql/types/users/namespace_commit_email_type_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+require 'spec_helper'
+
+RSpec.describe GitlabSchema.types['NamespaceCommitEmail'], feature_category: :users do
+ it 'has the correct fields' do
+ expected_fields = [
+ :id,
+ :email,
+ :namespace,
+ :created_at,
+ :updated_at
+ ]
+
+ expect(described_class).to have_graphql_fields(*expected_fields)
+ end
+
+ specify { expect(described_class).to require_graphql_authorizations(:read_user_email_address) }
+end
diff --git a/spec/helpers/admin/components_helper_spec.rb b/spec/helpers/admin/components_helper_spec.rb
new file mode 100644
index 00000000000..bb590d003ad
--- /dev/null
+++ b/spec/helpers/admin/components_helper_spec.rb
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Admin::ComponentsHelper, feature_category: :database do
+ describe '#database_versions' do
+ let(:expected_version) { '12.13' }
+ let(:expected_hash) do
+ main = {
+ main: { adapter_name: 'PostgreSQL', version: expected_version }
+ }
+ main[:ci] = { adapter_name: 'PostgreSQL', version: expected_version } if Gitlab::Database.has_config?(:ci)
+ main[:geo] = { adapter_name: 'PostgreSQL', version: expected_version } if Gitlab::Database.has_config?(:geo)
+
+ main
+ end
+
+ subject { helper.database_versions }
+
+ before do
+ allow_next_instance_of(Gitlab::Database::Reflection) do |reflection|
+ allow(reflection).to receive(:version).and_return(expected_version)
+ end
+ end
+
+ it 'returns expected database data' do
+ expect(subject).to eq(expected_hash)
+ end
+ end
+end
diff --git a/spec/helpers/appearances_helper_spec.rb b/spec/helpers/appearances_helper_spec.rb
index b3afd350397..8673353996e 100644
--- a/spec/helpers/appearances_helper_spec.rb
+++ b/spec/helpers/appearances_helper_spec.rb
@@ -10,6 +10,20 @@ RSpec.describe AppearancesHelper do
allow(helper).to receive(:current_user).and_return(user)
end
+ describe '#appearance_short_name' do
+ it 'returns the default value' do
+ create(:appearance)
+
+ expect(helper.appearance_short_name).to match('GitLab')
+ end
+
+ it 'returns the customized value' do
+ create(:appearance, pwa_short_name: 'Short')
+
+ expect(helper.appearance_short_name).to match('Short')
+ end
+ end
+
describe '.current_appearance' do
it 'memoizes empty appearance' do
expect(Appearance).to receive(:current).once
diff --git a/spec/helpers/application_helper_spec.rb b/spec/helpers/application_helper_spec.rb
index 3384f9fea05..a8514c373db 100644
--- a/spec/helpers/application_helper_spec.rb
+++ b/spec/helpers/application_helper_spec.rb
@@ -163,6 +163,13 @@ RSpec.describe ApplicationHelper do
expect(timeago_element.attr('class')).to eq 'js-short-timeago'
expect(timeago_element.next_element).to eq nil
end
+
+ it 'returns blank if time is nil' do
+ el = helper.time_ago_with_tooltip(nil)
+
+ expect(el).to eq('')
+ expect(el.html_safe).to eq('')
+ end
end
describe '#active_when' do
@@ -221,28 +228,43 @@ RSpec.describe ApplicationHelper do
end
describe '#instance_review_permitted?' do
- let_it_be(:non_admin_user) { create :user }
- let_it_be(:admin_user) { create :user, :admin }
+ shared_examples 'returns expected result depending on instance setting' do |instance_setting, expected_result|
+ before do
+ allow(::Gitlab::CurrentSettings).to receive(:instance_review_permitted?).and_return(instance_setting)
+ allow(helper).to receive(:current_user).and_return(current_user)
+ end
- before do
- allow(::Gitlab::CurrentSettings).to receive(:instance_review_permitted?).and_return(app_setting)
- allow(helper).to receive(:current_user).and_return(current_user)
+ it { is_expected.to be(expected_result) }
end
subject { helper.instance_review_permitted? }
- where(app_setting: [true, false], is_admin: [true, false, nil])
+ context 'as admin' do
+ let_it_be(:current_user) { build(:user, :admin) }
- with_them do
- let(:current_user) do
- if is_admin.nil?
- nil
- else
- is_admin ? admin_user : non_admin_user
+ context 'when admin mode setting is disabled', :do_not_mock_admin_mode_setting do
+ it_behaves_like 'returns expected result depending on instance setting', true, true
+ it_behaves_like 'returns expected result depending on instance setting', false, false
+ end
+
+ context 'when admin mode setting is enabled' do
+ context 'when in admin mode', :enable_admin_mode do
+ it_behaves_like 'returns expected result depending on instance setting', true, true
+ it_behaves_like 'returns expected result depending on instance setting', false, false
+ end
+
+ context 'when not in admin mode' do
+ it_behaves_like 'returns expected result depending on instance setting', true, false
+ it_behaves_like 'returns expected result depending on instance setting', false, false
end
end
+ end
+
+ context 'as normal user' do
+ let_it_be(:current_user) { build(:user) }
- it { is_expected.to be(app_setting && is_admin) }
+ it_behaves_like 'returns expected result depending on instance setting', true, false
+ it_behaves_like 'returns expected result depending on instance setting', false, false
end
end
@@ -597,16 +619,6 @@ RSpec.describe ApplicationHelper do
it 'returns nil' do
expect(helper.dispensable_render).to be_nil
end
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(dispensable_render: false)
- end
-
- it 'raises an error' do
- expect { helper.dispensable_render }.to raise_error(StandardError)
- end
- end
end
end
@@ -651,16 +663,6 @@ RSpec.describe ApplicationHelper do
it 'returns nil' do
expect(helper.dispensable_render_if_exists).to be_nil
end
-
- context 'when the feature flag is disabled' do
- before do
- stub_feature_flags(dispensable_render: false)
- end
-
- it 'raises an error' do
- expect { helper.dispensable_render_if_exists }.to raise_error(StandardError)
- end
- end
end
end
diff --git a/spec/helpers/button_helper_spec.rb b/spec/helpers/button_helper_spec.rb
index a7f65aa3134..8a5669867bf 100644
--- a/spec/helpers/button_helper_spec.rb
+++ b/spec/helpers/button_helper_spec.rb
@@ -165,6 +165,7 @@ RSpec.describe ButtonHelper do
context 'when no `text` attribute is provided' do
it 'shows copy to clipboard button with default configuration and no text set to copy' do
expect(element.attr('class')).to eq('btn btn-clipboard gl-button btn-default-tertiary btn-icon btn-sm')
+ expect(element.attr('title')).to eq('Copy')
expect(element.attr('type')).to eq('button')
expect(element.attr('aria-label')).to eq('Copy')
expect(element.attr('aria-live')).to eq('polite')
diff --git a/spec/helpers/ci/runners_helper_spec.rb b/spec/helpers/ci/runners_helper_spec.rb
index 1b1edde8faf..6d14abd6574 100644
--- a/spec/helpers/ci/runners_helper_spec.rb
+++ b/spec/helpers/ci/runners_helper_spec.rb
@@ -103,7 +103,7 @@ RSpec.describe Ci::RunnersHelper do
{
runner_enabled_value: Namespace::SR_ENABLED,
runner_disabled_value: Namespace::SR_DISABLED_AND_UNOVERRIDABLE,
- runner_allow_override_value: Namespace::SR_DISABLED_WITH_OVERRIDE
+ runner_allow_override_value: Namespace::SR_DISABLED_AND_OVERRIDABLE
}
end
@@ -197,7 +197,7 @@ RSpec.describe Ci::RunnersHelper do
where(:shared_runners_setting, :is_disabled_and_unoverridable) do
:shared_runners_enabled | "false"
- :disabled_with_override | "false"
+ :disabled_and_overridable | "false"
:disabled_and_unoverridable | "true"
end
diff --git a/spec/helpers/emails_helper_spec.rb b/spec/helpers/emails_helper_spec.rb
index 04653d9ff03..1f7400983da 100644
--- a/spec/helpers/emails_helper_spec.rb
+++ b/spec/helpers/emails_helper_spec.rb
@@ -385,7 +385,7 @@ RSpec.describe EmailsHelper do
context 'with no html tag' do
let(:expected_output) do
- 'Reviewer changed to John'
+ 'John was added as a reviewer.<br>'
end
it 'returns the expected output' do
@@ -395,7 +395,7 @@ RSpec.describe EmailsHelper do
context 'with <strong> tag' do
let(:expected_output) do
- 'Reviewer changed to <strong>John</strong>'
+ '<strong>John</strong> was added as a reviewer.<br>'
end
it 'returns the expected output' do
@@ -410,7 +410,7 @@ RSpec.describe EmailsHelper do
context 'with no html tag' do
let(:expected_output) do
- 'Reviewer changed from John and Mary to Ted'
+ 'Ted was added as a reviewer.<br>John and Mary were removed from reviewers.'
end
it 'returns the expected output' do
@@ -420,7 +420,7 @@ RSpec.describe EmailsHelper do
context 'with <strong> tag' do
let(:expected_output) do
- 'Reviewer changed from <strong>John and Mary</strong> to <strong>Ted</strong>'
+ '<strong>Ted</strong> was added as a reviewer.<br><strong>John and Mary</strong> were removed from reviewers.'
end
it 'returns the expected output' do
@@ -435,7 +435,7 @@ RSpec.describe EmailsHelper do
context 'with no html tag' do
let(:expected_output) do
- 'Reviewer changed from John and Mary to Unassigned'
+ 'All reviewers were removed.'
end
it 'returns the expected output' do
@@ -445,7 +445,7 @@ RSpec.describe EmailsHelper do
context 'with <strong> tag' do
let(:expected_output) do
- 'Reviewer changed from <strong>John and Mary</strong> to <strong>Unassigned</strong>'
+ 'All reviewers were removed.'
end
it 'returns the expected output' do
@@ -460,7 +460,7 @@ RSpec.describe EmailsHelper do
let(:fishy_user) { build(:user, name: "<script>alert('hi')</script>") }
let(:expected_output) do
- 'Reviewer changed to <strong>&lt;script&gt;alert(&#39;hi&#39;)&lt;/script&gt;</strong>'
+ '<strong>&lt;script&gt;alert(&#39;hi&#39;)&lt;/script&gt;</strong> was added as a reviewer.<br>'
end
it 'escapes the html tag' do
@@ -476,7 +476,7 @@ RSpec.describe EmailsHelper do
let(:fishy_user) { build(:user, name: "example.com") }
let(:expected_output) do
- 'Reviewer changed to example_com'
+ 'example_com was added as a reviewer.<br>'
end
it "sanitizes user's name" do
diff --git a/spec/helpers/feature_flags_helper_spec.rb b/spec/helpers/feature_flags_helper_spec.rb
index 228459277ca..786454c6c4d 100644
--- a/spec/helpers/feature_flags_helper_spec.rb
+++ b/spec/helpers/feature_flags_helper_spec.rb
@@ -38,7 +38,7 @@ RSpec.describe FeatureFlagsHelper do
feature_flags_path: "/#{project.full_path}/-/feature_flags",
environments_endpoint: "/#{project.full_path}/-/environments/search.json",
strategy_type_docs_page_path: "/help/operations/feature_flags#feature-flag-strategies",
- environments_scope_docs_path: "/help/ci/environments/index.md#scope-environments-with-specs")
+ environments_scope_docs_path: "/help/ci/environments/index.md#limit-the-environment-scope-of-a-cicd-variable")
end
end
end
diff --git a/spec/helpers/form_helper_spec.rb b/spec/helpers/form_helper_spec.rb
index 1797b0e32cd..7c8c59be409 100644
--- a/spec/helpers/form_helper_spec.rb
+++ b/spec/helpers/form_helper_spec.rb
@@ -162,6 +162,24 @@ RSpec.describe FormHelper do
end
end
+ it 'renders custom messages without the attribute name prefix' do
+ model = double(errors: errors_stub('Error 1'))
+ model.errors.add(:name, 'is already taken')
+ model.errors.add(:code_name, 'This code name is not allowed')
+
+ allow(model.class).to receive(:human_attribute_name) do |attribute|
+ attribute.to_s.capitalize
+ end
+
+ errors = helper.form_errors(model, custom_message: [:code_name])
+
+ aggregate_failures do
+ expect(errors).to include('<li>Error 1</li>')
+ expect(errors).to include('<li>Name is already taken</li>')
+ expect(errors).to include('<li>This code name is not allowed</li>')
+ end
+ end
+
it 'renders help page links' do
stubbed_errors = ActiveModel::Errors.new(double).tap do |errors|
errors.add(:base, 'No text.', help_page_url: 'http://localhost/doc/user/index.html')
diff --git a/spec/helpers/groups/group_members_helper_spec.rb b/spec/helpers/groups/group_members_helper_spec.rb
index 4d1280533dd..a9c6822e2c1 100644
--- a/spec/helpers/groups/group_members_helper_spec.rb
+++ b/spec/helpers/groups/group_members_helper_spec.rb
@@ -55,7 +55,9 @@ RSpec.describe Groups::GroupMembersHelper do
expected = {
source_id: shared_group.id,
can_manage_members: true,
- can_manage_access_requests: true
+ can_manage_access_requests: true,
+ group_name: shared_group.name,
+ group_path: shared_group.full_path
}
expect(subject).to include(expected)
diff --git a/spec/helpers/groups/observability_helper_spec.rb b/spec/helpers/groups/observability_helper_spec.rb
index 6d0a8631f78..ee33a853f9c 100644
--- a/spec/helpers/groups/observability_helper_spec.rb
+++ b/spec/helpers/groups/observability_helper_spec.rb
@@ -22,6 +22,11 @@ RSpec.describe Groups::ObservabilityHelper do
allow(helper).to receive(:params).and_return({ action: 'explore' })
expect(helper.observability_iframe_src(group)).to eq("#{observability_url}/-/#{group.id}/explore")
end
+
+ it 'returns the iframe src for action: datasources' do
+ allow(helper).to receive(:params).and_return({ action: 'datasources' })
+ expect(helper.observability_iframe_src(group)).to eq("#{observability_url}/-/#{group.id}/datasources")
+ end
end
context 'if observability_path exists in params' do
@@ -65,6 +70,11 @@ RSpec.describe Groups::ObservabilityHelper do
allow(helper).to receive(:params).and_return({ action: 'explore' })
expect(helper.observability_iframe_src(group)).to eq("#{observability_url}/explore")
end
+
+ it 'returns the iframe src without group.id for action: datasources' do
+ allow(helper).to receive(:params).and_return({ action: 'datasources' })
+ expect(helper.observability_iframe_src(group)).to eq("#{observability_url}/datasources")
+ end
end
end
@@ -76,12 +86,17 @@ RSpec.describe Groups::ObservabilityHelper do
it 'returns the title for action: manage' do
allow(helper).to receive(:params).and_return({ action: 'manage' })
- expect(helper.observability_page_title).to eq("Manage Dashboards")
+ expect(helper.observability_page_title).to eq("Manage dashboards")
end
it 'returns the title for action: explore' do
allow(helper).to receive(:params).and_return({ action: 'explore' })
- expect(helper.observability_page_title).to eq("Explore")
+ expect(helper.observability_page_title).to eq("Explore telemetry data")
+ end
+
+ it 'returns the title for action: datasources' do
+ allow(helper).to receive(:params).and_return({ action: 'datasources' })
+ expect(helper.observability_page_title).to eq("Data sources")
end
it 'returns the default title for unknown action' do
diff --git a/spec/helpers/import_helper_spec.rb b/spec/helpers/import_helper_spec.rb
index 18cbbdfd804..7a5a69ea5b5 100644
--- a/spec/helpers/import_helper_spec.rb
+++ b/spec/helpers/import_helper_spec.rb
@@ -49,4 +49,20 @@ RSpec.describe ImportHelper do
expect(helper.provider_project_link_url(host_url, full_path)).to match('http://provider.com/repo/path')
end
end
+
+ describe '#import_configure_github_admin_message' do
+ subject { helper.import_configure_github_admin_message }
+
+ it 'returns note for admin' do
+ allow(helper).to receive(:current_user) { instance_double('User', can_admin_all_resources?: true) }
+
+ is_expected.to have_text('Note: As an administrator')
+ end
+
+ it 'returns note for other user' do
+ allow(helper).to receive(:current_user) { instance_double('User', can_admin_all_resources?: false) }
+
+ is_expected.to have_text('Note: Consider asking your GitLab administrator')
+ end
+ end
end
diff --git a/spec/helpers/issuables_helper_spec.rb b/spec/helpers/issuables_helper_spec.rb
index 15b57a4c9eb..f2e3e401766 100644
--- a/spec/helpers/issuables_helper_spec.rb
+++ b/spec/helpers/issuables_helper_spec.rb
@@ -112,19 +112,7 @@ RSpec.describe IssuablesHelper do
context 'when assigned issues count is over 100' do
let_it_be(:issues) { create_list(:issue, 101, project: project, assignees: [user]) }
- before do
- stub_feature_flags(limit_assigned_issues_count: false)
- end
-
- it { is_expected.to eq 101 }
-
- context 'when FF limit_assigned_issues_count is enabled' do
- before do
- stub_feature_flags(limit_assigned_issues_count: true)
- end
-
- it { is_expected.to eq 100 }
- end
+ it { is_expected.to eq 100 }
end
end
end
@@ -142,19 +130,7 @@ RSpec.describe IssuablesHelper do
context 'when assigned issues count is over 99' do
let_it_be(:issues) { create_list(:issue, 100, project: project, assignees: [user]) }
- before do
- stub_feature_flags(limit_assigned_issues_count: false)
- end
-
- it { is_expected.to eq '100' }
-
- context 'when FF limit_assigned_issues_count is enabled' do
- before do
- stub_feature_flags(limit_assigned_issues_count: true)
- end
-
- it { is_expected.to eq '99+' }
- end
+ it { is_expected.to eq '99+' }
end
end
@@ -629,4 +605,28 @@ RSpec.describe IssuablesHelper do
expect(helper.sidebar_milestone_tooltip_label(milestone)).to eq('&lt;img onerror=alert(1)&gt;<br/>Milestone')
end
end
+
+ describe '#hidden_issuable_icon', feature_category: :insider_threat do
+ let_it_be(:mock_svg) { '<svg></svg>'.html_safe }
+
+ before do
+ allow(helper).to receive(:sprite_icon).and_return(mock_svg)
+ end
+
+ context 'when issuable is an issue' do
+ let_it_be(:issuable) { build(:issue) }
+
+ it 'returns icon with tooltip' do
+ expect(helper.hidden_issuable_icon(issuable)).to eq("<span class=\"has-tooltip\" title=\"This issue is hidden because its author has been banned\">#{mock_svg}</span>")
+ end
+ end
+
+ context 'when issuable is a merge request' do
+ let_it_be(:issuable) { build(:merge_request) }
+
+ it 'returns icon with tooltip' do
+ expect(helper.hidden_issuable_icon(issuable)).to eq("<span class=\"has-tooltip\" title=\"This merge request is hidden because its author has been banned\">#{mock_svg}</span>")
+ end
+ end
+ end
end
diff --git a/spec/helpers/issues_helper_spec.rb b/spec/helpers/issues_helper_spec.rb
index ed363268cdf..0024d6b7b4e 100644
--- a/spec/helpers/issues_helper_spec.rb
+++ b/spec/helpers/issues_helper_spec.rb
@@ -266,7 +266,9 @@ RSpec.describe IssuesHelper do
issue_type: 'issue',
new_issue_path: new_project_issue_path(project, { add_related_issue: issue.iid }),
project_path: project.full_path,
- report_abuse_path: new_abuse_report_path(user_id: issue.author.id, ref_url: issue_url(issue)),
+ report_abuse_path: add_category_abuse_reports_path,
+ reported_user_id: issue.author.id,
+ reported_from_url: issue_url(issue),
submit_as_spam_path: mark_as_spam_project_issue_path(project, issue)
}
@@ -389,8 +391,12 @@ RSpec.describe IssuesHelper do
allow(helper).to receive(:url_for).and_return('#')
expected = {
+ autocomplete_award_emojis_path: autocomplete_award_emojis_path,
calendar_path: '#',
- empty_state_svg_path: '#',
+ dashboard_labels_path: dashboard_labels_path(format: :json, include_ancestor_groups: true),
+ dashboard_milestones_path: dashboard_milestones_path(format: :json),
+ empty_state_with_filter_svg_path: '#',
+ empty_state_without_filter_svg_path: '#',
initial_sort: current_user&.user_preference&.issues_sort,
is_public_visibility_restricted: Gitlab::CurrentSettings.restricted_visibility_levels ? 'false' : '',
is_signed_in: current_user.present?.to_s,
@@ -472,43 +478,6 @@ RSpec.describe IssuesHelper do
end
end
- describe '#status_box_class' do
- context 'when object is expired' do
- it 'returns orange background' do
- milestone = build(:milestone, due_date: Date.today.prev_month)
- expect(helper.status_box_class(milestone)).to eq('gl-bg-orange-500')
- end
- end
-
- context 'when object is merged' do
- it 'returns blue background' do
- merge_request = build(:merge_request, :merged)
- expect(helper.status_box_class(merge_request)).to eq('badge-info')
- end
- end
-
- context 'when object is closed' do
- it 'returns red background' do
- merge_request = build(:merge_request, :closed)
- expect(helper.status_box_class(merge_request)).to eq('badge-danger')
- end
- end
-
- context 'when object is upcoming' do
- it 'returns gray background' do
- milestone = build(:milestone, start_date: Date.today.next_month)
- expect(helper.status_box_class(milestone)).to eq('gl-bg-gray-500')
- end
- end
-
- context 'when object is opened' do
- it 'returns green background' do
- merge_request = build(:merge_request, :opened)
- expect(helper.status_box_class(merge_request)).to eq('badge-success')
- end
- end
- end
-
describe '#issue_hidden?' do
context 'when issue is hidden' do
let_it_be(:banned_user) { build(:user, :banned) }
diff --git a/spec/helpers/markup_helper_spec.rb b/spec/helpers/markup_helper_spec.rb
index d1c86abf6e9..088519248c6 100644
--- a/spec/helpers/markup_helper_spec.rb
+++ b/spec/helpers/markup_helper_spec.rb
@@ -449,21 +449,21 @@ RSpec.describe MarkupHelper do
object = create_object('Text with `inline code`')
expected = 'Text with <code>inline code</code>'
- expect(first_line_in_markdown(object, attribute, 100, is_todo: true, project: project)).to match(expected)
+ expect(helper.first_line_in_markdown(object, attribute, 100, is_todo: true, project: project)).to match(expected)
end
it 'truncates the text with multiple paragraphs' do
object = create_object("Paragraph 1\n\nParagraph 2")
expected = 'Paragraph 1...'
- expect(first_line_in_markdown(object, attribute, 100, is_todo: true, project: project)).to match(expected)
+ expect(helper.first_line_in_markdown(object, attribute, 100, is_todo: true, project: project)).to match(expected)
end
it 'displays the first line of a code block' do
object = create_object("```\nCode block\nwith two lines\n```")
expected = %r{<pre.+><code><span class="line">Code block\.\.\.</span>\n</code></pre>}
- expect(first_line_in_markdown(object, attribute, 100, is_todo: true, project: project)).to match(expected)
+ expect(helper.first_line_in_markdown(object, attribute, 100, is_todo: true, project: project)).to match(expected)
end
it 'truncates a single long line of text' do
@@ -471,7 +471,7 @@ RSpec.describe MarkupHelper do
object = create_object(text * 4)
expected = (text * 2).sub(/.{3}/, '...')
- expect(first_line_in_markdown(object, attribute, 150, is_todo: true, project: project)).to match(expected)
+ expect(helper.first_line_in_markdown(object, attribute, 150, is_todo: true, project: project)).to match(expected)
end
it 'preserves code color scheme' do
@@ -480,12 +480,12 @@ RSpec.describe MarkupHelper do
"<code><span class=\"line\"><span class=\"k\">def</span> <span class=\"nf\">test</span>...</span>\n" \
"</code></pre>\n"
- expect(first_line_in_markdown(object, attribute, 150, is_todo: true, project: project)).to eq(expected)
+ expect(helper.first_line_in_markdown(object, attribute, 150, is_todo: true, project: project)).to eq(expected)
end
it 'removes any images' do
object = create_object("![ImageTest](/uploads/test.png)")
- text = first_line_in_markdown(object, attribute, 150, is_todo: true, project: project)
+ text = helper.first_line_in_markdown(object, attribute, 150, is_todo: true, project: project)
expect(text).not_to match('<img')
expect(text).not_to match('<a')
@@ -498,7 +498,7 @@ RSpec.describe MarkupHelper do
create(:label, title: 'label_1', project: project)
object = create_object(label_title, project: project)
- first_line_in_markdown(object, attribute, 150, is_todo: true, project: project)
+ helper.first_line_in_markdown(object, attribute, 150, is_todo: true, project: project)
end
it 'preserves style attribute for a label that can be accessed by current_user' do
@@ -522,7 +522,7 @@ RSpec.describe MarkupHelper do
html = '<i></i> <strong>strong</strong><em>em</em><b>b</b>'
object = create_object(html)
- result = first_line_in_markdown(object, attribute, 100, is_todo: true, project: project)
+ result = helper.first_line_in_markdown(object, attribute, 100, is_todo: true, project: project)
expect(result).to include(html)
end
@@ -531,7 +531,7 @@ RSpec.describe MarkupHelper do
object = create_object("hello \n\n [Test](README.md)")
expect do
- first_line_in_markdown(object, attribute, 100, is_todo: true, project: project)
+ helper.first_line_in_markdown(object, attribute, 100, is_todo: true, project: project)
end.not_to change { Gitlab::GitalyClient.get_request_count }
end
end
diff --git a/spec/helpers/nav_helper_spec.rb b/spec/helpers/nav_helper_spec.rb
index 4a37e17fb08..adf784360c2 100644
--- a/spec/helpers/nav_helper_spec.rb
+++ b/spec/helpers/nav_helper_spec.rb
@@ -134,4 +134,62 @@ RSpec.describe NavHelper do
it { is_expected.to eq(true) }
end
end
+
+ describe '#show_super_sidebar?' do
+ shared_examples '#show_super_sidebar returns false' do
+ it 'returns false' do
+ expect(helper.show_super_sidebar?).to eq(false)
+ end
+ end
+
+ it 'returns false by default' do
+ allow(helper).to receive(:current_user).and_return(nil)
+
+ expect(helper.show_super_sidebar?).to be_falsy
+ end
+
+ context 'when user is signed-in' do
+ let_it_be(:user) { create(:user) }
+
+ before do
+ allow(helper).to receive(:current_user).and_return(user)
+ stub_feature_flags(super_sidebar_nav: new_nav_ff)
+ user.update!(use_new_navigation: user_preference)
+ end
+
+ context 'with feature flag off' do
+ let(:new_nav_ff) { false }
+
+ context 'when user has new nav disabled' do
+ let(:user_preference) { false }
+
+ it_behaves_like '#show_super_sidebar returns false'
+ end
+
+ context 'when user has new nav enabled' do
+ let(:user_preference) { true }
+
+ it_behaves_like '#show_super_sidebar returns false'
+ end
+ end
+
+ context 'with feature flag on' do
+ let(:new_nav_ff) { true }
+
+ context 'when user has new nav disabled' do
+ let(:user_preference) { false }
+
+ it_behaves_like '#show_super_sidebar returns false'
+ end
+
+ context 'when user has new nav enabled' do
+ let(:user_preference) { true }
+
+ it 'returns true' do
+ expect(helper.show_super_sidebar?).to eq(true)
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/helpers/preferences_helper_spec.rb b/spec/helpers/preferences_helper_spec.rb
index 99f750bb858..898999e328e 100644
--- a/spec/helpers/preferences_helper_spec.rb
+++ b/spec/helpers/preferences_helper_spec.rb
@@ -25,15 +25,15 @@ RSpec.describe PreferencesHelper do
it 'provides better option descriptions' do
expect(helper.dashboard_choices).to match_array [
- ['Your Projects (default)', 'projects'],
- ['Starred Projects', 'stars'],
- ["Your Projects' Activity", 'project_activity'],
- ["Starred Projects' Activity", 'starred_project_activity'],
- ["Followed Users' Activity", 'followed_user_activity'],
- ["Your Groups", 'groups'],
- ["Your To-Do List", 'todos'],
- ["Assigned Issues", 'issues'],
- ["Assigned merge requests", 'merge_requests']
+ { text: "Your Projects (default)", value: 'projects' },
+ { text: "Starred Projects", value: 'stars' },
+ { text: "Your Projects' Activity", value: 'project_activity' },
+ { text: "Starred Projects' Activity", value: 'starred_project_activity' },
+ { text: "Followed Users' Activity", value: 'followed_user_activity' },
+ { text: "Your Groups", value: 'groups' },
+ { text: "Your To-Do List", value: 'todos' },
+ { text: "Assigned Issues", value: 'issues' },
+ { text: "Assigned merge requests", value: 'merge_requests' }
]
end
end
@@ -214,9 +214,9 @@ RSpec.describe PreferencesHelper do
stub_user(preferred_language: :en)
expect(helper.language_choices).to eq([
- '<option selected="selected" value="en">English (100% translated)</option>',
- '<option value="es">Spanish - español (65% translated)</option>'
- ].join("\n"))
+ { text: "English (100% translated)", value: 'en' },
+ { text: "Spanish - español (65% translated)", value: 'es' }
+ ])
end
end
diff --git a/spec/helpers/projects/ml/experiments_helper_spec.rb b/spec/helpers/projects/ml/experiments_helper_spec.rb
index e6959a03c4a..2b70201456a 100644
--- a/spec/helpers/projects/ml/experiments_helper_spec.rb
+++ b/spec/helpers/projects/ml/experiments_helper_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe Projects::Ml::ExperimentsHelper, feature_category: :mlops do
let_it_be(:project) { create(:project, :private) }
let_it_be(:experiment) { create(:ml_experiments, user_id: project.creator, project: project) }
let_it_be(:candidate0) do
- create(:ml_candidates, experiment: experiment, user: project.creator).tap do |c|
+ create(:ml_candidates, :with_artifact, experiment: experiment, user: project.creator).tap do |c|
c.params.build([{ name: 'param1', value: 'p1' }, { name: 'param2', value: 'p2' }])
c.metrics.create!(
[{ name: 'metric1', value: 0.1 }, { name: 'metric2', value: 0.2 }, { name: 'metric3', value: 0.3 }]
@@ -18,7 +18,7 @@ RSpec.describe Projects::Ml::ExperimentsHelper, feature_category: :mlops do
end
let_it_be(:candidate1) do
- create(:ml_candidates, experiment: experiment, user: project.creator).tap do |c|
+ create(:ml_candidates, experiment: experiment, user: project.creator, name: 'candidate1').tap do |c|
c.params.build([{ name: 'param2', value: 'p3' }, { name: 'param3', value: 'p4' }])
c.metrics.create!(name: 'metric3', value: 0.4)
end
@@ -27,17 +27,39 @@ RSpec.describe Projects::Ml::ExperimentsHelper, feature_category: :mlops do
let_it_be(:candidates) { [candidate0, candidate1] }
describe '#candidates_table_items' do
- subject { helper.candidates_table_items(candidates) }
+ subject { Gitlab::Json.parse(helper.candidates_table_items(candidates)) }
- it 'creates the correct model for the table' do
- expected_value = [
+ it 'creates the correct model for the table', :aggregate_failures do
+ expected_values = [
{ 'param1' => 'p1', 'param2' => 'p2', 'metric1' => '0.1000', 'metric2' => '0.2000', 'metric3' => '0.3000',
- 'artifact' => nil, 'details' => "/#{project.full_path}/-/ml/candidates/#{candidate0.iid}" },
+ 'artifact' => "/#{project.full_path}/-/packages/#{candidate0.artifact.id}",
+ 'details' => "/#{project.full_path}/-/ml/candidates/#{candidate0.iid}",
+ 'name' => candidate0.name,
+ 'created_at' => candidate0.created_at.strftime('%Y-%m-%dT%H:%M:%S.%LZ'),
+ 'user' => { 'username' => candidate0.user.username, 'path' => "/#{candidate0.user.username}" } },
{ 'param2' => 'p3', 'param3' => 'p4', 'metric3' => '0.4000',
- 'artifact' => nil, 'details' => "/#{project.full_path}/-/ml/candidates/#{candidate1.iid}" }
+ 'artifact' => nil, 'details' => "/#{project.full_path}/-/ml/candidates/#{candidate1.iid}",
+ 'name' => candidate1.name,
+ 'created_at' => candidate1.created_at.strftime('%Y-%m-%dT%H:%M:%S.%LZ'),
+ 'user' => { 'username' => candidate1.user.username, 'path' => "/#{candidate1.user.username}" } }
]
- expect(Gitlab::Json.parse(subject)).to match_array(expected_value)
+ subject.sort_by! { |s| s[:name] }
+
+ expect(subject[0]).to eq(expected_values[0])
+ expect(subject[1]).to eq(expected_values[1])
+ end
+
+ context 'when candidate does not have user' do
+ let(:candidates) { [candidate0] }
+
+ before do
+ allow(candidate0).to receive(:user).and_return(nil)
+ end
+
+ it 'has the user property, but it is nil' do
+ expect(subject[0]['user']).to be_nil
+ end
end
end
@@ -57,9 +79,6 @@ RSpec.describe Projects::Ml::ExperimentsHelper, feature_category: :mlops do
describe '#candidate_as_data' do
let(:candidate) { candidate0 }
- let(:package) do
- create(:generic_package, name: candidate.package_name, version: candidate.package_version, project: project)
- end
subject { Gitlab::Json.parse(helper.candidate_as_data(candidate)) }
@@ -81,7 +100,7 @@ RSpec.describe Projects::Ml::ExperimentsHelper, feature_category: :mlops do
it 'generates the correct info' do
expected_info = {
'iid' => candidate.iid,
- 'path_to_artifact' => "/#{project.full_path}/-/packages/#{package.id}",
+ 'path_to_artifact' => "/#{project.full_path}/-/packages/#{candidate.artifact.id}",
'experiment_name' => candidate.experiment.name,
'path_to_experiment' => "/#{project.full_path}/-/ml/experiments/#{experiment.iid}",
'status' => 'running'
diff --git a/spec/helpers/projects/project_members_helper_spec.rb b/spec/helpers/projects/project_members_helper_spec.rb
index f3201ce0e14..2cc87e8aeb9 100644
--- a/spec/helpers/projects/project_members_helper_spec.rb
+++ b/spec/helpers/projects/project_members_helper_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Projects::ProjectMembersHelper do
include MembersPresentation
let_it_be(:current_user) { create(:user) }
- let_it_be(:project) { create(:project) }
+ let_it_be(:project) { create(:project, group: create(:group)) }
before do
allow(helper).to receive(:current_user).and_return(current_user)
@@ -42,7 +42,9 @@ RSpec.describe Projects::ProjectMembersHelper do
expected = {
source_id: project.id,
can_manage_members: true,
- can_manage_access_requests: true
+ can_manage_access_requests: true,
+ group_name: project.group.name,
+ group_path: project.group.path
}.as_json
expect(subject).to include(expected)
@@ -138,8 +140,8 @@ RSpec.describe Projects::ProjectMembersHelper do
where(:include_relations, :result) do
[:inherited, :direct] | lazy { [group_link_7, group_link_4, group_link_9, group_link_5, group_link_3].map(&:id) }
- [:inherited] | lazy { [group_link_1, group_link_4, group_link_5, group_link_3].map(&:id) }
- [:direct] | lazy { [group_link_7, group_link_8, group_link_9].map(&:id) }
+ [:inherited] | lazy { [group_link_1, group_link_4, group_link_5, group_link_3].map(&:id) }
+ [:direct] | lazy { [group_link_7, group_link_8, group_link_9].map(&:id) }
end
with_them do
diff --git a/spec/helpers/projects_helper_spec.rb b/spec/helpers/projects_helper_spec.rb
index db50c74ec4e..91dd4c46a74 100644
--- a/spec/helpers/projects_helper_spec.rb
+++ b/spec/helpers/projects_helper_spec.rb
@@ -206,7 +206,7 @@ RSpec.describe ProjectsHelper do
it 'loads the pipeline status in batch' do
helper.load_pipeline_status([project])
# Skip lazy loading of the `pipeline_status` attribute
- pipeline_status = project.instance_variable_get('@pipeline_status')
+ pipeline_status = project.instance_variable_get(:@pipeline_status)
expect(pipeline_status).to be_a(Gitlab::Cache::Ci::ProjectPipelineStatus)
end
@@ -1086,7 +1086,7 @@ RSpec.describe ProjectsHelper do
context 'as a user' do
it 'returns a link to contact an administrator' do
- allow(user).to receive(:admin?).and_return(false)
+ allow(user).to receive(:can_admin_all_resources?).and_return(false)
expect(subject).to have_text("To enable importing projects from #{import_method}, ask your GitLab administrator to configure OAuth integration")
end
@@ -1094,7 +1094,7 @@ RSpec.describe ProjectsHelper do
context 'as an administrator' do
it 'returns a link to configure bitbucket' do
- allow(user).to receive(:admin?).and_return(true)
+ allow(user).to receive(:can_admin_all_resources?).and_return(true)
expect(subject).to have_text("To enable importing projects from #{import_method}, as administrator you need to configure OAuth integration")
end
@@ -1333,27 +1333,6 @@ RSpec.describe ProjectsHelper do
end
end
- describe '#fork_divergence_message' do
- using RSpec::Parameterized::TableSyntax
-
- where(:behind, :ahead, :message) do
- 0 | 0 | 'Up to date with upstream repository'
- 1 | 0 | '1 commit behind upstream repository'
- 2 | 0 | '2 commits behind upstream repository'
- 0 | 1 | '1 commit ahead of upstream repository'
- 0 | 2 | '2 commits ahead of upstream repository'
- 5 | 7 | '5 commits behind, 7 commits ahead of upstream repository'
- nil | 7 | 'Fork has diverged from upstream repository'
- 7 | nil | 'Fork has diverged from upstream repository'
- end
-
- with_them do
- it 'returns message based on behind/ahead values' do
- expect(helper.fork_divergence_message({ behind: behind, ahead: ahead })).to eq(message)
- end
- end
- end
-
describe '#localized_project_human_access' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/helpers/search_helper_spec.rb b/spec/helpers/search_helper_spec.rb
index 45864320115..c7afe0bf391 100644
--- a/spec/helpers/search_helper_spec.rb
+++ b/spec/helpers/search_helper_spec.rb
@@ -284,8 +284,24 @@ RSpec.describe SearchHelper, feature_category: :global_search do
allow(self).to receive(:current_user).and_return(admin)
end
- it "includes admin sections" do
- expect(search_autocomplete_opts("admin").size).to eq(1)
+ context 'when admin mode setting is disabled', :do_not_mock_admin_mode_setting do
+ it 'includes admin sections' do
+ expect(search_autocomplete_opts('admin').size).to eq(1)
+ end
+ end
+
+ context 'when admin mode setting is enabled' do
+ context 'when in admin mode', :enable_admin_mode do
+ it 'includes admin sections' do
+ expect(search_autocomplete_opts('admin').size).to eq(1)
+ end
+ end
+
+ context 'when not in admin mode' do
+ it 'does not include admin sections' do
+ expect(search_autocomplete_opts('admin').size).to eq(0)
+ end
+ end
end
end
end
diff --git a/spec/helpers/sidebars_helper_spec.rb b/spec/helpers/sidebars_helper_spec.rb
index 6db955f3637..299e4cb0133 100644
--- a/spec/helpers/sidebars_helper_spec.rb
+++ b/spec/helpers/sidebars_helper_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe SidebarsHelper do
+ include Devise::Test::ControllerHelpers
+
describe '#sidebar_tracking_attributes_by_object' do
subject { helper.sidebar_tracking_attributes_by_object(object) }
@@ -42,4 +44,26 @@ RSpec.describe SidebarsHelper do
end
end
end
+
+ describe '#super_sidebar_context' do
+ let(:user) { build(:user) }
+
+ subject { helper.super_sidebar_context(user) }
+
+ it 'returns sidebar values from user', :use_clean_rails_memory_store_caching do
+ Rails.cache.write(['users', user.id, 'assigned_open_issues_count'], 1)
+ Rails.cache.write(['users', user.id, 'assigned_open_merge_requests_count'], 2)
+ Rails.cache.write(['users', user.id, 'todos_pending_count'], 3)
+
+ expect(subject).to eq({
+ name: user.name,
+ username: user.username,
+ avatar_url: user.avatar_url,
+ assigned_open_issues_count: 1,
+ assigned_open_merge_requests_count: 2,
+ todos_pending_count: 3,
+ issues_dashboard_path: issues_dashboard_path(assignee_username: user.username)
+ })
+ end
+ end
end
diff --git a/spec/helpers/timeboxes_helper_spec.rb b/spec/helpers/timeboxes_helper_spec.rb
index f9fb40a616b..f1f8683825e 100644
--- a/spec/helpers/timeboxes_helper_spec.rb
+++ b/spec/helpers/timeboxes_helper_spec.rb
@@ -2,8 +2,16 @@
require 'spec_helper'
-RSpec.describe TimeboxesHelper do
- describe "#timebox_date_range" do
+RSpec.describe TimeboxesHelper, feature_category: :team_planning do
+ using RSpec::Parameterized::TableSyntax
+
+ let_it_be(:milestone_expired) { build(:milestone, due_date: Date.today.prev_month) }
+ let_it_be(:milestone_closed) { build(:milestone, :closed) }
+ let_it_be(:milestone_upcoming) { build(:milestone, start_date: Date.today.next_month) }
+ let_it_be(:milestone_open) { build(:milestone) }
+ let_it_be(:milestone_closed_and_expired) { build(:milestone, :closed, due_date: Date.today.prev_month) }
+
+ describe '#timebox_date_range' do
let(:yesterday) { Date.yesterday }
let(:tomorrow) { yesterday + 2 }
let(:format) { '%b %-d, %Y' }
@@ -24,11 +32,11 @@ RSpec.describe TimeboxesHelper do
end
end
- describe "#group_milestone_route" do
+ describe '#group_milestone_route' do
let(:group) { build_stubbed(:group) }
- let(:subgroup) { build_stubbed(:group, parent: group, name: "Test Subgrp") }
+ let(:subgroup) { build_stubbed(:group, parent: group, name: 'Test Subgrp') }
- context "when in subgroup" do
+ context 'when in subgroup' do
let(:milestone) { build_stubbed(:group_milestone, group: subgroup) }
it 'generates correct url despite assigned @group' do
@@ -39,22 +47,53 @@ RSpec.describe TimeboxesHelper do
end
end
- describe "#recent_releases_with_counts" do
- let_it_be(:milestone) { create(:milestone) }
- let_it_be(:project) { milestone.project }
+ describe '#recent_releases_with_counts' do
+ let_it_be(:project) { milestone_open.project }
let_it_be(:user) { create(:user) }
- subject { helper.recent_releases_with_counts(milestone, user) }
+ subject { helper.recent_releases_with_counts(milestone_open, user) }
before do
project.add_developer(user)
end
- it "returns releases with counts" do
- _old_releases = create_list(:release, 2, project: project, milestones: [milestone])
- recent_public_releases = create_list(:release, 3, project: project, milestones: [milestone], released_at: '2022-01-01T18:00:00Z')
+ it 'returns releases with counts' do
+ _old_releases = create_list(:release, 2, project: project, milestones: [milestone_open])
+ recent_public_releases = create_list(:release, 3, project: project, milestones: [milestone_open], released_at: '2022-01-01T18:00:00Z')
is_expected.to match([match_array(recent_public_releases), 5, 2])
end
end
+
+ describe '#milestone_status_string' do
+ where(:milestone, :status) do
+ lazy { milestone_expired } | 'Expired'
+ lazy { milestone_closed } | 'Closed'
+ lazy { milestone_closed_and_expired } | 'Closed'
+ lazy { milestone_upcoming } | 'Upcoming'
+ lazy { milestone_open } | 'Open'
+ end
+
+ with_them do
+ it 'returns status string' do
+ expect(helper.milestone_status_string(milestone)).to eq(status)
+ end
+ end
+ end
+
+ describe '#milestone_badge_variant' do
+ where(:milestone, :variant) do
+ lazy { milestone_expired } | :warning
+ lazy { milestone_closed } | :danger
+ lazy { milestone_closed_and_expired } | :danger
+ lazy { milestone_upcoming } | :neutral
+ lazy { milestone_open } | :success
+ end
+
+ with_them do
+ it 'returns badge variant' do
+ expect(helper.milestone_badge_variant(milestone)).to eq(variant)
+ end
+ end
+ end
end
diff --git a/spec/helpers/todos_helper_spec.rb b/spec/helpers/todos_helper_spec.rb
index ca334a04fe9..fcdb41eb4af 100644
--- a/spec/helpers/todos_helper_spec.rb
+++ b/spec/helpers/todos_helper_spec.rb
@@ -43,6 +43,10 @@ RSpec.describe TodosHelper do
create(:todo, target: group)
end
+ let_it_be(:project_access_request_todo) do
+ create(:todo, target: project, action: Todo::MEMBER_ACCESS_REQUESTED)
+ end
+
describe '#todos_count_format' do
it 'shows fuzzy count for 100 or more items' do
expect(helper.todos_count_format(100)).to eq '99+'
@@ -172,7 +176,17 @@ RSpec.describe TodosHelper do
it 'responds with access requests tab' do
path = helper.todo_target_path(group_access_request_todo)
- access_request_path = Gitlab::Routing.url_helpers.group_group_members_url(group, tab: 'access_requests')
+ access_request_path = Gitlab::Routing.url_helpers.group_group_members_path(group, tab: 'access_requests')
+
+ expect(path).to eq(access_request_path)
+ end
+ end
+
+ context 'when a user requests access to project' do
+ it 'responds with access requests tab' do
+ path = helper.todo_target_path(project_access_request_todo)
+
+ access_request_path = Gitlab::Routing.url_helpers.project_project_members_path(project, tab: 'access_requests')
expect(path).to eq(access_request_path)
end
@@ -374,7 +388,7 @@ RSpec.describe TodosHelper do
end
context 'member access requested' do
- context 'when source is group' do
+ context 'when target is group' do
it 'returns group access message' do
group_todo.action = Todo::MEMBER_ACCESS_REQUESTED
@@ -383,6 +397,14 @@ RSpec.describe TodosHelper do
)
end
end
+
+ context 'when target is project' do
+ it 'returns project access message' do
+ expect(helper.todo_action_name(project_access_request_todo)).to eq(
+ format(s_("Todos|has requested access to project %{which}"), which: _(project.name))
+ )
+ end
+ end
end
end
diff --git a/spec/helpers/url_helper_spec.rb b/spec/helpers/url_helper_spec.rb
new file mode 100644
index 00000000000..7955a41b63a
--- /dev/null
+++ b/spec/helpers/url_helper_spec.rb
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe UrlHelper, feature_category: :integrations do
+ describe '#escaped_url' do
+ it 'escapes url' do
+ expect(helper.escaped_url('https://example.com?param=test value')).to eq('https://example.com?param=test%20value')
+ end
+
+ it 'escapes XSS injection' do
+ expect(helper.escaped_url('https://example.com?injected_here"+eval(1)+"'))
+ .to eq('https://example.com?injected_here%22+eval(1)+%22')
+ end
+
+ it 'returns nil if url is nil' do
+ expect(helper.escaped_url(nil)).to be_nil
+ end
+
+ it 'returns nil when url is invalid' do
+ expect(helper.escaped_url('https://?&*^invalid-url'))
+ .to be_nil
+ end
+ end
+end
diff --git a/spec/helpers/users/callouts_helper_spec.rb b/spec/helpers/users/callouts_helper_spec.rb
index 170ae098a2f..a43a73edd53 100644
--- a/spec/helpers/users/callouts_helper_spec.rb
+++ b/spec/helpers/users/callouts_helper_spec.rb
@@ -92,81 +92,32 @@ RSpec.describe Users::CalloutsHelper do
end
end
- describe '.show_registration_enabled_user_callout?' do
+ describe '.show_registration_enabled_user_callout?', :do_not_mock_admin_mode_setting do
let_it_be(:admin) { create(:user, :admin) }
subject { helper.show_registration_enabled_user_callout? }
- context 'when on gitlab.com' do
- before do
- allow(::Gitlab).to receive(:com?).and_return(true)
- allow(helper).to receive(:current_user).and_return(admin)
- stub_application_setting(signup_enabled: true)
- allow(helper).to receive(:user_dismissed?).with(described_class::REGISTRATION_ENABLED_CALLOUT) { false }
- allow(helper.controller).to receive(:controller_path).and_return("admin/users")
- end
-
- it { is_expected.to be false }
- end
-
- context 'when `current_user` is not an admin' do
- before do
- allow(::Gitlab).to receive(:com?).and_return(false)
- allow(helper).to receive(:current_user).and_return(user)
- stub_application_setting(signup_enabled: true)
- allow(helper).to receive(:user_dismissed?).with(described_class::REGISTRATION_ENABLED_CALLOUT) { false }
- allow(helper.controller).to receive(:controller_path).and_return("admin/users")
- end
-
- it { is_expected.to be false }
- end
-
- context 'when signup is disabled' do
- before do
- allow(::Gitlab).to receive(:com?).and_return(false)
- allow(helper).to receive(:current_user).and_return(admin)
- stub_application_setting(signup_enabled: false)
- allow(helper).to receive(:user_dismissed?).with(described_class::REGISTRATION_ENABLED_CALLOUT) { false }
- allow(helper.controller).to receive(:controller_path).and_return("admin/users")
- end
+ using RSpec::Parameterized::TableSyntax
- it { is_expected.to be false }
- end
-
- context 'when user has dismissed callout' do
- before do
- allow(::Gitlab).to receive(:com?).and_return(false)
- allow(helper).to receive(:current_user).and_return(admin)
- stub_application_setting(signup_enabled: true)
- allow(helper).to receive(:user_dismissed?).with(described_class::REGISTRATION_ENABLED_CALLOUT) { true }
- allow(helper.controller).to receive(:controller_path).and_return("admin/users")
- end
-
- it { is_expected.to be false }
+ where(:gitlab_com, :current_user, :signup_enabled, :user_dismissed, :controller_path, :expected_result) do
+ false | ref(:admin) | true | false | 'admin/users' | true
+ true | ref(:admin) | true | false | 'admin/users' | false
+ false | ref(:user) | true | false | 'admin/users' | false
+ false | ref(:admin) | false | false | 'admin/users' | false
+ false | ref(:admin) | true | true | 'admin/users' | false
+ false | ref(:admin) | true | false | 'projects/issues' | false
end
- context 'when controller path is not allowed' do
+ with_them do
before do
- allow(::Gitlab).to receive(:com?).and_return(false)
- allow(helper).to receive(:current_user).and_return(admin)
- stub_application_setting(signup_enabled: true)
- allow(helper).to receive(:user_dismissed?).with(described_class::REGISTRATION_ENABLED_CALLOUT) { false }
- allow(helper.controller).to receive(:controller_path).and_return("projects/issues")
+ allow(::Gitlab).to receive(:com?).and_return(gitlab_com)
+ allow(helper).to receive(:current_user).and_return(current_user)
+ stub_application_setting(signup_enabled: signup_enabled)
+ allow(helper).to receive(:user_dismissed?).with(described_class::REGISTRATION_ENABLED_CALLOUT) { user_dismissed }
+ allow(helper.controller).to receive(:controller_path).and_return(controller_path)
end
- it { is_expected.to be false }
- end
-
- context 'when not gitlab.com, `current_user` is an admin, signup is enabled, user has not dismissed callout, and controller path is allowed' do
- before do
- allow(::Gitlab).to receive(:com?).and_return(false)
- allow(helper).to receive(:current_user).and_return(admin)
- stub_application_setting(signup_enabled: true)
- allow(helper).to receive(:user_dismissed?).with(described_class::REGISTRATION_ENABLED_CALLOUT) { false }
- allow(helper.controller).to receive(:controller_path).and_return("admin/users")
- end
-
- it { is_expected.to be true }
+ it { is_expected.to be expected_result }
end
end
@@ -190,7 +141,7 @@ RSpec.describe Users::CalloutsHelper do
end
end
- describe '.show_security_newsletter_user_callout?' do
+ describe '.show_security_newsletter_user_callout?', :do_not_mock_admin_mode_setting do
let_it_be(:admin) { create(:user, :admin) }
subject { helper.show_security_newsletter_user_callout? }
diff --git a/spec/helpers/version_check_helper_spec.rb b/spec/helpers/version_check_helper_spec.rb
index 2bb85e7b6b8..c76eb08820a 100644
--- a/spec/helpers/version_check_helper_spec.rb
+++ b/spec/helpers/version_check_helper_spec.rb
@@ -49,19 +49,26 @@ RSpec.describe VersionCheckHelper do
describe '#show_security_patch_upgrade_alert?' do
describe 'return conditions' do
- where(:show_version_check, :gitlab_version_check, :result) do
+ where(:feature_enabled, :show_version_check, :gitlab_version_check, :result) do
[
- [false, nil, false],
- [false, { "severity" => "success" }, false],
- [false, { "severity" => "danger" }, false],
- [true, nil, false],
- [true, { "severity" => "success" }, false],
- [true, { "severity" => "danger" }, true]
+ [false, false, nil, false],
+ [false, false, { "severity" => "success" }, false],
+ [false, false, { "severity" => "danger" }, false],
+ [false, true, nil, false],
+ [false, true, { "severity" => "success" }, false],
+ [false, true, { "severity" => "danger" }, false],
+ [true, false, nil, false],
+ [true, false, { "severity" => "success" }, false],
+ [true, false, { "severity" => "danger" }, false],
+ [true, true, nil, false],
+ [true, true, { "severity" => "success" }, false],
+ [true, true, { "severity" => "danger" }, true]
]
end
with_them do
before do
+ stub_feature_flags(critical_security_alert: feature_enabled)
allow(helper).to receive(:show_version_check?).and_return(show_version_check)
allow(helper).to receive(:gitlab_version_check).and_return(gitlab_version_check)
end
diff --git a/spec/lib/api/entities/basic_project_details_spec.rb b/spec/lib/api/entities/basic_project_details_spec.rb
index 8419eb0a932..425252ea315 100644
--- a/spec/lib/api/entities/basic_project_details_spec.rb
+++ b/spec/lib/api/entities/basic_project_details_spec.rb
@@ -2,14 +2,16 @@
require 'spec_helper'
-RSpec.describe API::Entities::BasicProjectDetails do
- let_it_be(:project) { create(:project) }
-
- let(:current_user) { project.first_owner }
+RSpec.describe API::Entities::BasicProjectDetails, feature_category: :api do
+ let_it_be(:project_with_repository_restriction) { create(:project, :public, :repository_private) }
+ let(:member_user) { project_with_repository_restriction.first_owner }
subject(:output) { described_class.new(project, current_user: current_user).as_json }
describe '#default_branch' do
+ let(:current_user) { member_user }
+ let(:project) { project_with_repository_restriction }
+
it 'delegates to Project#default_branch_or_main' do
expect(project).to receive(:default_branch_or_main).twice.and_call_original
@@ -20,7 +22,42 @@ RSpec.describe API::Entities::BasicProjectDetails do
let(:current_user) { nil }
it 'is not included' do
- expect(output.keys).not_to include(:default_branch)
+ expect(output).not_to include(:default_branch)
+ end
+ end
+ end
+
+ describe '#readme_url and #forks_count' do
+ using RSpec::Parameterized::TableSyntax
+ let_it_be(:non_member_user) { create(:user) } # a freshly created user, so it is not a member of the project
+
+ context 'when the public project repository is accessible to the user' do
+ let_it_be(:project_without_restriction) { create(:project, :public) }
+
+ where(:current_user, :project) do
+ ref(:member_user) | ref(:project_without_restriction)
+ ref(:non_member_user) | ref(:project_without_restriction)
+ nil | ref(:project_without_restriction)
+ ref(:member_user) | ref(:project_with_repository_restriction)
+ end
+
+ with_them do
+ it 'exposes readme_url and forks_count' do
+ expect(output).to include readme_url: project.readme_url, forks_count: project.forks_count
+ end
+ end
+ end
+
+ context 'when the public project repository is not accessible to the user' do
+ where(:current_user, :project) do
+ ref(:non_member_user) | ref(:project_with_repository_restriction)
+ nil | ref(:project_with_repository_restriction)
+ end
+
+ with_them do
+ it 'does not expose readme_url and forks_count' do
+ expect(output).not_to include :readme_url, :forks_count
+ end
end
end
end
diff --git a/spec/lib/api/entities/bulk_imports/entity_spec.rb b/spec/lib/api/entities/bulk_imports/entity_spec.rb
index 4de85862ab9..ba8a2ddffcb 100644
--- a/spec/lib/api/entities/bulk_imports/entity_spec.rb
+++ b/spec/lib/api/entities/bulk_imports/entity_spec.rb
@@ -21,7 +21,8 @@ RSpec.describe API::Entities::BulkImports::Entity do
:project_id,
:created_at,
:updated_at,
- :failures
+ :failures,
+ :migrate_projects
)
end
end
diff --git a/spec/lib/api/entities/ml/mlflow/run_info_spec.rb b/spec/lib/api/entities/ml/mlflow/run_info_spec.rb
index d5a37f53e21..db8f106c9fe 100644
--- a/spec/lib/api/entities/ml/mlflow/run_info_spec.rb
+++ b/spec/lib/api/entities/ml/mlflow/run_info_spec.rb
@@ -33,6 +33,20 @@ RSpec.describe API::Entities::Ml::Mlflow::RunInfo do
end
end
+ describe 'run_name' do
+ context 'when nil' do
+ it { is_expected.not_to have_key(:run_name) }
+ end
+
+ context 'when not nil' do
+ before do
+ allow(candidate).to receive(:name).and_return('hello')
+ end
+
+ it { expect(subject[:run_name]).to eq('hello') }
+ end
+ end
+
describe 'experiment_id' do
it 'is the experiment iid as string' do
expect(subject[:experiment_id]).to eq(candidate.experiment.iid.to_s)
diff --git a/spec/lib/api/helpers/members_helpers_spec.rb b/spec/lib/api/helpers/members_helpers_spec.rb
new file mode 100644
index 00000000000..987d5ba9f6c
--- /dev/null
+++ b/spec/lib/api/helpers/members_helpers_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe API::Helpers::MembersHelpers, feature_category: :subgroups do
+ let(:helper) do
+ Class.new.include(described_class).new
+ end
+
+ describe '#source_members' do
+ subject(:source_members) { helper.source_members(source) }
+
+ shared_examples_for 'returns all direct members' do
+ specify do
+ expect(source_members).to match_array(direct_members)
+ end
+ end
+
+ context 'for a group' do
+ let_it_be(:source) { create(:group) }
+ let_it_be(:direct_members) { create_list(:group_member, 2, group: source) }
+
+ it_behaves_like 'returns all direct members'
+ it_behaves_like 'query with source filters'
+
+ context 'when project_members_index_by_project_namespace feature flag is disabled' do
+ before do
+ stub_feature_flags(project_members_index_by_project_namespace: false)
+ end
+
+ it_behaves_like 'returns all direct members'
+ it_behaves_like 'query with source filters'
+ end
+ end
+
+ context 'for a project' do
+ let_it_be(:source) { create(:project, group: create(:group)) }
+ let_it_be(:direct_members) { create_list(:project_member, 2, project: source) }
+
+ it_behaves_like 'returns all direct members'
+ it_behaves_like 'query without source filters'
+
+ context 'when project_members_index_by_project_namespace feature flag is disabled' do
+ before do
+ stub_feature_flags(project_members_index_by_project_namespace: false)
+ end
+
+ it_behaves_like 'returns all direct members'
+ it_behaves_like 'query with source filters'
+ end
+ end
+ end
+end
diff --git a/spec/lib/api/helpers/packages_helpers_spec.rb b/spec/lib/api/helpers/packages_helpers_spec.rb
index a3b21059334..de9d139a7b6 100644
--- a/spec/lib/api/helpers/packages_helpers_spec.rb
+++ b/spec/lib/api/helpers/packages_helpers_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Helpers::PackagesHelpers do
+RSpec.describe API::Helpers::PackagesHelpers, feature_category: :package_registry do
let_it_be(:helper) { Class.new.include(API::Helpers).include(described_class).new }
let_it_be(:project) { create(:project) }
let_it_be(:group) { create(:group) }
@@ -17,6 +17,31 @@ RSpec.describe API::Helpers::PackagesHelpers do
expect(subject).to eq nil
end
+
+ context 'with an allowed required permission' do
+ subject { helper.authorize_packages_access!(project, :read_group) }
+
+ it 'authorizes packages access' do
+ expect(helper).to receive(:require_packages_enabled!)
+ expect(helper).not_to receive(:authorize_read_package!)
+ expect(helper).to receive(:authorize!).with(:read_group, project)
+
+ expect(subject).to eq nil
+ end
+ end
+
+ context 'with a permission that is not allowed' do
+ subject { helper.authorize_packages_access!(project, :read_permission) }
+
+ it 'rejects packages access' do
+ expect(helper).to receive(:require_packages_enabled!)
+ expect(helper).not_to receive(:authorize_read_package!)
+ expect(helper).not_to receive(:authorize!).with(:read_permission, project)
+ expect(helper).to receive(:forbidden!)
+
+ expect(subject).to eq nil
+ end
+ end
end
describe 'authorize_read_package!' do
@@ -32,7 +57,7 @@ RSpec.describe API::Helpers::PackagesHelpers do
it 'calls authorize! with correct subject' do
expect(helper).to receive(:authorize!).with(:read_package, have_attributes(id: subject.id, class: expected_class))
- expect(helper.send('authorize_read_package!', subject)).to eq nil
+ expect(helper.send(:authorize_read_package!, subject)).to eq nil
end
end
end
diff --git a/spec/lib/api/helpers/pagination_strategies_spec.rb b/spec/lib/api/helpers/pagination_strategies_spec.rb
index 16cc10182b0..f6e8e3cc756 100644
--- a/spec/lib/api/helpers/pagination_strategies_spec.rb
+++ b/spec/lib/api/helpers/pagination_strategies_spec.rb
@@ -43,6 +43,14 @@ RSpec.describe API::Helpers::PaginationStrategies do
expect(result).to eq(return_value)
end
+
+ context "with paginator_params" do
+ it 'correctly passes multiple parameters' do
+ expect(paginator).to receive(:paginate).with(relation, parameter_one: true, parameter_two: 'two')
+
+ subject.paginate_with_strategies(relation, nil, paginator_params: { parameter_one: true, parameter_two: 'two' })
+ end
+ end
end
describe '#paginator' do
diff --git a/spec/lib/api/helpers/rate_limiter_spec.rb b/spec/lib/api/helpers/rate_limiter_spec.rb
index 3640c7e30e7..531140a32a3 100644
--- a/spec/lib/api/helpers/rate_limiter_spec.rb
+++ b/spec/lib/api/helpers/rate_limiter_spec.rb
@@ -31,8 +31,8 @@ RSpec.describe API::Helpers::RateLimiter do
end
describe '#check_rate_limit!' do
- it 'calls ApplicationRateLimiter#throttled? with the right arguments' do
- expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled?).with(key, scope: scope).and_return(false)
+ it 'calls ApplicationRateLimiter#throttled_request? with the right arguments' do
+ expect(::Gitlab::ApplicationRateLimiter).to receive(:throttled_request?).with(request, user, key, scope: scope).and_return(false)
expect(subject).not_to receive(:render_api_error!)
subject.check_rate_limit!(key, scope: scope)
diff --git a/spec/lib/api/helpers_spec.rb b/spec/lib/api/helpers_spec.rb
index d24a3bd13c0..a0f5ee1ea95 100644
--- a/spec/lib/api/helpers_spec.rb
+++ b/spec/lib/api/helpers_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Helpers do
+RSpec.describe API::Helpers, feature_category: :not_owned do
using RSpec::Parameterized::TableSyntax
subject(:helper) { Class.new.include(described_class).new }
@@ -11,7 +11,7 @@ RSpec.describe API::Helpers do
include Rack::Test::Methods
let(:user) { build(:user, id: 42) }
-
+ let(:request) { instance_double(Rack::Request) }
let(:helper) do
Class.new(Grape::API::Instance) do
helpers API::APIGuard::HelperMethods
@@ -797,12 +797,13 @@ RSpec.describe API::Helpers do
describe '#present_artifacts_file!' do
context 'with object storage' do
let(:artifact) { create(:ci_job_artifact, :zip, :remote_store) }
+ let(:is_head_request) { false }
subject { helper.present_artifacts_file!(artifact.file) }
before do
allow(helper).to receive(:env).and_return({})
-
+ allow(helper).to receive(:request).and_return(instance_double(Rack::Request, head?: is_head_request))
stub_artifacts_object_storage(enabled: true)
end
@@ -814,6 +815,18 @@ RSpec.describe API::Helpers do
subject
end
+
+ context 'requested with HEAD' do
+ let(:is_head_request) { true }
+
+ it 'redirects to a CDN-fronted URL' do
+ expect(helper).to receive(:redirect)
+ expect(helper).to receive(:signed_head_url).and_call_original
+ expect(Gitlab::ApplicationContext).to receive(:push).with(artifact: artifact.file.model).and_call_original
+
+ subject
+ end
+ end
end
end
diff --git a/spec/lib/atlassian/jira_connect/jwt/asymmetric_spec.rb b/spec/lib/atlassian/jira_connect/jwt/asymmetric_spec.rb
index 89c85489aea..c14193660e9 100644
--- a/spec/lib/atlassian/jira_connect/jwt/asymmetric_spec.rb
+++ b/spec/lib/atlassian/jira_connect/jwt/asymmetric_spec.rb
@@ -90,7 +90,7 @@ RSpec.describe Atlassian::JiraConnect::Jwt::Asymmetric, feature_category: :integ
it { is_expected.not_to be_valid }
end
- context 'with jira_connect_proxy_url setting' do
+ context 'with jira_connect_proxy_url setting', :aggregate_failures do
let(:stub_asymmetric_jwt_cdn) { 'https://example.com/-/jira_connect/public_keys' }
let(:jira_connect_proxy_url_setting) { 'https://example.com' }
@@ -101,6 +101,19 @@ RSpec.describe Atlassian::JiraConnect::Jwt::Asymmetric, feature_category: :integ
expect(WebMock).to have_requested(:get, "https://example.com/-/jira_connect/public_keys/#{public_key_id}")
end
+
+ context 'when the setting is an empty string', :aggregate_failures do
+ let(:jira_connect_proxy_url_setting) { '' }
+ let(:stub_asymmetric_jwt_cdn) { 'https://connect-install-keys.atlassian.com' }
+
+ it 'requests the default CDN' do
+ expect(JWT).to receive(:decode).twice.and_call_original
+
+ expect(asymmetric_jwt).to be_valid
+
+ expect(WebMock).to have_requested(:get, install_keys_url)
+ end
+ end
end
end
diff --git a/spec/lib/banzai/filter/inline_observability_filter_spec.rb b/spec/lib/banzai/filter/inline_observability_filter_spec.rb
index 341ada6d2b5..fb1ba46e76c 100644
--- a/spec/lib/banzai/filter/inline_observability_filter_spec.rb
+++ b/spec/lib/banzai/filter/inline_observability_filter_spec.rb
@@ -7,27 +7,49 @@ RSpec.describe Banzai::Filter::InlineObservabilityFilter do
let(:input) { %(<a href="#{url}">example</a>) }
let(:doc) { filter(input) }
+ let(:group) { create(:group) }
+ let(:user) { create(:user) }
- context 'when the document has an external link' do
- let(:url) { 'https://foo.com' }
+ describe '#filter?' do
+ context 'when the document has an external link' do
+ let(:url) { 'https://foo.com' }
- it 'leaves regular non-observability links unchanged' do
- expect(doc.to_s).to eq(input)
+ it 'leaves regular non-observability links unchanged' do
+ expect(doc.to_s).to eq(input)
+ end
end
- end
- context 'when the document contains an embeddable observability link' do
- let(:url) { 'https://observe.gitlab.com/12345' }
+ context 'when the document contains an embeddable observability link' do
+ let(:url) { 'https://observe.gitlab.com/12345' }
+
+ it 'leaves the original link unchanged' do
+ expect(doc.at_css('a').to_s).to eq(input)
+ end
+
+ it 'appends an observability charts placeholder' do
+ node = doc.at_css('.js-render-observability')
- it 'leaves the original link unchanged' do
- expect(doc.at_css('a').to_s).to eq(input)
+ expect(node).to be_present
+ expect(node.attribute('data-frame-url').to_s).to eq(url)
+ end
end
- it 'appends a observability charts placeholder' do
- node = doc.at_css('.js-render-observability')
+ context 'when feature flag is disabled' do
+ let(:url) { 'https://observe.gitlab.com/12345' }
+
+ before do
+ stub_feature_flags(observability_group_tab: false)
+ end
+
+ it 'leaves the original link unchanged' do
+ expect(doc.at_css('a').to_s).to eq(input)
+ end
+
+ it 'does not append an observability charts placeholder' do
+ node = doc.at_css('.js-render-observability')
- expect(node).to be_present
- expect(node.attribute('data-frame-url').to_s).to eq(url)
+ expect(node).not_to be_present
+ end
end
end
end
diff --git a/spec/lib/banzai/filter/math_filter_spec.rb b/spec/lib/banzai/filter/math_filter_spec.rb
index c5d2bcd5363..374983e40a1 100644
--- a/spec/lib/banzai/filter/math_filter_spec.rb
+++ b/spec/lib/banzai/filter/math_filter_spec.rb
@@ -2,14 +2,15 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::MathFilter do
+RSpec.describe Banzai::Filter::MathFilter, feature_category: :team_planning do
using RSpec::Parameterized::TableSyntax
include FilterSpecHelper
shared_examples 'inline math' do
it 'removes surrounding dollar signs and adds class code, math and js-render-math' do
- doc = filter(text)
- expected = result_template.gsub('<math>', '<code class="code math js-render-math" data-math-style="inline">')
+ doc = pipeline_filter(text)
+
+ expected = result_template.gsub('<math>', '<code data-math-style="inline" class="code math js-render-math">')
expected.gsub!('</math>', '</code>')
expect(doc.to_s).to eq expected
@@ -17,12 +18,12 @@ RSpec.describe Banzai::Filter::MathFilter do
end
shared_examples 'display math' do
- let_it_be(:template_prefix_with_pre) { '<pre class="code math js-render-math" data-math-style="display"><code>' }
- let_it_be(:template_prefix_with_code) { '<code class="code math js-render-math" data-math-style="display">' }
+ let_it_be(:template_prefix_with_pre) { '<pre lang="math" data-math-style="display" class="js-render-math"><code>' }
+ let_it_be(:template_prefix_with_code) { '<code data-math-style="display" class="code math js-render-math">' }
let(:use_pre_tags) { false }
it 'removes surrounding dollar signs and adds class code, math and js-render-math' do
- doc = filter(text)
+ doc = pipeline_filter(text)
template_prefix = use_pre_tags ? template_prefix_with_pre : template_prefix_with_code
template_suffix = "</code>#{'</pre>' if use_pre_tags}"
@@ -36,36 +37,38 @@ RSpec.describe Banzai::Filter::MathFilter do
describe 'inline math using $...$ syntax' do
context 'with valid syntax' do
where(:text, :result_template) do
- '$2+2$' | '<math>2+2</math>'
- '$22+1$ and $22 + a^2$' | '<math>22+1</math> and <math>22 + a^2</math>'
- '$22 and $2+2$' | '$22 and <math>2+2</math>'
- '$2+2$ $22 and flightjs/Flight$22 $2+2$' | '<math>2+2</math> $22 and flightjs/Flight$22 <math>2+2</math>'
- '$1/2$ &lt;b&gt;test&lt;/b&gt;' | '<math>1/2</math> &lt;b&gt;test&lt;/b&gt;'
- '$a!$' | '<math>a!</math>'
- '$x$' | '<math>x</math>'
+ '$2+2$' | '<p><math>2+2</math></p>'
+ '$22+1$ and $22 + a^2$' | '<p><math>22+1</math> and <math>22 + a^2</math></p>'
+ '$22 and $2+2$' | '<p>$22 and <math>2+2</math></p>'
+ '$2+2$ $22 and flightjs/Flight$22 $2+2$' | '<p><math>2+2</math> $22 and flightjs/Flight$22 <math>2+2</math></p>'
+ '$1/2$ &lt;b&gt;test&lt;/b&gt;' | '<p><math>1/2</math> &lt;b&gt;test&lt;/b&gt;</p>'
+ '$a!$' | '<p><math>a!</math></p>'
+ '$x$' | '<p><math>x</math></p>'
+ '$1+2\$$' | '<p><math>1+2\$</math></p>'
+ '$1+\$2$' | '<p><math>1+\$2</math></p>'
+ '$1+\%2$' | '<p><math>1+\%2</math></p>'
+ '$1+\#2$' | '<p><math>1+\#2</math></p>'
+ '$1+\&2$' | '<p><math>1+\&amp;2</math></p>'
+ '$1+\{2$' | '<p><math>1+\{2</math></p>'
+ '$1+\}2$' | '<p><math>1+\}2</math></p>'
+ '$1+\_2$' | '<p><math>1+\_2</math></p>'
end
with_them do
it_behaves_like 'inline math'
end
end
-
- it 'does not handle dollar literals properly' do
- doc = filter('$20+30\$$')
- expected = '<code class="code math js-render-math" data-math-style="inline">20+30\\</code>$'
-
- expect(doc.to_s).to eq expected
- end
end
describe 'inline math using $`...`$ syntax' do
context 'with valid syntax' do
where(:text, :result_template) do
- '$<code>2+2</code>$' | '<math>2+2</math>'
- '$<code>22+1</code>$ and $<code>22 + a^2</code>$' | '<math>22+1</math> and <math>22 + a^2</math>'
- '$22 and $<code>2+2</code>$' | '$22 and <math>2+2</math>'
- '$<code>2+2</code>$ $22 and flightjs/Flight$22 $<code>2+2</code>$' | '<math>2+2</math> $22 and flightjs/Flight$22 <math>2+2</math>'
- 'test $$<code>2+2</code>$$ test' | 'test $<math>2+2</math>$ test'
+ '$`2+2`$' | '<p><math>2+2</math></p>'
+ '$`22+1`$ and $`22 + a^2`$' | '<p><math>22+1</math> and <math>22 + a^2</math></p>'
+ '$22 and $`2+2`$' | '<p>$22 and <math>2+2</math></p>'
+ '$`2+2`$ $22 and flightjs/Flight$22 $`2+2`$' | '<p><math>2+2</math> $22 and flightjs/Flight$22 <math>2+2</math></p>'
+ 'test $$`2+2`$$ test' | '<p>test $<math>2+2</math>$ test</p>'
+ '$`1+\$2`$' | '<p><math>1+\$2</math></p>'
end
with_them do
@@ -77,15 +80,15 @@ RSpec.describe Banzai::Filter::MathFilter do
describe 'inline display math using $$...$$ syntax' do
context 'with valid syntax' do
where(:text, :result_template) do
- '$$2+2$$' | '<math>2+2</math>'
- '$$ 2+2 $$' | '<math>2+2</math>'
- '$$22+1$$ and $$22 + a^2$$' | '<math>22+1</math> and <math>22 + a^2</math>'
- '$22 and $$2+2$$' | '$22 and <math>2+2</math>'
- '$$2+2$$ $22 and flightjs/Flight$22 $$2+2$$' | '<math>2+2</math> $22 and flightjs/Flight$22 <math>2+2</math>'
- 'flightjs/Flight$22 and $$a^2 + b^2 = c^2$$' | 'flightjs/Flight$22 and <math>a^2 + b^2 = c^2</math>'
- '$$a!$$' | '<math>a!</math>'
- '$$x$$' | '<math>x</math>'
- '$$20,000 and $$30,000' | '<math>20,000 and</math>30,000'
+ '$$2+2$$' | '<p><math>2+2</math></p>'
+ '$$ 2+2 $$' | '<p><math>2+2</math></p>'
+ '$$22+1$$ and $$22 + a^2$$' | '<p><math>22+1</math> and <math>22 + a^2</math></p>'
+ '$22 and $$2+2$$' | '<p>$22 and <math>2+2</math></p>'
+ '$$2+2$$ $22 and flightjs/Flight$22 $$2+2$$' | '<p><math>2+2</math> $22 and flightjs/Flight$22 <math>2+2</math></p>'
+ 'flightjs/Flight$22 and $$a^2 + b^2 = c^2$$' | '<p>flightjs/Flight$22 and <math>a^2 + b^2 = c^2</math></p>'
+ '$$a!$$' | '<p><math>a!</math></p>'
+ '$$x$$' | '<p><math>x</math></p>'
+ '$$20,000 and $$30,000' | '<p><math>20,000 and</math>30,000</p>'
end
with_them do
@@ -97,8 +100,8 @@ RSpec.describe Banzai::Filter::MathFilter do
describe 'block display math using $$\n...\n$$ syntax' do
context 'with valid syntax' do
where(:text, :result_template) do
- "$$\n2+2\n$$" | "<math>2+2</math>"
- "$$\n2+2\n3+4\n$$" | "<math>2+2\n3+4</math>"
+ "$$\n2+2\n$$" | "<math>2+2\n</math>"
+ "$$\n2+2\n3+4\n$$" | "<math>2+2\n3+4\n</math>"
end
with_them do
@@ -107,72 +110,96 @@ RSpec.describe Banzai::Filter::MathFilter do
end
end
end
+
+ context 'when it spans multiple lines' do
+ let(:math) do
+ <<~MATH
+ \\begin{align*}
+ \\Delta t \\frac{d(b_i, a_i)}{c} + \\Delta t_{b_i}
+ \\end{align*}
+ MATH
+ end
+
+ let(:text) { "$$\n#{math}$$" }
+ let(:result_template) { "<math>#{math}</math>" }
+
+ it_behaves_like 'display math' do
+ let(:use_pre_tags) { true }
+ end
+ end
+
+ context 'when it contains \\' do
+ let(:math) do
+ <<~MATH
+ E = mc^2 \\\\
+ E = \\$mc^2
+ MATH
+ end
+
+ let(:text) { "$$\n#{math}$$" }
+ let(:result_template) { "<math>#{math}</math>" }
+
+ it_behaves_like 'display math' do
+ let(:use_pre_tags) { true }
+ end
+ end
end
describe 'display math using ```math...``` syntax' do
it 'adds data-math-style display attribute to display math' do
- doc = filter('<pre lang="math"><code>2+2</code></pre>')
+ doc = pipeline_filter("```math\n2+2\n```")
pre = doc.xpath('descendant-or-self::pre').first
expect(pre['data-math-style']).to eq 'display'
end
it 'adds js-render-math class to display math' do
- doc = filter('<pre lang="math"><code>2+2</code></pre>')
+ doc = pipeline_filter("```math\n2+2\n```")
pre = doc.xpath('descendant-or-self::pre').first
expect(pre[:class]).to include("js-render-math")
end
it 'ignores code blocks that are not math' do
- input = '<pre lang="plaintext"><code>2+2</code></pre>'
- doc = filter(input)
+ input = "```plaintext\n2+2\n```"
+ doc = pipeline_filter(input)
- expect(doc.to_s).to eq input
+ expect(doc.to_s).to eq "<pre lang=\"plaintext\"><code>2+2\n</code></pre>"
end
it 'requires the pre to contain both code and math' do
input = '<pre lang="math">something</pre>'
- doc = filter(input)
+ doc = pipeline_filter(input)
expect(doc.to_s).to eq input
end
-
- it 'dollar signs around to display math' do
- doc = filter('$<pre lang="math"><code>2+2</code></pre>$')
- before = doc.xpath('descendant-or-self::text()[1]').first
- after = doc.xpath('descendant-or-self::text()[3]').first
-
- expect(before.to_s).to eq '$'
- expect(after.to_s).to eq '$'
- end
end
describe 'unrecognized syntax' do
- where(:text) do
- [
- '<code>2+2</code>',
- 'test $<code>2+2</code> test',
- 'test <code>2+2</code>$ test',
- '<em>$</em><code>2+2</code><em>$</em>',
- '$20,000 and $30,000',
- '$20,000 in $USD',
- '$ a^2 $',
- "test $$\n2+2\n$$",
- "$\n$",
- '$$$'
- ]
+ where(:text, :result) do
+ '`2+2`' | '<p><code>2+2</code></p>'
+ 'test $`2+2` test' | '<p>test $<code>2+2</code> test</p>'
+ 'test `2+2`$ test' | '<p>test <code>2+2</code>$ test</p>'
+ '$20,000 and $30,000' | '<p>$20,000 and $30,000</p>'
+ '$20,000 in $USD' | '<p>$20,000 in $USD</p>'
+ '$ a^2 $' | '<p>$ a^2 $</p>'
+ "test $$\n2+2\n$$" | "<p>test $$\n2+2\n$$</p>"
+ "$\n$" | "<p>$\n$</p>"
+ '$$$' | '<p>$$$</p>'
+ '`$1+2$`' | '<p><code>$1+2$</code></p>'
+ '`$$1+2$$`' | '<p><code>$$1+2$$</code></p>'
+ '`$\$1+2$$`' | '<p><code>$\$1+2$$</code></p>'
end
with_them do
it 'is ignored' do
- expect(filter(text).to_s).to eq text
+ expect(pipeline_filter(text).to_s).to eq result
end
end
end
it 'handles multiple styles in one text block' do
- doc = filter('$<code>2+2</code>$ + $3+3$ + $$4+4$$')
+ doc = pipeline_filter('$`2+2`$ + $3+3$ + $$4+4$$')
expect(doc.search('.js-render-math').count).to eq(3)
expect(doc.search('[data-math-style="inline"]').count).to eq(2)
@@ -182,15 +209,17 @@ RSpec.describe Banzai::Filter::MathFilter do
it 'limits how many elements can be marked as math' do
stub_const('Banzai::Filter::MathFilter::RENDER_NODES_LIMIT', 2)
- doc = filter('$<code>2+2</code>$ + $<code>3+3</code>$ + $<code>4+4</code>$')
+ doc = pipeline_filter('$`2+2`$ + $3+3$ + $$4+4$$')
expect(doc.search('.js-render-math').count).to eq(2)
end
- it 'does not recognize new syntax when feature flag is off' do
- stub_feature_flags(markdown_dollar_math: false)
- doc = filter('$1+2$')
+ def pipeline_filter(text)
+ context = { project: nil, no_sourcepos: true }
+ doc = Banzai::Pipeline::PreProcessPipeline.call(text, {})
+ doc = Banzai::Pipeline::PlainMarkdownPipeline.call(doc[:output], context)
+ doc = Banzai::Filter::SanitizationFilter.call(doc[:output], context, nil)
- expect(doc.to_s).to eq '$1+2$'
+ filter(doc)
end
end
diff --git a/spec/lib/banzai/filter/references/reference_filter_spec.rb b/spec/lib/banzai/filter/references/reference_filter_spec.rb
index 6d7396ef216..88404f2039d 100644
--- a/spec/lib/banzai/filter/references/reference_filter_spec.rb
+++ b/spec/lib/banzai/filter/references/reference_filter_spec.rb
@@ -189,9 +189,9 @@ RSpec.describe Banzai::Filter::References::ReferenceFilter do
let(:filter) { described_class.new(document, project: project) }
it 'updates all new nodes', :aggregate_failures do
- filter.instance_variable_set('@nodes', nodes)
+ filter.instance_variable_set(:@nodes, nodes)
- expect(filter).to receive(:call) { filter.instance_variable_set('@new_nodes', new_nodes) }
+ expect(filter).to receive(:call) { filter.instance_variable_set(:@new_nodes, new_nodes) }
expect(filter).to receive(:with_update_nodes).and_call_original
expect(filter).to receive(:update_nodes!).and_call_original
@@ -212,7 +212,7 @@ RSpec.describe Banzai::Filter::References::ReferenceFilter do
expect_next_instance_of(described_class) do |filter|
expect(filter).to receive(:call_and_update_nodes).and_call_original
expect(filter).to receive(:with_update_nodes).and_call_original
- expect(filter).to receive(:call) { filter.instance_variable_set('@new_nodes', new_nodes) }
+ expect(filter).to receive(:call) { filter.instance_variable_set(:@new_nodes, new_nodes) }
expect(filter).to receive(:update_nodes!).and_call_original
end
diff --git a/spec/lib/banzai/filter/repository_link_filter_spec.rb b/spec/lib/banzai/filter/repository_link_filter_spec.rb
index 0df680dc0c8..b2162ea2756 100644
--- a/spec/lib/banzai/filter/repository_link_filter_spec.rb
+++ b/spec/lib/banzai/filter/repository_link_filter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Filter::RepositoryLinkFilter do
+RSpec.describe Banzai::Filter::RepositoryLinkFilter, feature_category: :team_planning do
include RepoHelpers
def filter(doc, contexts = {})
@@ -303,6 +303,12 @@ RSpec.describe Banzai::Filter::RepositoryLinkFilter do
expect(doc.at_css('img')['src']).to eq "/#{project_path}/-/raw/#{Addressable::URI.escape(ref)}/#{escaped}"
end
+ it 'supports percent sign in filenames' do
+ doc = filter(link('doc/api/README%.md'))
+ expect(doc.at_css('a')['href'])
+ .to eq "/#{project_path}/-/blob/#{ref}/doc/api/README%25.md"
+ end
+
context 'when requested path is a file in the repo' do
let(:requested_path) { 'doc/api/README.md' }
diff --git a/spec/lib/banzai/filter/service_desk_upload_link_filter_spec.rb b/spec/lib/banzai/filter/service_desk_upload_link_filter_spec.rb
new file mode 100644
index 00000000000..08d6fe03606
--- /dev/null
+++ b/spec/lib/banzai/filter/service_desk_upload_link_filter_spec.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Filter::ServiceDeskUploadLinkFilter, feature_category: :service_desk do
+ def filter(doc, contexts = {})
+ described_class.call(doc, contexts)
+ end
+
+ def link(path, text)
+ %(<a href="#{path}">#{text}</a>)
+ end
+
+ let(:file_name) { 'test.jpg' }
+ let(:secret) { 'e90decf88d8f96fe9e1389afc2e4a91f' }
+ let(:upload_path) { "/uploads/#{secret}/#{file_name}" }
+ let(:html_link) { link(upload_path, file_name) }
+
+ context 'when replace_upload_links is enabled' do
+ context 'when it has only one attachment to replace' do
+ let(:contexts) { { uploads_as_attachments: ["#{secret}/#{file_name}"] } }
+
+ context 'when filename in text is same as in link' do
+ it 'replaces the link with the original filename in a strong tag' do
+ doc = filter(html_link, contexts)
+
+ expect(doc.at_css('a')).to be_nil
+ expect(doc.at_css('strong').text).to eq(file_name)
+ end
+ end
+
+ context 'when filename in text is not same as in link' do
+ let(:filename_in_text) { 'Custom name' }
+ let(:html_link) { link(upload_path, filename_in_text) }
+
+ it 'replaces the link with filename in text & original filename, in strong' do
+ doc = filter(html_link, contexts)
+
+ expect(doc.at_css('a')).to be_nil
+ expect(doc.at_css('strong').text).to eq("#{filename_in_text} (#{file_name})")
+ end
+ end
+ end
+
+ context 'when it has more than one attachment to replace' do
+ let(:file_name_1) { 'test1.jpg' }
+ let(:secret_1) { '17817c73e368777e6f743392e334fb8a' }
+ let(:upload_path_1) { "/uploads/#{secret_1}/#{file_name_1}" }
+ let(:html_link_1) { link(upload_path_1, file_name_1) }
+
+ context 'when all of the uploads can be replaced' do
+ let(:contexts) { { uploads_as_attachments: ["#{secret}/#{file_name}", "#{secret_1}/#{file_name_1}"] } }
+
+ it 'replaces all links with the original filenames in strong tags' do
+ doc = filter("#{html_link} #{html_link_1}", contexts)
+
+ expect(doc.at_css('a')).to be_nil
+ expect(doc.at_css("strong:contains('#{file_name}')")).not_to be_nil
+ expect(doc.at_css("strong:contains('#{file_name_1}')")).not_to be_nil
+ end
+ end
+
+ context 'when not all of the uploads can be replaced' do
+ let(:contexts) { { uploads_as_attachments: ["#{secret}/#{file_name}"] } }
+
+ it 'replaces only the matching links with the original filename in a strong tag' do
+ doc = filter("#{html_link} #{html_link_1}", contexts)
+
+ expect(doc.at_css("strong:contains('#{file_name}')")).not_to be_nil
+ expect(doc.at_css("a:contains('#{file_name_1}')")).not_to be_nil
+ end
+ end
+ end
+ end
+
+ context 'when uploads_as_attachments is empty' do
+ let(:contexts) { { uploads_as_attachments: [] } }
+
+ it 'does not replace the link' do
+ doc = filter(html_link, contexts)
+
+ expect(doc.at_css('a')).not_to be_nil
+ expect(doc.at_css('a')['href']).to eq upload_path
+ end
+ end
+end
diff --git a/spec/lib/banzai/pipeline/full_pipeline_spec.rb b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
index 1a0f5a53a23..c1d5f16b562 100644
--- a/spec/lib/banzai/pipeline/full_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/full_pipeline_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Banzai::Pipeline::FullPipeline do
+RSpec.describe Banzai::Pipeline::FullPipeline, feature_category: :team_planning do
describe 'References' do
let(:project) { create(:project, :public) }
let(:issue) { create(:issue, project: project) }
@@ -164,7 +164,7 @@ RSpec.describe Banzai::Pipeline::FullPipeline do
markdown = '_@test\__'
output = described_class.to_html(markdown, project: project)
- expect(output).to include('<em>@test_</em>')
+ expect(output).to include('<em>@test<span>_</span></em>')
end
end
diff --git a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
index 536f2a67415..0e4a4e4492e 100644
--- a/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
+++ b/spec/lib/banzai/pipeline/plain_markdown_pipeline_spec.rb
@@ -2,24 +2,25 @@
require 'spec_helper'
-RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do
+RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline, feature_category: :team_planning do
using RSpec::Parameterized::TableSyntax
describe 'backslash escapes', :aggregate_failures do
let_it_be(:project) { create(:project, :public) }
let_it_be(:issue) { create(:issue, project: project) }
- it 'converts all reference punctuation to literals' do
- reference_chars = Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS
- markdown = reference_chars.split('').map { |char| char.prepend("\\") }.join
- punctuation = Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS.split('')
- punctuation = punctuation.delete_if { |char| char == '&' }
- punctuation << '&amp;'
+ it 'converts all escapable punctuation to literals' do
+ markdown = Banzai::Filter::MarkdownPreEscapeFilter::ESCAPABLE_CHARS.pluck(:escaped).join
result = described_class.call(markdown, project: project)
output = result[:output].to_html
- punctuation.each { |char| expect(output).to include("<span>#{char}</span>") }
+ Banzai::Filter::MarkdownPreEscapeFilter::ESCAPABLE_CHARS.pluck(:char).each do |char|
+ char = '&amp;' if char == '&'
+
+ expect(output).to include("<span>#{char}</span>")
+ end
+
expect(result[:escaped_literals]).to be_truthy
end
@@ -33,12 +34,12 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do
end.compact
reference_chars.all? do |char|
- Banzai::Filter::MarkdownPreEscapeFilter::REFERENCE_CHARACTERS.include?(char)
+ Banzai::Filter::MarkdownPreEscapeFilter::TARGET_CHARS.include?(char)
end
end
- it 'does not convert non-reference punctuation to spans' do
- markdown = %q(\"\'\*\+\,\-\.\/\:\;\<\=\>\?\[\]\_\`\{\|\}) + %q[\(\)\\\\]
+ it 'does not convert non-reference/latex punctuation to spans' do
+ markdown = %q(\"\'\*\+\,\-\.\/\:\;\<\=\>\?\[\]\`\|) + %q[\(\)\\\\]
result = described_class.call(markdown, project: project)
output = result[:output].to_html
@@ -55,11 +56,12 @@ RSpec.describe Banzai::Pipeline::PlainMarkdownPipeline do
expect(result[:escaped_literals]).to be_falsey
end
- describe 'backslash escapes do not work in code blocks, code spans, autolinks, or raw HTML' do
+ describe 'backslash escapes are untouched in code blocks, code spans, autolinks, or raw HTML' do
where(:markdown, :expected) do
%q(`` \@\! ``) | %q(<code>\@\!</code>)
%q( \@\!) | %Q(<code>\\@\\!\n</code>)
%Q(~~~\n\\@\\!\n~~~) | %Q(<code>\\@\\!\n</code>)
+ %q($1+\$2$) | %q(<code data-math-style="inline">1+\\$2</code>)
%q(<http://example.com?find=\@>) | %q(<a href="http://example.com?find=%5C@">http://example.com?find=\@</a>)
%q[<a href="/bar\@)">] | %q[<a href="/bar%5C@)">]
end
diff --git a/spec/lib/banzai/pipeline/service_desk_email_pipeline_spec.rb b/spec/lib/banzai/pipeline/service_desk_email_pipeline_spec.rb
new file mode 100644
index 00000000000..83541494f68
--- /dev/null
+++ b/spec/lib/banzai/pipeline/service_desk_email_pipeline_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Banzai::Pipeline::ServiceDeskEmailPipeline, feature_category: :service_desk do
+ describe '.filters' do
+ it 'returns the expected type' do
+ expect(described_class.filters).to be_kind_of(Banzai::FilterArray)
+ end
+
+ it 'includes ServiceDeskUploadLinkFilter' do
+ expect(described_class.filters).not_to be_empty
+ expect(described_class.filters).to include(Banzai::Filter::ServiceDeskUploadLinkFilter)
+ end
+ end
+end
diff --git a/spec/lib/bulk_imports/clients/http_spec.rb b/spec/lib/bulk_imports/clients/http_spec.rb
index 4fb08fc0478..780f61f8c61 100644
--- a/spec/lib/bulk_imports/clients/http_spec.rb
+++ b/spec/lib/bulk_imports/clients/http_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::Clients::HTTP do
+RSpec.describe BulkImports::Clients::HTTP, feature_category: :importers do
include ImportSpecHelper
let(:url) { 'http://gitlab.example' }
@@ -22,12 +22,6 @@ RSpec.describe BulkImports::Clients::HTTP do
)
end
- before do
- allow(Gitlab::HTTP).to receive(:get)
- .with('http://gitlab.example/api/v4/version', anything)
- .and_return(metadata_response)
- end
-
subject { described_class.new(url: url, token: token) }
shared_examples 'performs network request' do
@@ -39,7 +33,7 @@ RSpec.describe BulkImports::Clients::HTTP do
context 'error handling' do
context 'when error occurred' do
- it 'raises BulkImports::Error' do
+ it 'raises BulkImports::NetworkError' do
allow(Gitlab::HTTP).to receive(method).and_raise(Errno::ECONNREFUSED)
expect { subject.public_send(method, resource) }.to raise_exception(BulkImports::NetworkError)
@@ -47,7 +41,7 @@ RSpec.describe BulkImports::Clients::HTTP do
end
context 'when response is not success' do
- it 'raises BulkImports::Error' do
+ it 'raises BulkImports::NetworkError' do
response_double = double(code: 503, success?: false, parsed_response: 'Error', request: double(path: double(path: '/test')))
allow(Gitlab::HTTP).to receive(method).and_return(response_double)
@@ -210,33 +204,153 @@ RSpec.describe BulkImports::Clients::HTTP do
describe '#instance_version' do
it 'returns version as an instance of Gitlab::VersionInfo' do
+ response = { version: version }
+
+ stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token')
+ .to_return(status: 200, body: response.to_json, headers: { 'Content-Type' => 'application/json' })
+
expect(subject.instance_version).to eq(Gitlab::VersionInfo.parse(version))
end
context 'when /version endpoint is not available' do
it 'requests /metadata endpoint' do
- response_double = double(code: 404, success?: false, parsed_response: 'Not Found', request: double(path: double(path: '/version')))
-
- allow(Gitlab::HTTP).to receive(:get)
- .with('http://gitlab.example/api/v4/version', anything)
- .and_return(response_double)
+ response = { version: version }
- expect(Gitlab::HTTP).to receive(:get)
- .with('http://gitlab.example/api/v4/metadata', anything)
- .and_return(metadata_response)
+ stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404)
+ stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
+ .to_return(status: 200, body: response.to_json, headers: { 'Content-Type' => 'application/json' })
expect(subject.instance_version).to eq(Gitlab::VersionInfo.parse(version))
end
+
+ context 'when /metadata endpoint returns a 401' do
+ it 'raises a BulkImports::Error' do
+ stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404)
+ stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
+ .to_return(status: 401, body: "", headers: { 'Content-Type' => 'application/json' })
+
+ expect { subject.instance_version }.to raise_exception(BulkImports::Error,
+ "Import aborted as the provided personal access token does not have the required 'api' scope or " \
+ "is no longer valid.")
+ end
+ end
+
+ context 'when /metadata endpoint returns a 403' do
+ it 'raises a BulkImports::Error' do
+ stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404)
+ stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
+ .to_return(status: 403, body: "", headers: { 'Content-Type' => 'application/json' })
+
+ expect { subject.instance_version }.to raise_exception(BulkImports::Error,
+ "Import aborted as the provided personal access token does not have the required 'api' scope or " \
+ "is no longer valid.")
+ end
+ end
+
+ context 'when /metadata endpoint returns a 404' do
+ it 'raises a BulkImports::Error' do
+ stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404)
+ stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
+ .to_return(status: 404, body: "", headers: { 'Content-Type' => 'application/json' })
+
+ expect { subject.instance_version }.to raise_exception(BulkImports::Error, 'Import aborted as it was not possible to connect to the provided GitLab instance URL.')
+ end
+ end
+
+ context 'when /metadata endpoint returns any other BulkImports::NetworkError' do
+ it 'raises a BulkImports::NetworkError' do
+ stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404)
+ stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
+ .to_return(status: 418, body: "", headers: { 'Content-Type' => 'application/json' })
+
+ expect { subject.instance_version }.to raise_exception(BulkImports::NetworkError)
+ end
+ end
+ end
+ end
+
+ describe '#validate_instance_version!' do
+ before do
+ allow(subject).to receive(:instance_version).and_return(source_version)
+ end
+
+ context 'when instance version is greater than or equal to the minimum major version' do
+ let(:source_version) { Gitlab::VersionInfo.new(14) }
+
+ it { expect(subject.validate_instance_version!).to eq(true) }
+ end
+
+ context 'when instance version is less than the minimum major version' do
+ let(:source_version) { Gitlab::VersionInfo.new(13, 10, 0) }
+
+ it { expect { subject.validate_instance_version! }.to raise_exception(BulkImports::Error) }
+ end
+ end
+
+ describe '#validate_import_scopes!' do
+ context 'when the source version is lower than 15.5' do
+ let(:source_version) { Gitlab::VersionInfo.new(15, 0) }
+
+ it 'skips validation' do
+ allow(subject).to receive(:instance_version).and_return(source_version)
+
+ expect(subject.validate_import_scopes!).to eq(true)
+ end
+ end
+
+ context 'when source version is 15.5 or higher' do
+ let(:source_version) { Gitlab::VersionInfo.new(15, 6) }
+
+ before do
+ allow(subject).to receive(:instance_version).and_return(source_version)
+ end
+
+ context 'when an HTTP error is raised' do
+ let(:response) { { enterprise: false } }
+
+ it 'raises BulkImports::NetworkError' do
+ stub_request(:get, 'http://gitlab.example/api/v4/personal_access_tokens/self?private_token=token')
+ .to_return(status: 404)
+
+ expect { subject.validate_import_scopes! }.to raise_exception(BulkImports::NetworkError)
+ end
+ end
+
+ context 'when scopes are valid' do
+ it 'returns true' do
+ stub_request(:get, 'http://gitlab.example/api/v4/personal_access_tokens/self?private_token=token')
+ .to_return(status: 200, body: { 'scopes' => ['api'] }.to_json, headers: { 'Content-Type' => 'application/json' })
+
+ expect(subject.validate_import_scopes!).to eq(true)
+ end
+ end
+
+ context 'when scopes are invalid' do
+ it 'raises a BulkImports error' do
+ stub_request(:get, 'http://gitlab.example/api/v4/personal_access_tokens/self?private_token=token')
+ .to_return(status: 200, body: { 'scopes' => ['read_user'] }.to_json, headers: { 'Content-Type' => 'application/json' })
+
+ expect(subject.instance_version).to eq(Gitlab::VersionInfo.parse(source_version))
+ expect { subject.validate_import_scopes! }.to raise_exception(BulkImports::Error)
+ end
+ end
end
end
describe '#instance_enterprise' do
+ let(:response) { { enterprise: false } }
+
+ before do
+ stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token')
+ .to_return(status: 200, body: response.to_json, headers: { 'Content-Type' => 'application/json' })
+ end
+
it 'returns source instance enterprise information' do
expect(subject.instance_enterprise).to eq(false)
end
context 'when enterprise information is missing' do
- let(:enterprise) { nil }
+ let(:response) { {} }
it 'defaults to true' do
expect(subject.instance_enterprise).to eq(true)
@@ -245,14 +359,20 @@ RSpec.describe BulkImports::Clients::HTTP do
end
describe '#compatible_for_project_migration?' do
+ before do
+ allow(subject).to receive(:instance_version).and_return(Gitlab::VersionInfo.parse(version))
+ end
+
context 'when instance version is lower than the expected minimum' do
+ let(:version) { '14.3.0' }
+
it 'returns false' do
expect(subject.compatible_for_project_migration?).to be false
end
end
context 'when instance version is at least the expected minimum' do
- let(:version) { "14.4.4" }
+ let(:version) { '14.4.4' }
it 'returns true' do
expect(subject.compatible_for_project_migration?).to be true
@@ -260,18 +380,6 @@ RSpec.describe BulkImports::Clients::HTTP do
end
end
- context 'when source instance is incompatible' do
- let(:version) { '13.0.0' }
-
- it 'raises an error' do
- expect { subject.get(resource) }
- .to raise_error(
- ::BulkImports::Error,
- "Unsupported GitLab Version. Minimum Supported Gitlab Version #{BulkImport::MIN_MAJOR_VERSION}."
- )
- end
- end
-
context 'when url is relative' do
let(:url) { 'http://website.example/gitlab' }
diff --git a/spec/lib/bulk_imports/groups/stage_spec.rb b/spec/lib/bulk_imports/groups/stage_spec.rb
index 528d65615b1..cc772f07d21 100644
--- a/spec/lib/bulk_imports/groups/stage_spec.rb
+++ b/spec/lib/bulk_imports/groups/stage_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::Groups::Stage do
+RSpec.describe BulkImports::Groups::Stage, feature_category: :importers do
let(:ancestor) { create(:group) }
let(:group) { build(:group, parent: ancestor) }
let(:bulk_import) { build(:bulk_import) }
@@ -77,6 +77,28 @@ RSpec.describe BulkImports::Groups::Stage do
)
end
+ describe 'migrate projects flag' do
+ context 'when true' do
+ it 'includes project entities pipeline' do
+ entity.update!(migrate_projects: true)
+
+ expect(described_class.new(entity).pipelines).to include(
+ hash_including({ pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline })
+ )
+ end
+ end
+
+ context 'when false' do
+ it 'does not include project entities pipeline' do
+ entity.update!(migrate_projects: false)
+
+ expect(described_class.new(entity).pipelines).not_to include(
+ hash_including({ pipeline: BulkImports::Groups::Pipelines::ProjectEntitiesPipeline })
+ )
+ end
+ end
+ end
+
context 'when feature flag is enabled on root ancestor level' do
it 'includes project entities pipeline' do
stub_feature_flags(bulk_import_projects: ancestor)
diff --git a/spec/lib/bulk_imports/groups/transformers/subgroup_to_entity_transformer_spec.rb b/spec/lib/bulk_imports/groups/transformers/subgroup_to_entity_transformer_spec.rb
index 6450d90ec0f..69cf80f92c5 100644
--- a/spec/lib/bulk_imports/groups/transformers/subgroup_to_entity_transformer_spec.rb
+++ b/spec/lib/bulk_imports/groups/transformers/subgroup_to_entity_transformer_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe BulkImports::Groups::Transformers::SubgroupToEntityTransformer do
describe "#transform" do
it "transforms subgroups data in entity params" do
parent = create(:group)
- parent_entity = instance_double(BulkImports::Entity, group: parent, id: 1)
+ parent_entity = instance_double(BulkImports::Entity, group: parent, id: 1, migrate_projects: false)
context = instance_double(BulkImports::Pipeline::Context, entity: parent_entity)
subgroup_data = {
"path" => "sub-group",
@@ -18,7 +18,8 @@ RSpec.describe BulkImports::Groups::Transformers::SubgroupToEntityTransformer do
source_full_path: "parent/sub-group",
destination_name: "sub-group",
destination_namespace: parent.full_path,
- parent_id: 1
+ parent_id: 1,
+ migrate_projects: false
)
end
end
diff --git a/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
index 4320d5dc119..ecb3c8fe76d 100644
--- a/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
+++ b/spec/lib/bulk_imports/projects/pipelines/project_attributes_pipeline_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline do
+RSpec.describe BulkImports::Projects::Pipelines::ProjectAttributesPipeline, :with_license do
let_it_be(:project) { create(:project) }
let_it_be(:bulk_import) { create(:bulk_import) }
let_it_be(:entity) { create(:bulk_import_entity, :project_entity, project: project, bulk_import: bulk_import) }
diff --git a/spec/lib/event_filter_spec.rb b/spec/lib/event_filter_spec.rb
index bab48796b8c..16612a6288c 100644
--- a/spec/lib/event_filter_spec.rb
+++ b/spec/lib/event_filter_spec.rb
@@ -3,6 +3,29 @@
require 'spec_helper'
RSpec.describe EventFilter do
+ let_it_be(:public_project) { create(:project, :public) }
+ let_it_be(:push_event) { create(:push_event, project: public_project) }
+ let_it_be(:merged_event) { create(:event, :merged, project: public_project, target: public_project) }
+ let_it_be(:created_event) { create(:event, :created, project: public_project, target: create(:issue, project: public_project)) }
+ let_it_be(:updated_event) { create(:event, :updated, project: public_project, target: create(:issue, project: public_project)) }
+ let_it_be(:closed_event) { create(:event, :closed, project: public_project, target: create(:issue, project: public_project)) }
+ let_it_be(:reopened_event) { create(:event, :reopened, project: public_project, target: create(:issue, project: public_project)) }
+ let_it_be(:comments_event) { create(:event, :commented, project: public_project, target: public_project) }
+ let_it_be(:joined_event) { create(:event, :joined, project: public_project, target: public_project) }
+ let_it_be(:left_event) { create(:event, :left, project: public_project, target: public_project) }
+ let_it_be(:wiki_page_event) { create(:wiki_page_event) }
+ let_it_be(:wiki_page_update_event) { create(:wiki_page_event, :updated) }
+ let_it_be(:design_event) { create(:design_event) }
+
+ let_it_be(:work_item_event) do
+ create(:event,
+ :created,
+ project: public_project,
+ target: create(:work_item, :task, project: public_project),
+ target_type: 'WorkItem'
+ )
+ end
+
describe '#filter' do
it 'returns "all" if given filter is nil' do
expect(described_class.new(nil).filter).to eq(described_class::ALL)
@@ -18,20 +41,6 @@ RSpec.describe EventFilter do
end
describe '#apply_filter' do
- let_it_be(:public_project) { create(:project, :public) }
- let_it_be(:push_event) { create(:push_event, project: public_project) }
- let_it_be(:merged_event) { create(:event, :merged, project: public_project, target: public_project) }
- let_it_be(:created_event) { create(:event, :created, project: public_project, target: create(:issue, project: public_project)) }
- let_it_be(:updated_event) { create(:event, :updated, project: public_project, target: create(:issue, project: public_project)) }
- let_it_be(:closed_event) { create(:event, :closed, project: public_project, target: create(:issue, project: public_project)) }
- let_it_be(:reopened_event) { create(:event, :reopened, project: public_project, target: create(:issue, project: public_project)) }
- let_it_be(:comments_event) { create(:event, :commented, project: public_project, target: public_project) }
- let_it_be(:joined_event) { create(:event, :joined, project: public_project, target: public_project) }
- let_it_be(:left_event) { create(:event, :left, project: public_project, target: public_project) }
- let_it_be(:wiki_page_event) { create(:wiki_page_event) }
- let_it_be(:wiki_page_update_event) { create(:wiki_page_event, :updated) }
- let_it_be(:design_event) { create(:design_event) }
-
let(:filtered_events) { described_class.new(filter).apply_filter(Event.all) }
context 'with the "push" filter' do
@@ -53,8 +62,14 @@ RSpec.describe EventFilter do
context 'with the "issue" filter' do
let(:filter) { described_class::ISSUE }
- it 'filters issue events only' do
- expect(filtered_events).to contain_exactly(created_event, updated_event, closed_event, reopened_event)
+ it 'filters issue and work item events only' do
+ expect(filtered_events).to contain_exactly(
+ created_event,
+ updated_event,
+ closed_event,
+ reopened_event,
+ work_item_event
+ )
end
end
@@ -115,6 +130,31 @@ RSpec.describe EventFilter do
end
end
+ describe '#in_operator_query_builder_params' do
+ let(:filtered_events) { described_class.new(filter).in_operator_query_builder_params(array_data) }
+ let(:array_data) do
+ {
+ scope_ids: [public_project.id],
+ scope_model: Project,
+ mapping_column: 'project_id'
+ }
+ end
+
+ context 'with the "issue" filter' do
+ let(:filter) { described_class::ISSUE }
+
+ it 'also includes work item events' do
+ expect(filtered_events[:scope]).to contain_exactly(
+ created_event,
+ updated_event,
+ closed_event,
+ reopened_event,
+ work_item_event
+ )
+ end
+ end
+ end
+
describe '#active?' do
let(:event_filter) { described_class.new(described_class::TEAM) }
diff --git a/spec/lib/gitlab/application_rate_limiter_spec.rb b/spec/lib/gitlab/application_rate_limiter_spec.rb
index 41e79f811fa..c938393adce 100644
--- a/spec/lib/gitlab/application_rate_limiter_spec.rb
+++ b/spec/lib/gitlab/application_rate_limiter_spec.rb
@@ -214,6 +214,52 @@ RSpec.describe Gitlab::ApplicationRateLimiter, :clean_gitlab_redis_rate_limiting
end
end
+ describe '.throttled_request?', :freeze_time do
+ let(:request) { instance_double('Rack::Request') }
+
+ context 'when request is not over the limit' do
+ it 'returns false and does not log the request' do
+ expect(subject).not_to receive(:log_request)
+
+ expect(subject.throttled_request?(request, user, :test_action, scope: [user])).to eq(false)
+ end
+ end
+
+ context 'when request is over the limit' do
+ before do
+ subject.throttled?(:test_action, scope: [user])
+ end
+
+ it 'returns true and logs the request' do
+ expect(subject).to receive(:log_request).with(request, :test_action_request_limit, user)
+
+ expect(subject.throttled_request?(request, user, :test_action, scope: [user])).to eq(true)
+ end
+
+ context 'when the bypass header is set' do
+ before do
+ allow(Gitlab::Throttle).to receive(:bypass_header).and_return('SOME_HEADER')
+ end
+
+ it 'skips rate limit if set to "1"' do
+ allow(request).to receive(:get_header).with(Gitlab::Throttle.bypass_header).and_return('1')
+
+ expect(subject).not_to receive(:log_request)
+
+ expect(subject.throttled_request?(request, user, :test_action, scope: [user])).to eq(false)
+ end
+
+ it 'does not skip rate limit if set to something else than "1"' do
+ allow(request).to receive(:get_header).with(Gitlab::Throttle.bypass_header).and_return('0')
+
+ expect(subject).to receive(:log_request).with(request, :test_action_request_limit, user)
+
+ expect(subject.throttled_request?(request, user, :test_action, scope: [user])).to eq(true)
+ end
+ end
+ end
+ end
+
describe '.peek' do
it 'peeks at the current state without changing its value' do
freeze_time do
diff --git a/spec/lib/gitlab/auth/atlassian/identity_linker_spec.rb b/spec/lib/gitlab/auth/atlassian/identity_linker_spec.rb
index ca6b91ac6f1..a303634d463 100644
--- a/spec/lib/gitlab/auth/atlassian/identity_linker_spec.rb
+++ b/spec/lib/gitlab/auth/atlassian/identity_linker_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe Gitlab::Auth::Atlassian::IdentityLinker do
let(:credentials) do
{
token: SecureRandom.alphanumeric(1254),
- refresh_token: SecureRandom.alphanumeric(45),
+ refresh_token: SecureRandom.alphanumeric(1500),
expires_at: 2.weeks.from_now.to_i,
expires: true
}
diff --git a/spec/lib/gitlab/auth/o_auth/user_spec.rb b/spec/lib/gitlab/auth/o_auth/user_spec.rb
index bb81621ec92..beeb3ca7011 100644
--- a/spec/lib/gitlab/auth/o_auth/user_spec.rb
+++ b/spec/lib/gitlab/auth/o_auth/user_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Auth::OAuth::User do
+RSpec.describe Gitlab::Auth::OAuth::User, feature_category: :authentication_and_authorization do
include LdapHelpers
let(:oauth_user) { described_class.new(auth_hash) }
@@ -329,7 +329,7 @@ RSpec.describe Gitlab::Auth::OAuth::User do
context "and no LDAP provider defined" do
before do
- stub_ldap_config(providers: [])
+ allow(Gitlab::Auth::Ldap::Config).to receive(:providers).at_least(:once).and_return([])
end
include_examples "to verify compliance with allow_single_sign_on"
@@ -509,6 +509,8 @@ RSpec.describe Gitlab::Auth::OAuth::User do
context "and no corresponding LDAP person" do
before do
allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_uid).and_return(nil)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_email).and_return(nil)
+ allow(Gitlab::Auth::Ldap::Person).to receive(:find_by_dn).and_return(nil)
end
include_examples "to verify compliance with allow_single_sign_on"
@@ -935,7 +937,7 @@ RSpec.describe Gitlab::Auth::OAuth::User do
end
it "does not update the user location" do
- expect(gl_user.location).to be_nil
+ expect(gl_user.location).to be_blank
expect(gl_user.user_synced_attributes_metadata.location_synced).to be(false)
end
end
diff --git a/spec/lib/gitlab/background_migration/add_primary_email_to_emails_if_user_confirmed_spec.rb b/spec/lib/gitlab/background_migration/add_primary_email_to_emails_if_user_confirmed_spec.rb
deleted file mode 100644
index b50a55a9e41..00000000000
--- a/spec/lib/gitlab/background_migration/add_primary_email_to_emails_if_user_confirmed_spec.rb
+++ /dev/null
@@ -1,49 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::BackgroundMigration::AddPrimaryEmailToEmailsIfUserConfirmed do
- let(:users) { table(:users) }
- let(:emails) { table(:emails) }
-
- let!(:unconfirmed_user) { users.create!(name: 'unconfirmed', email: 'unconfirmed@example.com', confirmed_at: nil, projects_limit: 100) }
- let!(:confirmed_user_1) { users.create!(name: 'confirmed-1', email: 'confirmed-1@example.com', confirmed_at: 1.day.ago, projects_limit: 100) }
- let!(:confirmed_user_2) { users.create!(name: 'confirmed-2', email: 'confirmed-2@example.com', confirmed_at: 1.day.ago, projects_limit: 100) }
- let!(:email) { emails.create!(user_id: confirmed_user_1.id, email: 'confirmed-1@example.com', confirmed_at: 1.day.ago) }
-
- let(:perform) { described_class.new.perform(users.first.id, users.last.id) }
-
- it 'adds the primary email of confirmed users to Emails, unless already added', :aggregate_failures do
- expect(emails.where(email: [unconfirmed_user.email, confirmed_user_2.email])).to be_empty
-
- expect { perform }.not_to raise_error
-
- expect(emails.where(email: unconfirmed_user.email).count).to eq(0)
- expect(emails.where(email: confirmed_user_1.email, user_id: confirmed_user_1.id).count).to eq(1)
- expect(emails.where(email: confirmed_user_2.email, user_id: confirmed_user_2.id).count).to eq(1)
-
- email_2 = emails.find_by(email: confirmed_user_2.email, user_id: confirmed_user_2.id)
- expect(email_2.confirmed_at).to eq(confirmed_user_2.reload.confirmed_at)
- end
-
- it 'sets timestamps on the created Emails' do
- perform
-
- email_2 = emails.find_by(email: confirmed_user_2.email, user_id: confirmed_user_2.id)
-
- expect(email_2.created_at).not_to be_nil
- expect(email_2.updated_at).not_to be_nil
- end
-
- context 'when a range of IDs is specified' do
- let!(:confirmed_user_3) { users.create!(name: 'confirmed-3', email: 'confirmed-3@example.com', confirmed_at: 1.hour.ago, projects_limit: 100) }
- let!(:confirmed_user_4) { users.create!(name: 'confirmed-4', email: 'confirmed-4@example.com', confirmed_at: 1.hour.ago, projects_limit: 100) }
-
- it 'only acts on the specified range of IDs', :aggregate_failures do
- expect do
- described_class.new.perform(confirmed_user_2.id, confirmed_user_3.id)
- end.to change { Email.count }.by(2)
- expect(emails.where(email: confirmed_user_4.email).count).to eq(0)
- end
- end
-end
diff --git a/spec/lib/gitlab/background_migration/backfill_admin_mode_scope_for_personal_access_tokens_spec.rb b/spec/lib/gitlab/background_migration/backfill_admin_mode_scope_for_personal_access_tokens_spec.rb
new file mode 100644
index 00000000000..7075d4694ae
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_admin_mode_scope_for_personal_access_tokens_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillAdminModeScopeForPersonalAccessTokens,
+ :migration, schema: 20221228103133, feature_category: :authentication_and_authorization do
+ let(:users) { table(:users) }
+ let(:personal_access_tokens) { table(:personal_access_tokens) }
+
+ let(:admin) { users.create!(name: 'admin', email: 'admin@example.com', projects_limit: 1, admin: true) }
+ let(:user) { users.create!(name: 'user', email: 'user@example.com', projects_limit: 1) }
+
+ let!(:pat_admin_1) { personal_access_tokens.create!(name: 'admin 1', user_id: admin.id, scopes: "---\n- api\n") }
+ let!(:pat_user) { personal_access_tokens.create!(name: 'user 1', user_id: user.id, scopes: "---\n- api\n") }
+ let!(:pat_revoked) do
+ personal_access_tokens.create!(name: 'admin 2', user_id: admin.id, scopes: "---\n- api\n", revoked: true)
+ end
+
+ let!(:pat_expired) do
+ personal_access_tokens.create!(name: 'admin 3', user_id: admin.id, scopes: "---\n- api\n", expires_at: 1.day.ago)
+ end
+
+ let!(:pat_admin_mode) do
+ personal_access_tokens.create!(name: 'admin 4', user_id: admin.id, scopes: "---\n- admin_mode\n")
+ end
+
+ let!(:pat_admin_2) { personal_access_tokens.create!(name: 'admin 5', user_id: admin.id, scopes: "---\n- read_api\n") }
+ let!(:pat_not_in_range) { personal_access_tokens.create!(name: 'admin 6', user_id: admin.id, scopes: "---\n- api\n") }
+
+ subject do
+ described_class.new(
+ start_id: pat_admin_1.id,
+ end_id: pat_admin_2.id,
+ batch_table: :personal_access_tokens,
+ batch_column: :id,
+ sub_batch_size: 1,
+ pause_ms: 0,
+ connection: ApplicationRecord.connection
+ )
+ end
+
+ it "adds `admin_mode` scope to active personal access tokens of administrators" do
+ subject.perform
+
+ expect(pat_admin_1.reload.scopes).to eq("---\n- api\n- admin_mode\n")
+ expect(pat_user.reload.scopes).to eq("---\n- api\n")
+ expect(pat_revoked.reload.scopes).to eq("---\n- api\n")
+ expect(pat_expired.reload.scopes).to eq("---\n- api\n")
+ expect(pat_admin_mode.reload.scopes).to eq("---\n- admin_mode\n")
+ expect(pat_admin_2.reload.scopes).to eq("---\n- read_api\n- admin_mode\n")
+ expect(pat_not_in_range.reload.scopes).to eq("---\n- api\n")
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
index 8db45ac0f57..96adea03d43 100644
--- a/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_jira_tracker_deployment_type2_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillJiraTrackerDeploymentType2, :migration, schema: 20210301200959 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillJiraTrackerDeploymentType2, :migration, schema: 20210602155110 do
let!(:jira_integration_temp) { described_class::JiraServiceTemp }
let!(:jira_tracker_data_temp) { described_class::JiraTrackerDataTemp }
let!(:atlassian_host) { 'https://api.atlassian.net' }
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb
index 35928deff82..15956d2ea80 100644
--- a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_children_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsChildren, :migration, schema: 20210506065000 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsChildren, :migration, schema: 20210602155110 do
let(:namespaces_table) { table(:namespaces) }
let!(:user_namespace) { namespaces_table.create!(id: 1, name: 'user', path: 'user', type: nil) }
diff --git a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb
index 96e43275972..019c6d54068 100644
--- a/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_namespace_traversal_ids_roots_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsRoots, :migration, schema: 20210506065000 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillNamespaceTraversalIdsRoots, :migration, schema: 20210602155110 do
let(:namespaces_table) { table(:namespaces) }
let!(:user_namespace) { namespaces_table.create!(id: 1, name: 'user', path: 'user', type: nil) }
diff --git a/spec/lib/gitlab/background_migration/backfill_releases_author_id_spec.rb b/spec/lib/gitlab/background_migration/backfill_releases_author_id_spec.rb
new file mode 100644
index 00000000000..d8ad10849f2
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/backfill_releases_author_id_spec.rb
@@ -0,0 +1,61 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::BackgroundMigration::BackfillReleasesAuthorId,
+ :migration, schema: 20221215151822, feature_category: :release_orchestration do
+ let(:releases_table) { table(:releases) }
+ let(:user_table) { table(:users) }
+ let(:date_time) { DateTime.now }
+
+ let!(:test_user) { user_table.create!(name: 'test', email: 'test@example.com', username: 'test', projects_limit: 10) }
+ let!(:ghost_user) do
+ user_table.create!(name: 'ghost', email: 'ghost@example.com',
+ username: 'ghost', user_type: User::USER_TYPES['ghost'], projects_limit: 100000)
+ end
+
+ let(:migration) do
+ described_class.new(start_id: 1, end_id: 100,
+ batch_table: :releases, batch_column: :id,
+ sub_batch_size: 10, pause_ms: 0,
+ job_arguments: [ghost_user.id],
+ connection: ApplicationRecord.connection)
+ end
+
+ subject(:perform_migration) { migration.perform }
+
+ before do
+ releases_table.create!(tag: 'tag1', name: 'tag1',
+ released_at: (date_time - 1.minute), author_id: test_user.id)
+ releases_table.create!(tag: 'tag2', name: 'tag2',
+ released_at: (date_time - 2.minutes), author_id: test_user.id)
+ releases_table.new(tag: 'tag3', name: 'tag3',
+ released_at: (date_time - 3.minutes), author_id: nil).save!(validate: false)
+ releases_table.new(tag: 'tag4', name: 'tag4',
+ released_at: (date_time - 4.minutes), author_id: nil).save!(validate: false)
+ releases_table.new(tag: 'tag5', name: 'tag5',
+ released_at: (date_time - 5.minutes), author_id: nil).save!(validate: false)
+ releases_table.create!(tag: 'tag6', name: 'tag6',
+ released_at: (date_time - 6.minutes), author_id: test_user.id)
+ releases_table.new(tag: 'tag7', name: 'tag7',
+ released_at: (date_time - 7.minutes), author_id: nil).save!(validate: false)
+ end
+
+ it 'backfills `author_id` for the selected records', :aggregate_failures do
+ expect(releases_table.where(author_id: ghost_user.id).count).to eq 0
+ expect(releases_table.where(author_id: nil).count).to eq 4
+
+ perform_migration
+
+ expect(releases_table.where(author_id: ghost_user.id).count).to eq 4
+ expect(releases_table.where(author_id: ghost_user.id).pluck(:name)).to include('tag3', 'tag4', 'tag5', 'tag7')
+ expect(releases_table.where(author_id: test_user.id).count).to eq 3
+ expect(releases_table.where(author_id: test_user.id).pluck(:name)).to include('tag1', 'tag2', 'tag6')
+ end
+
+ it 'tracks timings of queries' do
+ expect(migration.batch_metrics.timings).to be_empty
+
+ expect { perform_migration }.to change { migration.batch_metrics.timings }
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
index 1c2e0e991d9..8d5aa6236a7 100644
--- a/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
+++ b/spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 2021_03_13_045845 do
+RSpec.describe Gitlab::BackgroundMigration::BackfillSnippetRepositories, :migration, schema: 20210602155110 do
let(:gitlab_shell) { Gitlab::Shell.new }
let(:users) { table(:users) }
let(:snippets) { table(:snippets) }
diff --git a/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb b/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb
index 7280ca0b58e..faaaccfdfaf 100644
--- a/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb
+++ b/spec/lib/gitlab/background_migration/batched_migration_job_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Gitlab::BackgroundMigration::BatchedMigrationJob do
expect(generic_instance.send(:batch_table)).to eq('projects')
expect(generic_instance.send(:batch_column)).to eq('id')
- expect(generic_instance.instance_variable_get('@job_arguments')).to eq(%w(x y))
+ expect(generic_instance.instance_variable_get(:@job_arguments)).to eq(%w(x y))
expect(generic_instance.send(:connection)).to eq(connection)
%i(start_id end_id sub_batch_size pause_ms).each do |attr|
diff --git a/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb b/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb
index dd202acc372..0d9d9eb929c 100644
--- a/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb
+++ b/spec/lib/gitlab/background_migration/cleanup_orphaned_lfs_objects_projects_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::CleanupOrphanedLfsObjectsProjects, schema: 20210514063252 do
+RSpec.describe Gitlab::BackgroundMigration::CleanupOrphanedLfsObjectsProjects, schema: 20210602155110 do
let(:lfs_objects_projects) { table(:lfs_objects_projects) }
let(:lfs_objects) { table(:lfs_objects) }
let(:projects) { table(:projects) }
diff --git a/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb b/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb
index ba04f2d20a7..66e16b16270 100644
--- a/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb
+++ b/spec/lib/gitlab/background_migration/drop_invalid_vulnerabilities_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::DropInvalidVulnerabilities, schema: 20210301200959 do
+RSpec.describe Gitlab::BackgroundMigration::DropInvalidVulnerabilities, schema: 20210602155110 do
let!(:background_migration_jobs) { table(:background_migration_jobs) }
let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
let!(:users) { table(:users) }
diff --git a/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb b/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb
index 5495d786a48..4d7c836cff4 100644
--- a/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_project_taggings_context_from_tags_to_topics_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::BackgroundMigration::MigrateProjectTaggingsContextFromTagsToTopics,
- :suppress_gitlab_schemas_validate_connection, schema: 20210511095658 do
+ :suppress_gitlab_schemas_validate_connection, schema: 20210602155110 do
it 'correctly migrates project taggings context from tags to topics' do
taggings = table(:taggings)
diff --git a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
index fc957a7c425..fe45eaac3b7 100644
--- a/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
+++ b/spec/lib/gitlab/background_migration/migrate_u2f_webauthn_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
require 'webauthn/u2f_migrator'
-RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20210301200959 do
+RSpec.describe Gitlab::BackgroundMigration::MigrateU2fWebauthn, :migration, schema: 20210602155110 do
let(:users) { table(:users) }
let(:user) { users.create!(email: 'email@email.com', name: 'foo', username: 'foo', projects_limit: 0) }
diff --git a/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb b/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
index 79b5567f5b3..cafddb6aeaf 100644
--- a/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
+++ b/spec/lib/gitlab/background_migration/move_container_registry_enabled_to_project_feature_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjectFeature, :migration, schema: 20210301200959 do
+RSpec.describe Gitlab::BackgroundMigration::MoveContainerRegistryEnabledToProjectFeature, :migration, schema: 20210602155110 do
let(:enabled) { 20 }
let(:disabled) { 0 }
diff --git a/spec/lib/gitlab/background_migration/sanitize_confidential_todos_spec.rb b/spec/lib/gitlab/background_migration/sanitize_confidential_todos_spec.rb
index c58f2060001..a19a3760958 100644
--- a/spec/lib/gitlab/background_migration/sanitize_confidential_todos_spec.rb
+++ b/spec/lib/gitlab/background_migration/sanitize_confidential_todos_spec.rb
@@ -2,7 +2,9 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::SanitizeConfidentialTodos, :migration, schema: 20221110045406 do
+RSpec.describe Gitlab::BackgroundMigration::SanitizeConfidentialTodos, :migration, feature_category: :team_planning do
+ let!(:issue_type_id) { table(:work_item_types).find_by(base_type: 0).id }
+
let(:todos) { table(:todos) }
let(:notes) { table(:notes) }
let(:namespaces) { table(:namespaces) }
@@ -29,12 +31,16 @@ RSpec.describe Gitlab::BackgroundMigration::SanitizeConfidentialTodos, :migratio
let(:issue1) do
issues.create!(
- project_id: project1.id, namespace_id: project_namespace1.id, issue_type: 1, title: 'issue1', author_id: user.id
+ project_id: project1.id, namespace_id: project_namespace1.id, issue_type: 1, title: 'issue1', author_id: user.id,
+ work_item_type_id: issue_type_id
)
end
let(:issue2) do
- issues.create!(project_id: project2.id, namespace_id: project_namespace2.id, issue_type: 1, title: 'issue2')
+ issues.create!(
+ project_id: project2.id, namespace_id: project_namespace2.id, issue_type: 1, title: 'issue2',
+ work_item_type_id: issue_type_id
+ )
end
let(:public_note) { notes.create!(note: 'text', project_id: project1.id) }
diff --git a/spec/lib/gitlab/background_migration/truncate_overlong_vulnerability_html_titles_spec.rb b/spec/lib/gitlab/background_migration/truncate_overlong_vulnerability_html_titles_spec.rb
new file mode 100644
index 00000000000..fcd88d523bc
--- /dev/null
+++ b/spec/lib/gitlab/background_migration/truncate_overlong_vulnerability_html_titles_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+# rubocop:disable Layout/LineLength
+RSpec.describe Gitlab::BackgroundMigration::TruncateOverlongVulnerabilityHtmlTitles, schema: 20221110100602, feature_category: :vulnerability_management do
+ # rubocop:enable Layout/LineLength
+
+ let(:namespaces) { table(:namespaces) }
+ let(:projects) { table(:projects) }
+ let(:vulnerabilities) { table(:vulnerabilities) }
+ let(:users) { table(:users) }
+ let(:namespace) { namespaces.create!(name: 'name', path: 'path') }
+
+ let(:project) do
+ projects
+ .create!(name: "project", path: "project", namespace_id: namespace.id, project_namespace_id: namespace.id)
+ end
+
+ let!(:user) { create_user! }
+
+ let!(:vulnerability_1) { create_vulnerability!(title_html: 'a' * 900, project_id: project.id, author_id: user.id) }
+ let!(:vulnerability_2) { create_vulnerability!(title_html: 'a' * 801, project_id: project.id, author_id: user.id) }
+ let!(:vulnerability_3) { create_vulnerability!(title_html: 'a' * 800, project_id: project.id, author_id: user.id) }
+ let!(:vulnerability_4) { create_vulnerability!(title_html: 'a' * 544, project_id: project.id, author_id: user.id) }
+
+ subject do
+ described_class.new(
+ start_id: vulnerabilities.minimum(:id),
+ end_id: vulnerabilities.maximum(:id),
+ batch_table: :vulnerabilities,
+ batch_column: :id,
+ sub_batch_size: 200,
+ pause_ms: 2.minutes,
+ connection: ApplicationRecord.connection
+ )
+ end
+
+ describe '#perform' do
+ it 'truncates the vulnerability html title when longer than 800 characters' do
+ subject.perform
+
+ expect(vulnerability_1.reload.title_html.length).to eq(800)
+ expect(vulnerability_2.reload.title_html.length).to eq(800)
+ expect(vulnerability_3.reload.title_html.length).to eq(800)
+ expect(vulnerability_4.reload.title_html.length).to eq(544)
+ end
+ end
+
+ private
+
+ # rubocop:disable Metrics/ParameterLists
+ def create_vulnerability!(
+ project_id:, author_id:, title: 'test', title_html: 'test', severity: 7, confidence: 7, report_type: 0, state: 1,
+ dismissed_at: nil
+ )
+ vulnerabilities.create!(
+ project_id: project_id,
+ author_id: author_id,
+ title: title,
+ title_html: title_html,
+ severity: severity,
+ confidence: confidence,
+ report_type: report_type,
+ state: state,
+ dismissed_at: dismissed_at
+ )
+ end
+ # rubocop:enable Metrics/ParameterLists
+
+ def create_user!(name: "Example User", email: "user@example.com", user_type: nil)
+ users.create!(
+ name: name,
+ email: email,
+ username: name,
+ projects_limit: 10
+ )
+ end
+end
diff --git a/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb b/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb
index fc4d776b8be..7261758e010 100644
--- a/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb
+++ b/spec/lib/gitlab/background_migration/update_timelogs_project_id_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::UpdateTimelogsProjectId, schema: 20210427212034 do
+RSpec.describe Gitlab::BackgroundMigration::UpdateTimelogsProjectId, schema: 20210602155110 do
let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
let!(:project1) { table(:projects).create!(namespace_id: namespace.id) }
let!(:project2) { table(:projects).create!(namespace_id: namespace.id) }
diff --git a/spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb b/spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb
index e14328b6150..4599491b580 100644
--- a/spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb
+++ b/spec/lib/gitlab/background_migration/update_users_where_two_factor_auth_required_from_group_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::BackgroundMigration::UpdateUsersWhereTwoFactorAuthRequiredFromGroup, :migration, schema: 20210519154058 do
+RSpec.describe Gitlab::BackgroundMigration::UpdateUsersWhereTwoFactorAuthRequiredFromGroup, :migration, schema: 20210602155110 do
include MigrationHelpers::NamespacesHelpers
let(:group_with_2fa_parent) { create_namespace('parent', Gitlab::VisibilityLevel::PRIVATE, require_two_factor_authentication: true) }
diff --git a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
index c78140a70b3..2dea0aef4cf 100644
--- a/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
+++ b/spec/lib/gitlab/cache/ci/project_pipeline_status_spec.rb
@@ -24,7 +24,7 @@ RSpec.describe Gitlab::Cache::Ci::ProjectPipelineStatus, :clean_gitlab_redis_cac
described_class.load_in_batch_for_projects([project])
# Don't call the accessor that would lazy load the variable
- project_pipeline_status = project.instance_variable_get('@pipeline_status')
+ project_pipeline_status = project.instance_variable_get(:@pipeline_status)
expect(project_pipeline_status).to be_a(described_class)
expect(project_pipeline_status).to be_loaded
diff --git a/spec/lib/gitlab/ci/config/entry/cache_spec.rb b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
index 414cbb169b9..67252eed938 100644
--- a/spec/lib/gitlab/ci/config/entry/cache_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/cache_spec.rb
@@ -16,12 +16,14 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
let(:policy) { nil }
let(:key) { 'some key' }
let(:when_config) { nil }
+ let(:unprotect) { false }
let(:config) do
{
key: key,
untracked: true,
- paths: ['some/path/']
+ paths: ['some/path/'],
+ unprotect: unprotect
}.tap do |config|
config[:policy] = policy if policy
config[:when] = when_config if when_config
@@ -31,7 +33,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
describe '#value' do
shared_examples 'hash key value' do
it 'returns hash value' do
- expect(entry.value).to eq(key: key, untracked: true, paths: ['some/path/'], policy: 'pull-push', when: 'on_success')
+ expect(entry.value).to eq(key: key, untracked: true, paths: ['some/path/'], policy: 'pull-push', when: 'on_success', unprotect: false)
end
end
@@ -57,6 +59,14 @@ RSpec.describe Gitlab::Ci::Config::Entry::Cache do
end
end
+ context 'with option `unprotect` specified' do
+ let(:unprotect) { true }
+
+ it 'returns true' do
+ expect(entry.value).to match(a_hash_including(unprotect: true))
+ end
+ end
+
context 'with `policy`' do
where(:policy, :result) do
'pull-push' | 'pull-push'
diff --git a/spec/lib/gitlab/ci/config/entry/job_spec.rb b/spec/lib/gitlab/ci/config/entry/job_spec.rb
index becb46ac2e7..c1b9bd58d98 100644
--- a/spec/lib/gitlab/ci/config/entry/job_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/job_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Config::Entry::Job do
+RSpec.describe Gitlab::Ci::Config::Entry::Job, feature_category: :pipeline_authoring do
let(:entry) { described_class.new(config, name: :rspec) }
it_behaves_like 'with inheritable CI config' do
@@ -337,100 +337,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
- context 'when only: is used with rules:' do
- let(:config) { { only: ['merge_requests'], rules: [{ if: '$THIS' }] } }
-
- it 'returns error about mixing only: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- end
-
- context 'and only: is blank' do
- let(:config) { { only: nil, rules: [{ if: '$THIS' }] } }
-
- it 'returns error about mixing only: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- end
- end
-
- context 'and rules: is blank' do
- let(:config) { { only: ['merge_requests'], rules: nil } }
-
- it 'returns error about mixing only: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- end
- end
- end
-
- context 'when except: is used with rules:' do
- let(:config) { { except: { refs: %w[master] }, rules: [{ if: '$THIS' }] } }
-
- it 'returns error about mixing except: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- end
-
- context 'and except: is blank' do
- let(:config) { { except: nil, rules: [{ if: '$THIS' }] } }
-
- it 'returns error about mixing except: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- end
- end
-
- context 'and rules: is blank' do
- let(:config) { { except: { refs: %w[master] }, rules: nil } }
-
- it 'returns error about mixing except: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- end
- end
- end
-
- context 'when only: and except: are both used with rules:' do
- let(:config) do
- {
- only: %w[merge_requests],
- except: { refs: %w[master] },
- rules: [{ if: '$THIS' }]
- }
- end
-
- it 'returns errors about mixing both only: and except: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- expect(entry.errors).to include /may not be used with `rules`/
- end
-
- context 'when only: and except: as both blank' do
- let(:config) do
- { only: nil, except: nil, rules: [{ if: '$THIS' }] }
- end
-
- it 'returns errors about mixing both only: and except: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- expect(entry.errors).to include /may not be used with `rules`/
- end
- end
-
- context 'when rules: is blank' do
- let(:config) do
- { only: %w[merge_requests], except: { refs: %w[master] }, rules: nil }
- end
-
- it 'returns errors about mixing both only: and except: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- expect(entry.errors).to include /may not be used with `rules`/
- end
- end
- end
-
context 'when start_in specified without delayed specification' do
let(:config) { { start_in: '1 day' } }
@@ -603,6 +509,92 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
end
end
end
+
+ context 'when only: is used with rules:' do
+ let(:config) { { only: ['merge_requests'], rules: [{ if: '$THIS' }], script: 'echo' } }
+
+ it 'returns error about mixing only: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`: only/
+ end
+
+ context 'and only: is blank' do
+ let(:config) { { only: nil, rules: [{ if: '$THIS' }], script: 'echo' } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'and rules: is blank' do
+ let(:config) { { only: ['merge_requests'], rules: nil, script: 'echo' } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when except: is used with rules:' do
+ let(:config) { { except: { refs: %w[master] }, rules: [{ if: '$THIS' }], script: 'echo' } }
+
+ it 'returns error about mixing except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`: except/
+ end
+
+ context 'and except: is blank' do
+ let(:config) { { except: nil, rules: [{ if: '$THIS' }], script: 'echo' } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'and rules: is blank' do
+ let(:config) { { except: { refs: %w[master] }, rules: nil, script: 'echo' } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
+
+ context 'when only: and except: are both used with rules:' do
+ let(:config) do
+ {
+ only: %w[merge_requests],
+ except: { refs: %w[master] },
+ rules: [{ if: '$THIS' }],
+ script: 'echo'
+ }
+ end
+
+ it 'returns errors about mixing both only: and except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`: only, except/
+ end
+
+ context 'when only: and except: are both blank' do
+ let(:config) do
+ { only: nil, except: nil, rules: [{ if: '$THIS' }], script: 'echo' }
+ end
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+
+ context 'when rules: is blank' do
+ let(:config) do
+ { only: %w[merge_requests], except: { refs: %w[master] }, rules: nil, script: 'echo' }
+ end
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+ end
+ end
end
describe '#relevant?' do
@@ -639,7 +631,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
it 'overrides default config' do
expect(entry[:image].value).to eq(name: 'some_image')
- expect(entry[:cache].value).to eq([key: 'test', policy: 'pull-push', when: 'on_success'])
+ expect(entry[:cache].value).to eq([key: 'test', policy: 'pull-push', when: 'on_success', unprotect: false])
end
end
@@ -654,7 +646,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Job do
it 'uses config from default entry' do
expect(entry[:image].value).to eq 'specified'
- expect(entry[:cache].value).to eq([key: 'test', policy: 'pull-push', when: 'on_success'])
+ expect(entry[:cache].value).to eq([key: 'test', policy: 'pull-push', when: 'on_success', unprotect: false])
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/processable_spec.rb b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
index f1578a068b9..b28562ba2ea 100644
--- a/spec/lib/gitlab/ci/config/entry/processable_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/processable_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Config::Entry::Processable do
+RSpec.describe Gitlab::Ci::Config::Entry::Processable, feature_category: :pipeline_authoring do
let(:node_class) do
Class.new(::Gitlab::Config::Entry::Node) do
include Gitlab::Ci::Config::Entry::Processable
@@ -104,111 +104,102 @@ RSpec.describe Gitlab::Ci::Config::Entry::Processable do
end
end
- context 'when only: is used with rules:' do
- let(:config) { { only: ['merge_requests'], rules: [{ if: '$THIS' }] } }
+ context 'when a variable has an invalid data attribute' do
+ let(:config) do
+ {
+ script: 'echo',
+ variables: { 'VAR1' => 'val 1', 'VAR2' => { value: 'val 2', description: 'hello var 2' } }
+ }
+ end
- it 'returns error about mixing only: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
+ it 'reports error about variable' do
+ expect(entry.errors)
+ .to include 'variables:var2 config uses invalid data keys: description'
end
+ end
+ end
- context 'and only: is blank' do
- let(:config) { { only: nil, rules: [{ if: '$THIS' }] } }
+ context 'when only: is used with rules:' do
+ let(:config) { { only: ['merge_requests'], rules: [{ if: '$THIS' }] } }
- it 'returns error about mixing only: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- end
- end
+ it 'returns error about mixing only: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`: only/
+ end
- context 'and rules: is blank' do
- let(:config) { { only: ['merge_requests'], rules: nil } }
+ context 'and only: is blank' do
+ let(:config) { { only: nil, rules: [{ if: '$THIS' }] } }
- it 'returns error about mixing only: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- end
+ it 'is valid' do
+ expect(entry).to be_valid
end
end
- context 'when except: is used with rules:' do
- let(:config) { { except: { refs: %w[master] }, rules: [{ if: '$THIS' }] } }
+ context 'and rules: is blank' do
+ let(:config) { { only: ['merge_requests'], rules: nil } }
- it 'returns error about mixing except: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
+ it 'is valid' do
+ expect(entry).to be_valid
end
+ end
+ end
- context 'and except: is blank' do
- let(:config) { { except: nil, rules: [{ if: '$THIS' }] } }
+ context 'when except: is used with rules:' do
+ let(:config) { { except: { refs: %w[master] }, rules: [{ if: '$THIS' }] } }
- it 'returns error about mixing except: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- end
- end
+ it 'returns error about mixing except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`: except/
+ end
- context 'and rules: is blank' do
- let(:config) { { except: { refs: %w[master] }, rules: nil } }
+ context 'and except: is blank' do
+ let(:config) { { except: nil, rules: [{ if: '$THIS' }] } }
- it 'returns error about mixing except: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- end
+ it 'is valid' do
+ expect(entry).to be_valid
end
end
- context 'when only: and except: are both used with rules:' do
- let(:config) do
- {
- only: %w[merge_requests],
- except: { refs: %w[master] },
- rules: [{ if: '$THIS' }]
- }
- end
+ context 'and rules: is blank' do
+ let(:config) { { except: { refs: %w[master] }, rules: nil } }
- it 'returns errors about mixing both only: and except: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- expect(entry.errors).to include /may not be used with `rules`/
+ it 'is valid' do
+ expect(entry).to be_valid
end
+ end
+ end
- context 'when only: and except: as both blank' do
- let(:config) do
- { only: nil, except: nil, rules: [{ if: '$THIS' }] }
- end
+ context 'when only: and except: are both used with rules:' do
+ let(:config) do
+ {
+ only: %w[merge_requests],
+ except: { refs: %w[master] },
+ rules: [{ if: '$THIS' }]
+ }
+ end
- it 'returns errors about mixing both only: and except: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- expect(entry.errors).to include /may not be used with `rules`/
- end
- end
+ it 'returns errors about mixing both only: and except: with rules:' do
+ expect(entry).not_to be_valid
+ expect(entry.errors).to include /may not be used with `rules`: only, except/
+ end
- context 'when rules: is blank' do
- let(:config) do
- { only: %w[merge_requests], except: { refs: %w[master] }, rules: nil }
- end
+ context 'when only: and except: are both blank' do
+ let(:config) do
+ { only: nil, except: nil, rules: [{ if: '$THIS' }] }
+ end
- it 'returns errors about mixing both only: and except: with rules:' do
- expect(entry).not_to be_valid
- expect(entry.errors).to include /may not be used with `rules`/
- expect(entry.errors).to include /may not be used with `rules`/
- end
+ it 'is valid' do
+ expect(entry).to be_valid
end
end
- context 'when a variable has an invalid data attribute' do
+ context 'when rules: is blank' do
let(:config) do
- {
- script: 'echo',
- variables: { 'VAR1' => 'val 1', 'VAR2' => { value: 'val 2', description: 'hello var 2' } }
- }
+ { only: %w[merge_requests], except: { refs: %w[master] }, rules: nil }
end
- it 'reports error about variable' do
- expect(entry.errors)
- .to include 'variables:var2 config uses invalid data keys: description'
+ it 'is valid' do
+ expect(entry).to be_valid
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb b/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb
index 394d91466bf..cbd3109522c 100644
--- a/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/product/matrix_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Matrix do
[
{
'VAR_1' => (1..10).to_a,
- 'VAR_2' => (11..20).to_a
+ 'VAR_2' => (11..31).to_a
}
]
end
@@ -41,7 +41,7 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Matrix do
describe '#errors' do
it 'returns error about too many jobs' do
expect(matrix.errors)
- .to include('matrix config generates too many jobs (maximum is 50)')
+ .to include('matrix config generates too many jobs (maximum is 200)')
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb b/spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb
index a16f1cf9e43..ec21519a8f6 100644
--- a/spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/product/parallel_spec.rb
@@ -33,10 +33,10 @@ RSpec.describe ::Gitlab::Ci::Config::Entry::Product::Parallel do
it_behaves_like 'invalid config', /must be greater than or equal to 2/
end
- context 'when it is bigger than 50' do
- let(:config) { 51 }
+ context 'when it is bigger than 200' do
+ let(:config) { 201 }
- it_behaves_like 'invalid config', /must be less than or equal to 50/
+ it_behaves_like 'invalid config', /must be less than or equal to 200/
end
context 'when it is not an integer' do
diff --git a/spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb b/spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb
index 0fd9a83a4fa..ccd6f6ab427 100644
--- a/spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/reports/coverage_report_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Config::Entry::Reports::CoverageReport do
+RSpec.describe Gitlab::Ci::Config::Entry::Reports::CoverageReport, feature_category: :pipeline_authoring do
let(:entry) { described_class.new(config) }
describe 'validations' do
@@ -14,6 +14,16 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports::CoverageReport do
it { expect(entry.value).to eq(config) }
end
+ context 'when it is not a hash' do
+ where(:config) { ['string', true, []] }
+
+ with_them do
+ it { expect(entry).not_to be_valid }
+
+ it { expect(entry.errors).to include /should be a hash/ }
+ end
+ end
+
context 'with unsupported coverage format' do
let(:config) { { coverage_format: 'jacoco', path: 'jacoco.xml' } }
diff --git a/spec/lib/gitlab/ci/config/entry/reports_spec.rb b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
index 45aa859a356..715cb18fb92 100644
--- a/spec/lib/gitlab/ci/config/entry/reports_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/reports_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Config::Entry::Reports do
+RSpec.describe Gitlab::Ci::Config::Entry::Reports, feature_category: :pipeline_authoring do
let(:entry) { described_class.new(config) }
describe 'validates ALLOWED_KEYS' do
@@ -90,6 +90,18 @@ RSpec.describe Gitlab::Ci::Config::Entry::Reports do
end
end
end
+
+ context 'when coverage_report is nil' do
+ let(:config) { { coverage_report: nil } }
+
+ it 'is valid' do
+ expect(entry).to be_valid
+ end
+
+ it 'returns artifacts configuration as an empty hash' do
+ expect(entry.value).to eq({})
+ end
+ end
end
context 'when entry value is not correct' do
diff --git a/spec/lib/gitlab/ci/config/entry/root_spec.rb b/spec/lib/gitlab/ci/config/entry/root_spec.rb
index c40589104cd..9722609aef6 100644
--- a/spec/lib/gitlab/ci/config/entry/root_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/root_spec.rb
@@ -127,7 +127,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
image: { name: 'image:1.0' },
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
- cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
+ cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success',
+ unprotect: false }],
job_variables: {},
root_variables_inheritance: true,
ignore: false,
@@ -142,7 +143,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
image: { name: 'image:1.0' },
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
- cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
+ cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success',
+ unprotect: false }],
job_variables: {},
root_variables_inheritance: true,
ignore: false,
@@ -158,7 +160,8 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
release: { name: "Release $CI_TAG_NAME", tag_name: 'v0.06', description: "./release_changelog.txt" },
image: { name: "image:1.0" },
services: [{ name: "postgres:9.1" }, { name: "mysql:5.5" }],
- cache: [{ key: "k", untracked: true, paths: ["public/"], policy: "pull-push", when: 'on_success' }],
+ cache: [{ key: "k", untracked: true, paths: ["public/"], policy: "pull-push", when: 'on_success',
+ unprotect: false }],
only: { refs: %w(branches tags) },
job_variables: { 'VAR' => { value: 'job' } },
root_variables_inheritance: true,
@@ -206,7 +209,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
image: { name: 'image:1.0' },
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
- cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
+ cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success', unprotect: false }],
job_variables: {},
root_variables_inheritance: true,
ignore: false,
@@ -219,7 +222,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
image: { name: 'image:1.0' },
services: [{ name: 'postgres:9.1' }, { name: 'mysql:5.5' }],
stage: 'test',
- cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success' }],
+ cache: [{ key: 'k', untracked: true, paths: ['public/'], policy: 'pull-push', when: 'on_success', unprotect: false }],
job_variables: { 'VAR' => { value: 'job' } },
root_variables_inheritance: true,
ignore: false,
@@ -274,7 +277,7 @@ RSpec.describe Gitlab::Ci::Config::Entry::Root do
describe '#cache_value' do
it 'returns correct cache definition' do
- expect(root.cache_value).to eq([key: 'a', policy: 'pull-push', when: 'on_success'])
+ expect(root.cache_value).to eq([key: 'a', policy: 'pull-push', when: 'on_success', unprotect: false])
end
end
end
diff --git a/spec/lib/gitlab/ci/config/entry/variable_spec.rb b/spec/lib/gitlab/ci/config/entry/variable_spec.rb
index 97b06c8b1a5..1067db6d124 100644
--- a/spec/lib/gitlab/ci/config/entry/variable_spec.rb
+++ b/spec/lib/gitlab/ci/config/entry/variable_spec.rb
@@ -257,14 +257,6 @@ RSpec.describe Gitlab::Ci::Config::Entry::Variable do
subject(:value_with_data) { entry.value_with_data }
it { is_expected.to eq(value: 'value', raw: true) }
-
- context 'when the FF ci_raw_variables_in_yaml_config is disabled' do
- before do
- stub_feature_flags(ci_raw_variables_in_yaml_config: false)
- end
-
- it { is_expected.to eq(value: 'value') }
- end
end
context 'when config expand is true' do
diff --git a/spec/lib/gitlab/ci/config/external/file/local_spec.rb b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
index f5b36ebfa45..a77acb45978 100644
--- a/spec/lib/gitlab/ci/config/external/file/local_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/file/local_spec.rb
@@ -2,11 +2,13 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Config::External::File::Local do
+RSpec.describe Gitlab::Ci::Config::External::File::Local, feature_category: :pipeline_authoring do
+ include RepoHelpers
+
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
- let(:sha) { '12345' }
+ let(:sha) { project.commit.sha }
let(:variables) { project.predefined_variables.to_runner_variables }
let(:context) { Gitlab::Ci::Config::External::Context.new(**context_params) }
let(:params) { { local: location } }
@@ -172,14 +174,17 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local do
let(:another_location) { 'another-config.yml' }
let(:another_content) { 'rspec: JOB' }
- before do
- allow(project.repository).to receive(:blob_data_at).with(sha, location)
- .and_return(content)
-
- allow(project.repository).to receive(:blob_data_at).with(sha, another_location)
- .and_return(another_content)
+ let(:project_files) do
+ {
+ location => content,
+ another_location => another_content
+ }
+ end
- local_file.validate!
+ around(:all) do |example|
+ create_and_delete_files(project, project_files) do
+ example.run
+ end
end
it 'does expand hash to include the template' do
@@ -196,11 +201,11 @@ RSpec.describe Gitlab::Ci::Config::External::File::Local do
it {
is_expected.to eq(
context_project: project.full_path,
- context_sha: '12345',
+ context_sha: sha,
type: :local,
- location: location,
- blob: "http://localhost/#{project.full_path}/-/blob/12345/lib/gitlab/ci/templates/existent-file.yml",
- raw: "http://localhost/#{project.full_path}/-/raw/12345/lib/gitlab/ci/templates/existent-file.yml",
+ location: '/lib/gitlab/ci/templates/existent-file.yml',
+ blob: "http://localhost/#{project.full_path}/-/blob/#{sha}/lib/gitlab/ci/templates/existent-file.yml",
+ raw: "http://localhost/#{project.full_path}/-/raw/#{sha}/lib/gitlab/ci/templates/existent-file.yml",
extra: {}
)
}
diff --git a/spec/lib/gitlab/ci/config/external/mapper_spec.rb b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
index b7e58d4dfa1..9d0e57d4292 100644
--- a/spec/lib/gitlab/ci/config/external/mapper_spec.rb
+++ b/spec/lib/gitlab/ci/config/external/mapper_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-# This will be removed with FF ci_refactoring_external_mapper and moved to below.
+# This will be used with the FF ci_refactoring_external_mapper_verifier in the next MR.
+# It can be removed when the FF is removed.
RSpec.shared_context 'gitlab_ci_config_external_mapper' do
include StubRequests
include RepoHelpers
@@ -466,12 +467,4 @@ end
RSpec.describe Gitlab::Ci::Config::External::Mapper, feature_category: :pipeline_authoring do
it_behaves_like 'gitlab_ci_config_external_mapper'
-
- context 'when the FF ci_refactoring_external_mapper is disabled' do
- before do
- stub_feature_flags(ci_refactoring_external_mapper: false)
- end
-
- it_behaves_like 'gitlab_ci_config_external_mapper'
- end
end
diff --git a/spec/lib/gitlab/ci/config_spec.rb b/spec/lib/gitlab/ci/config_spec.rb
index b48a89059bf..5cdc9c21561 100644
--- a/spec/lib/gitlab/ci/config_spec.rb
+++ b/spec/lib/gitlab/ci/config_spec.rb
@@ -4,6 +4,7 @@ require 'spec_helper'
RSpec.describe Gitlab::Ci::Config, feature_category: :pipeline_authoring do
include StubRequests
+ include RepoHelpers
let_it_be(:user) { create(:user) }
@@ -313,9 +314,12 @@ RSpec.describe Gitlab::Ci::Config, feature_category: :pipeline_authoring do
context "when using 'include' directive" do
let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :repository, group: group) }
+ let_it_be(:main_project) { create(:project, :repository, :public, group: group) }
+
+ let(:project_sha) { project.commit.id }
+ let(:main_project_sha) { main_project.commit.id }
- let(:project) { create(:project, :repository, group: group) }
- let(:main_project) { create(:project, :repository, :public, group: group) }
let(:pipeline) { build(:ci_pipeline, project: project) }
let(:remote_location) { 'https://gitlab.com/gitlab-org/gitlab-foss/blob/1234/.gitlab-ci-1.yml' }
@@ -356,36 +360,38 @@ RSpec.describe Gitlab::Ci::Config, feature_category: :pipeline_authoring do
end
let(:config) do
- described_class.new(gitlab_ci_yml, project: project, pipeline: pipeline, sha: '12345', user: user)
+ described_class.new(gitlab_ci_yml, project: project, pipeline: pipeline, sha: project_sha, user: user)
end
- before do
- stub_full_request(remote_location).to_return(body: remote_file_content)
-
- allow(project.repository)
- .to receive(:blob_data_at).and_return(local_file_content)
+ let(:project_files) do
+ {
+ local_location => local_file_content
+ }
+ end
- main_project.repository.create_file(
- main_project.creator,
- '.gitlab-ci.yml',
- local_file_content,
- message: 'Add README.md',
- branch_name: 'master'
- )
+ let(:main_project_files) do
+ {
+ '.gitlab-ci.yml' => local_file_content,
+ '.another-ci-file.yml' => local_file_content
+ }
+ end
- main_project.repository.create_file(
- main_project.creator,
- '.another-ci-file.yml',
- local_file_content,
- message: 'Add README.md',
- branch_name: 'master'
- )
+ before do
+ stub_full_request(remote_location).to_return(body: remote_file_content)
create(:ci_variable, project: project, key: "REF", value: "HEAD")
create(:ci_group_variable, group: group, key: "FILENAME", value: ".gitlab-ci.yml")
create(:ci_instance_variable, key: 'MAIN_PROJECT', value: main_project.full_path)
end
+ around do |example|
+ create_and_delete_files(project, project_files) do
+ create_and_delete_files(main_project, main_project_files) do
+ example.run
+ end
+ end
+ end
+
context "when gitlab_ci_yml has valid 'include' defined" do
it 'returns a composed hash' do
composed_hash = {
@@ -434,25 +440,25 @@ RSpec.describe Gitlab::Ci::Config, feature_category: :pipeline_authoring do
expect(config.metadata[:includes]).to contain_exactly(
{ type: :local,
location: local_location,
- blob: "http://localhost/#{project.full_path}/-/blob/12345/#{local_location}",
- raw: "http://localhost/#{project.full_path}/-/raw/12345/#{local_location}",
+ blob: "http://localhost/#{project.full_path}/-/blob/#{project_sha}/#{local_location}",
+ raw: "http://localhost/#{project.full_path}/-/raw/#{project_sha}/#{local_location}",
extra: {},
context_project: project.full_path,
- context_sha: '12345' },
+ context_sha: project_sha },
{ type: :remote,
location: remote_location,
blob: nil,
raw: remote_location,
extra: {},
context_project: project.full_path,
- context_sha: '12345' },
+ context_sha: project_sha },
{ type: :file,
location: '.gitlab-ci.yml',
- blob: "http://localhost/#{main_project.full_path}/-/blob/#{main_project.commit.sha}/.gitlab-ci.yml",
- raw: "http://localhost/#{main_project.full_path}/-/raw/#{main_project.commit.sha}/.gitlab-ci.yml",
+ blob: "http://localhost/#{main_project.full_path}/-/blob/#{main_project_sha}/.gitlab-ci.yml",
+ raw: "http://localhost/#{main_project.full_path}/-/raw/#{main_project_sha}/.gitlab-ci.yml",
extra: { project: main_project.full_path, ref: 'HEAD' },
context_project: project.full_path,
- context_sha: '12345' }
+ context_sha: project_sha }
)
end
end
@@ -511,16 +517,13 @@ RSpec.describe Gitlab::Ci::Config, feature_category: :pipeline_authoring do
describe 'external file version' do
context 'when external local file SHA is defined' do
it 'is using a defined value' do
- expect(project.repository).to receive(:blob_data_at)
- .with('eeff1122', local_location)
-
- described_class.new(gitlab_ci_yml, project: project, sha: 'eeff1122', user: user, pipeline: pipeline)
+ described_class.new(gitlab_ci_yml, project: project, sha: project_sha, user: user, pipeline: pipeline)
end
end
context 'when external local file SHA is not defined' do
it 'is using latest SHA on the default branch' do
- expect(project.repository).to receive(:root_ref_sha)
+ expect(project.repository).to receive(:root_ref_sha).and_call_original
described_class.new(gitlab_ci_yml, project: project, sha: nil, user: user, pipeline: pipeline)
end
@@ -757,13 +760,11 @@ RSpec.describe Gitlab::Ci::Config, feature_category: :pipeline_authoring do
before do
project.add_developer(user)
+ end
- allow_next_instance_of(Repository) do |repository|
- allow(repository).to receive(:blob_data_at).with(an_instance_of(String), local_location)
- .and_return(local_file_content)
-
- allow(repository).to receive(:blob_data_at).with(an_instance_of(String), other_file_location)
- .and_return(other_file_content)
+ around do |example|
+ create_and_delete_files(project, { other_file_location => other_file_content }) do
+ example.run
end
end
@@ -819,14 +820,10 @@ RSpec.describe Gitlab::Ci::Config, feature_category: :pipeline_authoring do
HEREDOC
end
- before do
- project.repository.create_file(
- project.creator,
- 'my_builds.yml',
- local_file_content,
- message: 'Add my_builds.yml',
- branch_name: '12345'
- )
+ around do |example|
+ create_and_delete_files(project, { 'my_builds.yml' => local_file_content }) do
+ example.run
+ end
end
context 'when the exists file does not exist' do
@@ -853,7 +850,7 @@ RSpec.describe Gitlab::Ci::Config, feature_category: :pipeline_authoring do
include:
- local: #{local_location}
rules:
- - if: $CI_COMMIT_SHA == "#{project.commit.sha}"
+ - if: $CI_COMMIT_REF_NAME == "master"
HEREDOC
end
diff --git a/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb b/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb
index 712dc00ec7a..acb7c122bcd 100644
--- a/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/sbom/validators/cyclonedx_schema_validator_spec.rb
@@ -62,6 +62,47 @@ RSpec.describe Gitlab::Ci::Parsers::Sbom::Validators::CyclonedxSchemaValidator,
it { is_expected.to be_valid }
end
+ context 'when components have licenses' do
+ let(:components) do
+ [
+ {
+ "type" => "library",
+ "name" => "activesupport",
+ "version" => "5.1.4",
+ "licenses" => [
+ { "license" => { "id" => "MIT" } }
+ ]
+ }
+ ]
+ end
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when components have a signature' do
+ let(:components) do
+ [
+ {
+ "type" => "library",
+ "name" => "activesupport",
+ "version" => "5.1.4",
+ "signature" => {
+ "algorithm" => "ES256",
+ "publicKey" => {
+ "kty" => "EC",
+ "crv" => "P-256",
+ "x" => "6BKxpty8cI-exDzCkh-goU6dXq3MbcY0cd1LaAxiNrU",
+ "y" => "mCbcvUzm44j3Lt2b5BPyQloQ91tf2D2V-gzeUxWaUdg"
+ },
+ "value" => "ybT1qz5zHNi4Ndc6y7Zhamuf51IqXkPkZwjH1XcC-KSuBiaQplTw6Jasf2MbCLg3CF7PAdnMO__WSLwvI5r2jA"
+ }
+ }
+ ]
+ end
+
+ it { is_expected.to be_valid }
+ end
+
context "when components are not valid" do
let(:components) do
[
diff --git a/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb b/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
index c94ed1f8d6d..12886c79d7d 100644
--- a/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
+++ b/spec/lib/gitlab/ci/parsers/security/validators/schema_validator_spec.rb
@@ -2,9 +2,10 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
+RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator, feature_category: :vulnerability_management do
let_it_be(:project) { create(:project) }
+ let(:current_dast_versions) { described_class::CURRENT_VERSIONS[:dast].join(', ') }
let(:supported_dast_versions) { described_class::SUPPORTED_VERSIONS[:dast].join(', ') }
let(:deprecated_schema_version_message) {}
let(:missing_schema_version_message) do
@@ -19,6 +20,14 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
}
end
+ let(:analyzer_vendor) do
+ { 'name' => 'A DAST analyzer' }
+ end
+
+ let(:scanner_vendor) do
+ { 'name' => 'A DAST scanner' }
+ end
+
let(:report_data) do
{
'scan' => {
@@ -26,7 +35,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
'id' => 'my-dast-analyzer',
'name' => 'My DAST analyzer',
'version' => '0.1.0',
- 'vendor' => { 'name' => 'A DAST analyzer' }
+ 'vendor' => analyzer_vendor
},
'end_time' => '2020-01-28T03:26:02',
'scanned_resources' => [],
@@ -34,7 +43,7 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
'id' => 'my-dast-scanner',
'name' => 'My DAST scanner',
'version' => '0.2.0',
- 'vendor' => { 'name' => 'A DAST scanner' }
+ 'vendor' => scanner_vendor
},
'start_time' => '2020-01-28T03:26:01',
'status' => 'success',
@@ -458,8 +467,9 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
let(:report_version) { described_class::DEPRECATED_VERSIONS[report_type].last }
let(:expected_deprecation_message) do
- "Version #{report_version} for report type #{report_type} has been deprecated, supported versions for this "\
- "report type are: #{supported_dast_versions}. GitLab will attempt to parse and ingest this report if valid."
+ "version #{report_version} for report type #{report_type} is deprecated. "\
+ "However, GitLab will still attempt to parse and ingest this report. "\
+ "Upgrade the security report to one of the following versions: #{current_dast_versions}."
end
let(:expected_deprecation_warnings) do
@@ -492,6 +502,22 @@ RSpec.describe Gitlab::Ci::Parsers::Security::Validators::SchemaValidator do
it_behaves_like 'report with expected warnings'
end
+
+ context 'and the report passes schema validation as a GitLab-vendored analyzer' do
+ let(:analyzer_vendor) do
+ { 'name' => 'GitLab' }
+ end
+
+ it { is_expected.to be_empty }
+ end
+
+ context 'and the report passes schema validation as a GitLab-vendored scanner' do
+ let(:scanner_vendor) do
+ { 'name' => 'GitLab' }
+ end
+
+ it { is_expected.to be_empty }
+ end
end
context 'when given an unsupported schema version' do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
index be5d3a96126..bec80a43a76 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/create_deployments_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Pipeline::Chain::CreateDeployments do
+RSpec.describe Gitlab::Ci::Pipeline::Chain::CreateDeployments, feature_category: :continuous_integration do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
@@ -19,6 +19,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::CreateDeployments do
subject { step.perform! }
before do
+ stub_feature_flags(move_create_deployments_to_worker: false)
job.pipeline = pipeline
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
index eba0db0adfb..e13e78d0db8 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/create_spec.rb
@@ -63,11 +63,11 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Create do
end
let(:job) do
- build(:ci_build, stage: stage, pipeline: pipeline, project: project)
+ build(:ci_build, ci_stage: stage, pipeline: pipeline, project: project)
end
let(:bridge) do
- build(:ci_bridge, stage: stage, pipeline: pipeline, project: project)
+ build(:ci_bridge, ci_stage: stage, pipeline: pipeline, project: project)
end
before do
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
index 35e1c48a942..00200b57b1e 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_metadata_spec.rb
@@ -54,94 +54,76 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::PopulateMetadata do
expect(step.break?).to be false
end
- context 'with feature flag disabled' do
- before do
- stub_feature_flags(pipeline_name: false)
- end
-
- it 'does not build pipeline_metadata' do
- run_chain
+ it 'builds pipeline_metadata' do
+ run_chain
- expect(pipeline.pipeline_metadata).to be_nil
- end
+ expect(pipeline.pipeline_metadata.name).to eq('Pipeline name')
+ expect(pipeline.pipeline_metadata.project).to eq(pipeline.project)
+ expect(pipeline.pipeline_metadata).not_to be_persisted
end
- context 'with feature flag enabled' do
- before do
- stub_feature_flags(pipeline_name: true)
+ context 'with empty name' do
+ let(:config) do
+ { workflow: { name: ' ' }, rspec: { script: 'rspec' } }
end
- it 'builds pipeline_metadata' do
+ it 'strips whitespace from name' do
run_chain
- expect(pipeline.pipeline_metadata.name).to eq('Pipeline name')
- expect(pipeline.pipeline_metadata.project).to eq(pipeline.project)
- expect(pipeline.pipeline_metadata).not_to be_persisted
+ expect(pipeline.pipeline_metadata).to be_nil
end
- context 'with empty name' do
+ context 'with empty name after variable substitution' do
let(:config) do
- { workflow: { name: ' ' }, rspec: { script: 'rspec' } }
+ { workflow: { name: '$VAR1' }, rspec: { script: 'rspec' } }
end
- it 'strips whitespace from name' do
+ it 'does not save empty name' do
run_chain
expect(pipeline.pipeline_metadata).to be_nil
end
-
- context 'with empty name after variable substitution' do
- let(:config) do
- { workflow: { name: '$VAR1' }, rspec: { script: 'rspec' } }
- end
-
- it 'does not save empty name' do
- run_chain
-
- expect(pipeline.pipeline_metadata).to be_nil
- end
- end
end
+ end
- context 'with variables' do
- let(:config) do
- {
- variables: { ROOT_VAR: 'value $WORKFLOW_VAR1' },
- workflow: {
- name: 'Pipeline $ROOT_VAR $WORKFLOW_VAR2 $UNKNOWN_VAR',
- rules: [{ variables: { WORKFLOW_VAR1: 'value1', WORKFLOW_VAR2: 'value2' } }]
- },
- rspec: { script: 'rspec' }
- }
- end
+ context 'with variables' do
+ let(:config) do
+ {
+ variables: { ROOT_VAR: 'value $WORKFLOW_VAR1' },
+ workflow: {
+ name: 'Pipeline $ROOT_VAR $WORKFLOW_VAR2 $UNKNOWN_VAR',
+ rules: [{ variables: { WORKFLOW_VAR1: 'value1', WORKFLOW_VAR2: 'value2' } }]
+ },
+ rspec: { script: 'rspec' }
+ }
+ end
- it 'substitutes variables' do
- run_chain
+ it 'substitutes variables' do
+ run_chain
- expect(pipeline.pipeline_metadata.name).to eq('Pipeline value value1 value2')
- end
+ expect(pipeline.pipeline_metadata.name).to eq('Pipeline value value1 value2')
end
+ end
- context 'with invalid name' do
- let(:config) do
- {
- variables: { ROOT_VAR: 'a' * 256 },
- workflow: {
- name: 'Pipeline $ROOT_VAR'
- },
- rspec: { script: 'rspec' }
- }
- end
+ context 'with invalid name' do
+ let(:config) do
+ {
+ variables: { ROOT_VAR: 'a' * 256 },
+ workflow: {
+ name: 'Pipeline $ROOT_VAR'
+ },
+ rspec: { script: 'rspec' }
+ }
+ end
- it 'returns error and breaks chain' do
- ret = run_chain
+ it 'returns error and breaks chain' do
+ ret = run_chain
- expect(ret)
- .to match_array(["Failed to build pipeline metadata! Name is too long (maximum is 255 characters)"])
- expect(pipeline.pipeline_metadata.errors.full_messages)
- .to match_array(['Name is too long (maximum is 255 characters)'])
- expect(step.break?).to be true
- end
+ expect(ret)
+ .to match_array(["Failed to build pipeline metadata! Name is too long (maximum is 255 characters)"])
+ expect(pipeline.pipeline_metadata.errors.full_messages)
+ .to match_array(['Name is too long (maximum is 255 characters)'])
+ expect(step.break?).to be true
end
end
end
diff --git a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
index 62de4d2e96d..91bb94bbb11 100644
--- a/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/chain/populate_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate do
+RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate, feature_category: :continuous_integration do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
@@ -90,7 +90,8 @@ RSpec.describe Gitlab::Ci::Pipeline::Chain::Populate do
it 'appends an error about missing stages' do
expect(pipeline.errors.to_a)
- .to include 'No stages / jobs for this pipeline.'
+ .to include 'Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'
end
it 'wastes pipeline iid' do
diff --git a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
index fb8020bf43e..c264ea3bece 100644
--- a/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
+++ b/spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb
@@ -212,6 +212,7 @@ RSpec.describe Gitlab::Ci::Pipeline::Seed::Build::Cache do
paths: ['vendor/ruby'],
untracked: true,
policy: 'push',
+ unprotect: true,
when: 'on_success'
}
end
diff --git a/spec/lib/gitlab/ci/status/bridge/factory_spec.rb b/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
index 6081f104e42..c13901a4776 100644
--- a/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
+++ b/spec/lib/gitlab/ci/status/bridge/factory_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Status::Bridge::Factory do
+RSpec.describe Gitlab::Ci::Status::Bridge::Factory, feature_category: :continuous_integration do
let(:user) { create(:user) }
let(:project) { bridge.project }
let(:status) { factory.fabricate! }
@@ -59,13 +59,15 @@ RSpec.describe Gitlab::Ci::Status::Bridge::Factory do
context 'failed with downstream_pipeline_creation_failed' do
before do
- bridge.options = { downstream_errors: ['No stages / jobs for this pipeline.', 'other error'] }
+ bridge.options = { downstream_errors: ['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.', 'other error'] }
bridge.failure_reason = 'downstream_pipeline_creation_failed'
end
it 'fabricates correct status_tooltip' do
expect(status.status_tooltip).to eq(
- "#{s_('CiStatusText|failed')} - (downstream pipeline can not be created, No stages / jobs for this pipeline., other error)"
+ "#{s_('CiStatusText|failed')} - (downstream pipeline can not be created, Pipeline will not run for the selected trigger. " \
+ "The rules configuration prevented any jobs from being added to the pipeline., other error)"
)
end
end
diff --git a/spec/lib/gitlab/ci/status/build/manual_spec.rb b/spec/lib/gitlab/ci/status/build/manual_spec.rb
index a1152cb77e3..8f5d1558314 100644
--- a/spec/lib/gitlab/ci/status/build/manual_spec.rb
+++ b/spec/lib/gitlab/ci/status/build/manual_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Status::Build::Manual do
+RSpec.describe Gitlab::Ci::Status::Build::Manual, feature_category: :continuous_integration do
let_it_be(:user) { create(:user) }
let_it_be(:job) { create(:ci_build, :manual) }
@@ -18,11 +18,44 @@ RSpec.describe Gitlab::Ci::Status::Build::Manual do
job.project.add_maintainer(user)
end
- it { expect(subject.illustration[:content]).to match /This job requires manual intervention to start/ }
+ context 'when the job has not been played' do
+ it 'instructs the user about possible actions' do
+ expect(subject.illustration[:content]).to eq(
+ _(
+ 'This job does not start automatically and must be started manually. ' \
+ 'You can add CI/CD variables below for last-minute configuration changes before starting the job.'
+ )
+ )
+ end
+ end
+
+ context 'when the job is retryable' do
+ before do
+ job.update!(status: :failed)
+ end
+
+ it 'instructs the user about possible actions' do
+ expect(subject.illustration[:content]).to eq(
+ _("You can modify this job's CI/CD variables before running it again.")
+ )
+ end
+ end
+ end
+
+ context 'when the user can not trigger the job because of outdated deployment' do
+ before do
+ allow(job).to receive(:outdated_deployment?).and_return(true)
+ end
+
+ it { expect(subject.illustration[:content]).to match /This deployment job does not run automatically and must be started manually, but it's older than the latest deployment, and therefore can't run/ }
end
- context 'when the user can not trigger the job' do
- it { expect(subject.illustration[:content]).to match /This job does not run automatically and must be started manually/ }
+ context 'when the user can not trigger the job due to another reason' do
+ it 'informs the user' do
+ expect(subject.illustration[:content]).to eq(
+ _("This job does not run automatically and must be started manually, but you do not have access to it.")
+ )
+ end
end
end
diff --git a/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
index 16c5d7a4b6d..286f3d10b7f 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/code_quality_gitlab_ci_yaml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml' do
+RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml', feature_category: :continuous_integration do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Jobs/Code-Quality') }
describe 'the created pipeline' do
@@ -63,7 +63,8 @@ RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml' do
context 'on master' do
it 'has no jobs' do
expect(build_names).to be_empty
- expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
+ expect(pipeline.errors.full_messages).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
@@ -72,7 +73,8 @@ RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml' do
it 'has no jobs' do
expect(build_names).to be_empty
- expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
+ expect(pipeline.errors.full_messages).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
@@ -81,7 +83,8 @@ RSpec.describe 'Jobs/Code-Quality.gitlab-ci.yml' do
it 'has no jobs' do
expect(build_names).to be_empty
- expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
+ expect(pipeline.errors.full_messages).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
end
diff --git a/spec/lib/gitlab/ci/templates/Jobs/sast_iac_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/sast_iac_gitlab_ci_yaml_spec.rb
index 8a5aea7c0f0..68d249e31f9 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/sast_iac_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/sast_iac_gitlab_ci_yaml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Jobs/SAST-IaC.gitlab-ci.yml' do
+RSpec.describe 'Jobs/SAST-IaC.gitlab-ci.yml', feature_category: :continuous_integration do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Jobs/SAST-IaC') }
describe 'the created pipeline' do
@@ -50,7 +50,8 @@ RSpec.describe 'Jobs/SAST-IaC.gitlab-ci.yml' do
context 'on default branch' do
it 'has no jobs' do
expect(build_names).to be_empty
- expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
+ expect(pipeline.errors.full_messages).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
@@ -59,7 +60,8 @@ RSpec.describe 'Jobs/SAST-IaC.gitlab-ci.yml' do
it 'has no jobs' do
expect(build_names).to be_empty
- expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
+ expect(pipeline.errors.full_messages).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
end
diff --git a/spec/lib/gitlab/ci/templates/Jobs/sast_iac_latest_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/sast_iac_latest_gitlab_ci_yaml_spec.rb
index d540b035f81..039a6a739dd 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/sast_iac_latest_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/sast_iac_latest_gitlab_ci_yaml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Jobs/SAST-IaC.latest.gitlab-ci.yml' do
+RSpec.describe 'Jobs/SAST-IaC.latest.gitlab-ci.yml', feature_category: :continuous_integration do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Jobs/SAST-IaC.latest') }
describe 'the created pipeline' do
@@ -51,7 +51,8 @@ RSpec.describe 'Jobs/SAST-IaC.latest.gitlab-ci.yml' do
context 'on default branch' do
it 'has no jobs' do
expect(build_names).to be_empty
- expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
+ expect(pipeline.errors.full_messages).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
@@ -60,7 +61,8 @@ RSpec.describe 'Jobs/SAST-IaC.latest.gitlab-ci.yml' do
it 'has no jobs' do
expect(build_names).to be_empty
- expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
+ expect(pipeline.errors.full_messages).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
end
diff --git a/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
index 7cf0cf3ed33..d73d8a15feb 100644
--- a/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/Jobs/test_gitlab_ci_yaml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Jobs/Test.gitlab-ci.yml' do
+RSpec.describe 'Jobs/Test.gitlab-ci.yml', feature_category: :continuous_integration do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Jobs/Test') }
describe 'the created pipeline' do
@@ -63,7 +63,8 @@ RSpec.describe 'Jobs/Test.gitlab-ci.yml' do
context 'on master' do
it 'has no jobs' do
expect(build_names).to be_empty
- expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
+ expect(pipeline.errors.full_messages).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
@@ -72,7 +73,8 @@ RSpec.describe 'Jobs/Test.gitlab-ci.yml' do
it 'has no jobs' do
expect(build_names).to be_empty
- expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
+ expect(pipeline.errors.full_messages).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
@@ -81,7 +83,8 @@ RSpec.describe 'Jobs/Test.gitlab-ci.yml' do
it 'has no jobs' do
expect(build_names).to be_empty
- expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
+ expect(pipeline.errors.full_messages).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
end
diff --git a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
index b2ca906e172..09ca2678de5 100644
--- a/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/auto_devops_gitlab_ci_yaml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Auto-DevOps.gitlab-ci.yml' do
+RSpec.describe 'Auto-DevOps.gitlab-ci.yml', feature_category: :auto_devops do
using RSpec::Parameterized::TableSyntax
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('Auto-DevOps') }
diff --git a/spec/lib/gitlab/ci/templates/npm_spec.rb b/spec/lib/gitlab/ci/templates/npm_spec.rb
index 55fd4675f11..a949a7ccfb1 100644
--- a/spec/lib/gitlab/ci/templates/npm_spec.rb
+++ b/spec/lib/gitlab/ci/templates/npm_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'npm.gitlab-ci.yml' do
+RSpec.describe 'npm.gitlab-ci.yml', feature_category: :continuous_integration do
subject(:template) { Gitlab::Template::GitlabCiYmlTemplate.find('npm') }
describe 'the created pipeline' do
@@ -43,7 +43,8 @@ RSpec.describe 'npm.gitlab-ci.yml' do
shared_examples 'no pipeline created' do
it 'does not create a pipeline because the only job (publish) is not created' do
expect(build_names).to be_empty
- expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
+ expect(pipeline.errors.full_messages).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
diff --git a/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
index 6ae51f9783b..a81f29d0d01 100644
--- a/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/terraform_latest_gitlab_ci_yaml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Terraform.latest.gitlab-ci.yml' do
+RSpec.describe 'Terraform.latest.gitlab-ci.yml', feature_category: :continuous_integration do
before do
allow(Gitlab::Template::GitlabCiYmlTemplate).to receive(:excluded_patterns).and_return([])
end
@@ -66,7 +66,12 @@ RSpec.describe 'Terraform.latest.gitlab-ci.yml' do
it 'does not create a branch pipeline', :aggregate_failures do
expect(branch_build_names).to be_empty
- expect(branch_pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
+ expect(branch_pipeline.errors.full_messages).to match_array(
+ [
+ 'Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'
+ ]
+ )
end
end
end
diff --git a/spec/lib/gitlab/ci/templates/themekit_gitlab_ci_yaml_spec.rb b/spec/lib/gitlab/ci/templates/themekit_gitlab_ci_yaml_spec.rb
index 157fd39f1cc..607db33f61a 100644
--- a/spec/lib/gitlab/ci/templates/themekit_gitlab_ci_yaml_spec.rb
+++ b/spec/lib/gitlab/ci/templates/themekit_gitlab_ci_yaml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'ThemeKit.gitlab-ci.yml' do
+RSpec.describe 'ThemeKit.gitlab-ci.yml', feature_category: :continuous_integration do
before do
allow(Gitlab::Template::GitlabCiYmlTemplate).to receive(:excluded_patterns).and_return([])
end
@@ -52,7 +52,8 @@ RSpec.describe 'ThemeKit.gitlab-ci.yml' do
it 'has no jobs' do
expect(build_names).to be_empty
- expect(pipeline.errors.full_messages).to match_array(["No stages / jobs for this pipeline."])
+ expect(pipeline.errors.full_messages).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
end
diff --git a/spec/lib/gitlab/ci/variables/collection_spec.rb b/spec/lib/gitlab/ci/variables/collection_spec.rb
index 10b8f0065d9..4ee122cc607 100644
--- a/spec/lib/gitlab/ci/variables/collection_spec.rb
+++ b/spec/lib/gitlab/ci/variables/collection_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ci::Variables::Collection do
+RSpec.describe Gitlab::Ci::Variables::Collection, feature_category: :pipeline_authoring do
describe '.new' do
it 'can be initialized with an array' do
variable = { key: 'VAR', value: 'value', public: true, masked: false }
@@ -295,69 +295,6 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
end
end
- describe '#expand_value' do
- let(:collection) do
- Gitlab::Ci::Variables::Collection.new
- .append(key: 'CI_JOB_NAME', value: 'test-1')
- .append(key: 'CI_BUILD_ID', value: '1')
- .append(key: 'TEST1', value: 'test-3')
- .append(key: 'FILEVAR1', value: 'file value 1', file: true)
- end
-
- context 'table tests' do
- using RSpec::Parameterized::TableSyntax
-
- where do
- {
- "empty value": {
- value: '',
- result: ''
- },
- "simple expansions": {
- value: 'key$TEST1-$CI_BUILD_ID',
- result: 'keytest-3-1'
- },
- "complex expansion": {
- value: 'key${TEST1}-${CI_JOB_NAME}',
- result: 'keytest-3-test-1'
- },
- "missing variable not keeping original": {
- value: 'key${MISSING_VAR}-${CI_JOB_NAME}',
- result: 'key-test-1'
- },
- "missing variable keeping original": {
- value: 'key${MISSING_VAR}-${CI_JOB_NAME}',
- result: 'key${MISSING_VAR}-test-1',
- keep_undefined: true
- },
- "escaped characters are kept intact": {
- value: 'key-$TEST1-%%HOME%%-$${HOME}',
- result: 'key-test-3-%%HOME%%-$${HOME}'
- },
- "file variable with expand_file_refs: true": {
- value: 'key-$FILEVAR1-$TEST1',
- result: 'key-file value 1-test-3'
- },
- "file variable with expand_file_refs: false": {
- value: 'key-$FILEVAR1-$TEST1',
- result: 'key-$FILEVAR1-test-3',
- expand_file_refs: false
- }
- }
- end
-
- with_them do
- let(:options) { { keep_undefined: keep_undefined, expand_file_refs: expand_file_refs }.compact }
-
- subject(:expanded_result) { collection.expand_value(value, **options) }
-
- it 'matches expected expansion' do
- is_expected.to eq(result)
- end
- end
- end
- end
-
describe '#sort_and_expand_all' do
context 'table tests' do
using RSpec::Parameterized::TableSyntax
@@ -369,6 +306,14 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
keep_undefined: false,
result: []
},
+ "empty string": {
+ variables: [
+ { key: 'variable', value: '' }
+ ],
+ result: [
+ { key: 'variable', value: '' }
+ ]
+ },
"simple expansions": {
variables: [
{ key: 'variable', value: 'value' },
@@ -560,13 +505,42 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
{ key: 'variable2', value: '$variable3' },
{ key: 'variable3', value: 'key$variable$variable2' }
]
+ },
+ "file variables with expand_file_refs: true": {
+ variables: [
+ { key: 'file_var', value: 'secret content', file: true },
+ { key: 'variable1', value: 'var one' },
+ { key: 'variable2', value: 'var two $variable1 $file_var' }
+ ],
+ result: [
+ { key: 'file_var', value: 'secret content' },
+ { key: 'variable1', value: 'var one' },
+ { key: 'variable2', value: 'var two var one secret content' }
+ ]
+ },
+ "file variables with expand_file_refs: false": {
+ variables: [
+ { key: 'file_var', value: 'secret content', file: true },
+ { key: 'variable1', value: 'var one' },
+ { key: 'variable2', value: 'var two $variable1 $file_var' }
+ ],
+ expand_file_refs: false,
+ result: [
+ { key: 'file_var', value: 'secret content' },
+ { key: 'variable1', value: 'var one' },
+ { key: 'variable2', value: 'var two var one $file_var' }
+ ]
}
}
end
with_them do
let(:collection) { Gitlab::Ci::Variables::Collection.new(variables) }
- let(:options) { { keep_undefined: keep_undefined, expand_raw_refs: expand_raw_refs }.compact }
+ let(:options) do
+ { keep_undefined: keep_undefined,
+ expand_raw_refs: expand_raw_refs,
+ expand_file_refs: expand_file_refs }.compact
+ end
subject(:expanded_result) { collection.sort_and_expand_all(**options) }
@@ -585,43 +559,21 @@ RSpec.describe Gitlab::Ci::Variables::Collection do
end
end
end
+ end
- context 'with the file_variable_is_referenced_in_another_variable logging' do
- let(:collection) do
- Gitlab::Ci::Variables::Collection.new
- .append(key: 'VAR1', value: 'test-1')
- .append(key: 'VAR2', value: '$VAR1')
- .append(key: 'VAR3', value: '$VAR1', raw: true)
- .append(key: 'FILEVAR4', value: 'file-test-4', file: true)
- .append(key: 'VAR5', value: '$FILEVAR4')
- .append(key: 'VAR6', value: '$FILEVAR4', raw: true)
- end
-
- subject(:sort_and_expand_all) { collection.sort_and_expand_all(project: project) }
-
- context 'when a project is not passed' do
- let(:project) {}
-
- it 'does not log anything' do
- expect(Gitlab::AppJsonLogger).not_to receive(:info)
-
- sort_and_expand_all
- end
- end
+ describe '#to_s' do
+ let(:variables) do
+ [
+ { key: 'VAR', value: 'value', public: true },
+ { key: 'VAR2', value: 'value2', public: false }
+ ]
+ end
- context 'when a project is passed' do
- let(:project) { create(:project) }
+ let(:errors) { 'circular variable reference detected' }
+ let(:collection) { Gitlab::Ci::Variables::Collection.new(variables, errors) }
- it 'logs file_variable_is_referenced_in_another_variable once for VAR5' do
- expect(Gitlab::AppJsonLogger).to receive(:info).with(
- event: 'file_variable_is_referenced_in_another_variable',
- project_id: project.id,
- variable: 'FILEVAR4'
- ).once
+ subject(:result) { collection.to_s }
- sort_and_expand_all
- end
- end
- end
+ it { is_expected.to eq("[\"VAR\", \"VAR2\"], @errors='circular variable reference detected'") }
end
end
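The expansion rules the collection specs above encode ($VAR and ${VAR} lookups, keep_undefined, and file-variable handling via expand_file_refs) can be pictured with a small standalone sketch. This is illustrative only — it is not GitLab's implementation; the Variable struct and expand_value helper are invented for the example, and it deliberately ignores edge cases such as $$ escaping:

# Minimal sketch of the expansion behaviour exercised above (illustrative only).
Variable = Struct.new(:key, :value, :file, keyword_init: true)

def expand_value(value, variables, keep_undefined: false, expand_file_refs: true)
  vars = variables.to_h { |v| [v.key, v] }

  value.gsub(/\$\{(\w+)\}|\$(\w+)/) do |match|
    name = Regexp.last_match(1) || Regexp.last_match(2)
    var = vars[name]

    if var.nil?
      keep_undefined ? match : ''          # unknown variables vanish unless kept
    elsif var.file && !expand_file_refs
      match                                 # file variables left as literal references
    else
      var.value
    end
  end
end

variables = [
  Variable.new(key: 'TEST1', value: 'test-3'),
  Variable.new(key: 'FILEVAR1', value: 'file value 1', file: true)
]

expand_value('key${TEST1}-$FILEVAR1', variables)                    # => "keytest-3-file value 1"
expand_value('key$MISSING-$TEST1', variables, keep_undefined: true) # => "key$MISSING-test-3"
expand_value('key-$FILEVAR1', variables, expand_file_refs: false)   # => "key-$FILEVAR1"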
diff --git a/spec/lib/gitlab/ci/yaml_processor_spec.rb b/spec/lib/gitlab/ci/yaml_processor_spec.rb
index ae98d2e0cad..b9f65ff749d 100644
--- a/spec/lib/gitlab/ci/yaml_processor_spec.rb
+++ b/spec/lib/gitlab/ci/yaml_processor_spec.rb
@@ -4,8 +4,9 @@ require 'spec_helper'
module Gitlab
module Ci
- RSpec.describe YamlProcessor do
+ RSpec.describe YamlProcessor, feature_category: :pipeline_authoring do
include StubRequests
+ include RepoHelpers
subject(:processor) { described_class.new(config, user: nil).execute }
@@ -1302,32 +1303,6 @@ module Gitlab
'VAR3' => { value: 'value3', raw: true }
)
end
-
- context 'when the FF ci_raw_variables_in_yaml_config is disabled' do
- before do
- stub_feature_flags(ci_raw_variables_in_yaml_config: false)
- end
-
- it 'returns variables without description and raw' do
- expect(job_variables).to contain_exactly(
- { key: 'VAR4', value: 'value4' },
- { key: 'VAR5', value: 'value5' },
- { key: 'VAR6', value: 'value6' }
- )
-
- expect(execute.root_variables).to contain_exactly(
- { key: 'VAR1', value: 'value1' },
- { key: 'VAR2', value: 'value2' },
- { key: 'VAR3', value: 'value3' }
- )
-
- expect(execute.root_variables_with_prefill_data).to eq(
- 'VAR1' => { value: 'value1' },
- 'VAR2' => { value: 'value2', description: 'description2' },
- 'VAR3' => { value: 'value3' }
- )
- end
- end
end
end
@@ -1505,9 +1480,19 @@ module Gitlab
let(:opts) { { project: project, sha: project.commit.sha } }
context "when the included internal file is present" do
- before do
- expect(project.repository).to receive(:blob_data_at)
- .and_return(YAML.dump({ job1: { script: 'hello' } }))
+ let(:project_files) do
+ {
+ 'local.gitlab-ci.yml' => <<~YAML
+ job1:
+ script: hello
+ YAML
+ }
+ end
+
+ around do |example|
+ create_and_delete_files(project, project_files) do
+ example.run
+ end
end
it { is_expected.to be_valid }
@@ -1699,7 +1684,8 @@ module Gitlab
untracked: true,
key: 'key',
policy: 'pull-push',
- when: 'on_success'
+ when: 'on_success',
+ unprotect: false
])
end
@@ -1723,7 +1709,8 @@ module Gitlab
untracked: true,
key: { files: ['file'] },
policy: 'pull-push',
- when: 'on_success'
+ when: 'on_success',
+ unprotect: false
])
end
@@ -1749,14 +1736,16 @@ module Gitlab
untracked: true,
key: 'keya',
policy: 'pull-push',
- when: 'on_success'
+ when: 'on_success',
+ unprotect: false
},
{
paths: ['logs/', 'binaries/'],
untracked: true,
key: 'key',
policy: 'pull-push',
- when: 'on_success'
+ when: 'on_success',
+ unprotect: false
}
]
)
@@ -1783,7 +1772,8 @@ module Gitlab
untracked: true,
key: { files: ['file'] },
policy: 'pull-push',
- when: 'on_success'
+ when: 'on_success',
+ unprotect: false
])
end
@@ -1808,7 +1798,8 @@ module Gitlab
untracked: true,
key: { files: ['file'], prefix: 'prefix' },
policy: 'pull-push',
- when: 'on_success'
+ when: 'on_success',
+ unprotect: false
])
end
@@ -1831,7 +1822,8 @@ module Gitlab
untracked: false,
key: 'local',
policy: 'pull-push',
- when: 'on_success'
+ when: 'on_success',
+ unprotect: false
])
end
end
diff --git a/spec/lib/gitlab/config/entry/validators_spec.rb b/spec/lib/gitlab/config/entry/validators_spec.rb
index 0458bcd6354..54a2adbefd2 100644
--- a/spec/lib/gitlab/config/entry/validators_spec.rb
+++ b/spec/lib/gitlab/config/entry/validators_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Config::Entry::Validators do
+RSpec.describe Gitlab::Config::Entry::Validators, feature_category: :pipeline_authoring do
let(:klass) do
Class.new do
include ActiveModel::Validations
@@ -40,4 +40,66 @@ RSpec.describe Gitlab::Config::Entry::Validators do
end
end
end
+
+ describe described_class::DisallowedKeysValidator do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:config, :disallowed_keys, :ignore_nil, :valid_result) do
+ { foo: '1' } | 'foo' | false | false
+ { foo: '1', bar: '2', baz: '3' } | 'foo, bar' | false | false
+ { baz: '1', qux: '2' } | '' | false | true
+ { foo: nil } | 'foo' | false | false
+ { foo: nil, bar: '2', baz: '3' } | 'foo, bar' | false | false
+ { foo: nil, bar: nil, baz: '3' } | 'foo, bar' | false | false
+ { baz: nil, qux: nil } | '' | false | true
+ { foo: '1' } | 'foo' | true | false
+ { foo: '1', bar: '2', baz: '3' } | 'foo, bar' | true | false
+ { baz: '1', qux: '2' } | '' | true | true
+ { foo: nil } | '' | true | true
+ { foo: nil, bar: '2', baz: '3' } | 'bar' | true | false
+ { foo: nil, bar: nil, baz: '3' } | '' | true | true
+ { baz: nil, qux: nil } | '' | true | true
+ end
+
+ with_them do
+ before do
+ klass.instance_variable_set(:@ignore_nil, ignore_nil)
+
+ klass.instance_eval do
+ validates :config, disallowed_keys: {
+ in: %i[foo bar],
+ ignore_nil: @ignore_nil # rubocop:disable RSpec/InstanceVariable
+ }
+ end
+
+ allow(instance).to receive(:config).and_return(config)
+ end
+
+ it 'validates the instance' do
+ expect(instance.valid?).to be(valid_result)
+
+ unless valid_result
+ expect(instance.errors.messages_for(:config)).to include "contains disallowed keys: #{disallowed_keys}"
+ end
+ end
+ end
+
+ context 'when custom message is provided' do
+ before do
+ klass.instance_eval do
+ validates :config, disallowed_keys: {
+ in: %i[foo bar],
+ message: 'custom message'
+ }
+ end
+
+ allow(instance).to receive(:config).and_return({ foo: '1' })
+ end
+
+ it 'returns the custom message when invalid' do
+ expect(instance).not_to be_valid
+ expect(instance.errors.messages_for(:config)).to include "custom message: foo"
+ end
+ end
+ end
end
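A minimal ActiveModel validator with the same shape as the DisallowedKeysValidator exercised above could look roughly like the following. This is a sketch, not GitLab's implementation: the Entry class is invented for the example, only the in:, ignore_nil: and message: options shown in the spec are modelled, and it assumes the activemodel gem (messages_for needs a recent ActiveModel; errors[:config] works on older versions):

require 'active_model'

# Sketch of a validator that rejects configs containing disallowed keys.
class DisallowedKeysValidator < ActiveModel::EachValidator
  def validate_each(record, attribute, value)
    disallowed = Array(options[:in])
    present = value.to_h.select do |key, val|
      disallowed.include?(key) && !(options[:ignore_nil] && val.nil?)
    end.keys

    return if present.empty?

    message = options[:message] || 'contains disallowed keys'
    record.errors.add(attribute, "#{message}: #{present.join(', ')}")
  end
end

class Entry
  include ActiveModel::Validations

  attr_accessor :config

  validates :config, disallowed_keys: { in: %i[foo bar], ignore_nil: true }
end

entry = Entry.new
entry.config = { foo: '1', bar: nil, baz: '3' }
entry.valid?                       # => false (bar is nil and ignored, foo is not)
entry.errors.messages_for(:config) # => ["contains disallowed keys: foo"]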
diff --git a/spec/lib/gitlab/counters/buffered_counter_spec.rb b/spec/lib/gitlab/counters/buffered_counter_spec.rb
index a1fd97768ea..2d5209161d9 100644
--- a/spec/lib/gitlab/counters/buffered_counter_spec.rb
+++ b/spec/lib/gitlab/counters/buffered_counter_spec.rb
@@ -7,7 +7,8 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
subject(:counter) { described_class.new(counter_record, attribute) }
- let(:counter_record) { create(:project_statistics) }
+ let_it_be(:counter_record) { create(:project_statistics) }
+
let(:attribute) { :build_artifacts_size }
describe '#get' do
@@ -25,42 +26,447 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
end
describe '#increment' do
- it 'sets a new key by the given value' do
- counter.increment(123)
+ let(:increment) { Gitlab::Counters::Increment.new(amount: 123, ref: 1) }
+ let(:other_increment) { Gitlab::Counters::Increment.new(amount: 100, ref: 2) }
+
+ context 'when the counter is not undergoing refresh' do
+ it 'sets a new key by the given value' do
+ counter.increment(increment)
+
+ expect(counter.get).to eq(increment.amount)
+ end
+
+ it 'increments an existing key by the given value' do
+ counter.increment(other_increment)
+ counter.increment(increment)
+
+ expect(counter.get).to eq(other_increment.amount + increment.amount)
+ end
+
+ it 'returns the value of the key after the increment' do
+ counter.increment(increment)
+ result = counter.increment(other_increment)
+
+ expect(result).to eq(increment.amount + other_increment.amount)
+ end
+
+ it 'schedules a worker to commit the counter key into database' do
+ expect(FlushCounterIncrementsWorker).to receive(:perform_in)
+ .with(described_class::WORKER_DELAY, counter_record.class.to_s, counter_record.id, attribute)
+
+ counter.increment(increment)
+ end
+ end
+
+ context 'when the counter is undergoing refresh' do
+ let(:increment_1) { Gitlab::Counters::Increment.new(amount: 123, ref: 1) }
+ let(:decrement_1) { Gitlab::Counters::Increment.new(amount: -increment_1.amount, ref: increment_1.ref) }
+
+ let(:increment_2) { Gitlab::Counters::Increment.new(amount: 100, ref: 2) }
+ let(:decrement_2) { Gitlab::Counters::Increment.new(amount: -increment_2.amount, ref: increment_2.ref) }
+
+ before do
+ counter.initiate_refresh!
+ end
+
+ it 'does not increment the counter key' do
+ expect { counter.increment(increment) }.not_to change { counter.get }.from(0)
+ end
+
+ it 'increments the amount in the refresh key' do
+ counter.increment(increment)
+
+ expect(redis_get_key(counter.refresh_key).to_i).to eq(increment.amount)
+ end
+
+ it 'schedules a worker to commit the counter key into database' do
+ expect(FlushCounterIncrementsWorker).to receive(:perform_in)
+ .with(described_class::WORKER_DELAY, counter_record.class.to_s, counter_record.id, attribute)
+
+ counter.increment(increment)
+ end
+
+ shared_examples 'changing the counter refresh key by the given amount' do
+ it 'changes the refresh counter key by the given value' do
+ expect { counter.increment(increment) }
+ .to change { redis_get_key(counter.refresh_key).to_i }.by(increment.amount)
+ end
+
+ it 'returns the value of the key after the increment' do
+ expect(counter.increment(increment)).to eq(expected_counter_value)
+ end
+ end
+
+ shared_examples 'not changing the counter refresh key' do
+ it 'does not change the counter' do
+ expect { counter.increment(increment) }.not_to change { redis_get_key(counter.refresh_key).to_i }
+ end
+
+ it 'returns the unchanged value of the key' do
+ expect(counter.increment(increment)).to eq(expected_counter_value)
+ end
+ end
+
+ context 'when it is an increment (positive amount)' do
+ let(:increment) { increment_1 }
+
+ context 'when it is the first increment on the ref' do
+ let(:expected_counter_value) { increment.amount }
+
+ it_behaves_like 'changing the counter refresh key by the given amount'
+ end
+
+ context 'when it follows an existing increment on the same ref' do
+ before do
+ counter.increment(increment)
+ end
+
+ let(:expected_counter_value) { increment.amount }
+
+ it_behaves_like 'not changing the counter refresh key'
+ end
+
+ context 'when it follows an existing decrement on the same ref' do
+ before do
+ counter.increment(decrement_1)
+ end
+
+ let(:expected_counter_value) { 0 }
+
+ it_behaves_like 'not changing the counter refresh key'
+ end
+
+ context 'when there has been an existing increment on another ref' do
+ before do
+ counter.increment(increment_2)
+ end
+
+ let(:expected_counter_value) { increment.amount + increment_2.amount }
+
+ it_behaves_like 'changing the counter refresh key by the given amount'
+ end
+
+ context 'when there has been an existing decrement on another ref' do
+ before do
+ counter.increment(decrement_2)
+ end
+
+ let(:expected_counter_value) { increment.amount }
+
+ it_behaves_like 'changing the counter refresh key by the given amount'
+ end
+ end
- expect(counter.get).to eq(123)
+ context 'when it is a decrement (negative amount)' do
+ let(:increment) { decrement_1 }
+
+ context 'when it is the first decrement on the same ref' do
+ let(:expected_counter_value) { 0 }
+
+ it_behaves_like 'not changing the counter refresh key'
+ end
+
+ context 'when it follows an existing decrement on the ref' do
+ before do
+ counter.increment(decrement_1)
+ end
+
+ let(:expected_counter_value) { 0 }
+
+ it_behaves_like 'not changing the counter refresh key'
+ end
+
+ context 'when it follows an existing increment on the ref' do
+ before do
+ counter.increment(increment_1)
+ end
+
+ let(:expected_counter_value) { 0 }
+
+ it_behaves_like 'changing the counter refresh key by the given amount'
+ end
+
+ context 'when there has been an existing increment on another ref' do
+ before do
+ counter.increment(increment_2)
+ end
+
+ let(:expected_counter_value) { increment_2.amount }
+
+ it_behaves_like 'not changing the counter refresh key'
+ end
+
+ context 'when there has been an existing decrement on another ref' do
+ before do
+ counter.increment(decrement_2)
+ end
+
+ let(:expected_counter_value) { 0 }
+
+ it_behaves_like 'not changing the counter refresh key'
+ end
+ end
+
+ context 'when the amount is 0' do
+ let(:increment) { Gitlab::Counters::Increment.new(amount: 0, ref: 1) }
+
+ context 'when it is the first increment on the ref' do
+ let(:expected_counter_value) { 0 }
+
+ it_behaves_like 'not changing the counter refresh key'
+ end
+
+ context 'when it follows another increment on the ref' do
+ let(:expected_counter_value) { 0 }
+
+ before do
+ counter.increment(increment)
+ end
+
+ it_behaves_like 'not changing the counter refresh key'
+ end
+ end
+
+ context 'when the ref is greater than 67108863 (8MB)' do
+ let(:increment) { Gitlab::Counters::Increment.new(amount: 123, ref: 67108864) }
+
+ let(:increment_2) { Gitlab::Counters::Increment.new(amount: 123, ref: 267108863) }
+ let(:decrement_2) { Gitlab::Counters::Increment.new(amount: -increment_2.amount, ref: increment_2.ref) }
+
+ let(:expected_counter_value) { increment.amount }
+
+ it 'deduplicates increments correctly' do
+ counter.increment(decrement_2)
+ counter.increment(increment)
+ counter.increment(increment_2)
+
+ expect(redis_get_key(counter.refresh_key).to_i).to eq(increment.amount)
+ end
+ end
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(project_statistics_bulk_increment: false)
+ end
+
+ context 'when the counter is not undergoing refresh' do
+ it 'sets a new key by the given value' do
+ counter.increment(increment)
+
+ expect(counter.get).to eq(increment.amount)
+ end
+
+ it 'increments an existing key by the given value' do
+ counter.increment(other_increment)
+ counter.increment(increment)
+
+ expect(counter.get).to eq(other_increment.amount + increment.amount)
+ end
+ end
+
+ context 'when the counter is undergoing refresh' do
+ before do
+ counter.initiate_refresh!
+ end
+
+ context 'when it is a decrement (negative amount)' do
+ let(:decrement) { Gitlab::Counters::Increment.new(amount: -123, ref: 3) }
+
+ it 'immediately decrements the counter key to negative' do
+ counter.increment(decrement)
+
+ expect(counter.get).to eq(decrement.amount)
+ end
+ end
+ end
end
+ end
+
+ describe '#bulk_increment' do
+ let(:other_increment) { Gitlab::Counters::Increment.new(amount: 1) }
+ let(:increments) { [Gitlab::Counters::Increment.new(amount: 123), Gitlab::Counters::Increment.new(amount: 456)] }
- it 'increments an existing key by the given value' do
- counter.increment(100)
- counter.increment(123)
+ context 'when the counter is not undergoing refresh' do
+ it 'increments the key by the given values' do
+ counter.bulk_increment(increments)
+
+ expect(counter.get).to eq(increments.sum(&:amount))
+ end
+
+ it 'returns the value of the key after the increment' do
+ counter.increment(other_increment)
+
+ result = counter.bulk_increment(increments)
+
+ expect(result).to eq(other_increment.amount + increments.sum(&:amount))
+ end
- expect(counter.get).to eq(100 + 123)
+ it 'schedules a worker to commit the counter into database' do
+ expect(FlushCounterIncrementsWorker).to receive(:perform_in)
+ .with(described_class::WORKER_DELAY, counter_record.class.to_s, counter_record.id, attribute)
+
+ counter.bulk_increment(increments)
+ end
end
- it 'returns the new value' do
- counter.increment(123)
+ context 'when the counter is undergoing refresh' do
+ let(:increment_1) { Gitlab::Counters::Increment.new(amount: 123, ref: 1) }
+ let(:decrement_1) { Gitlab::Counters::Increment.new(amount: -increment_1.amount, ref: increment_1.ref) }
+
+ let(:increment_2) { Gitlab::Counters::Increment.new(amount: 100, ref: 2) }
+ let(:decrement_2) { Gitlab::Counters::Increment.new(amount: -increment_2.amount, ref: increment_2.ref) }
+
+ let(:increment_3) { Gitlab::Counters::Increment.new(amount: 100, ref: 3) }
+ let(:decrement_3) { Gitlab::Counters::Increment.new(amount: -increment_3.amount, ref: increment_3.ref) }
+
+ before do
+ counter.initiate_refresh!
+ end
+
+ shared_examples 'changing the counter refresh key by the expected amount' do
+ it 'changes the counter refresh key by the net change' do
+ expect { counter.bulk_increment(increments) }
+ .to change { redis_get_key(counter.refresh_key).to_i }.by(expected_change)
+ end
+
+ it 'returns the value of the key after the increment' do
+ expect(counter.bulk_increment(increments)).to eq(expected_counter_value)
+ end
+ end
+
+ context 'when there are 2 increments on different ref' do
+ let(:increments) { [increment_1, increment_2] }
+ let(:expected_change) { increments.sum(&:amount) }
+ let(:expected_counter_value) { increments.sum(&:amount) }
+
+ it_behaves_like 'changing the counter refresh key by the expected amount'
+
+ context 'when there have been previous decrements' do
+ before do
+ counter.increment(decrement_1)
+ counter.increment(decrement_3)
+ end
+
+ let(:expected_change) { increment_2.amount }
+ let(:expected_counter_value) { increment_2.amount }
+
+ it_behaves_like 'changing the counter refresh key by the expected amount'
+ end
+
+ context 'when one of the increment is repeated' do
+ before do
+ counter.increment(increment_2)
+ end
+
+ let(:expected_change) { increment_1.amount }
+ let(:expected_counter_value) { increment_2.amount + increment_1.amount }
- expect(counter.increment(23)).to eq(146)
+ it_behaves_like 'changing the counter refresh key by the expected amount'
+ end
+ end
+
+ context 'when there are 2 decrements on different ref' do
+ let(:increments) { [decrement_1, decrement_2] }
+ let(:expected_change) { 0 }
+ let(:expected_counter_value) { 0 }
+
+ it_behaves_like 'changing the counter refresh key by the expected amount'
+
+ context 'when there have been previous increments' do
+ before do
+ counter.increment(increment_1)
+ counter.increment(increment_3)
+ end
+
+ let(:expected_change) { decrement_1.amount }
+ let(:expected_counter_value) { increment_3.amount }
+
+ it_behaves_like 'changing the counter refresh key by the expected amount'
+ end
+ end
+
+ context 'when there is a mixture of increment and decrement on different refs' do
+ let(:increments) { [increment_1, decrement_2] }
+ let(:expected_change) { increment_1.amount }
+ let(:expected_counter_value) { increment_1.amount }
+
+ it_behaves_like 'changing the counter refresh key by the expected amount'
+
+ context 'when the increment ref has been decremented' do
+ before do
+ counter.increment(decrement_1)
+ end
+
+ let(:expected_change) { 0 }
+ let(:expected_counter_value) { 0 }
+
+ it_behaves_like 'changing the counter refresh key by the expected amount'
+ end
+
+ context 'when the decrement ref has been incremented' do
+ before do
+ counter.increment(increment_2)
+ end
+
+ let(:expected_change) { increments.sum(&:amount) }
+ let(:expected_counter_value) { increment_1.amount }
+
+ it_behaves_like 'changing the counter refresh key by the expected amount'
+ end
+ end
end
- it 'schedules a worker to commit the counter into database' do
- expect(FlushCounterIncrementsWorker).to receive(:perform_in)
- .with(described_class::WORKER_DELAY, counter_record.class.to_s, counter_record.id, attribute)
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(project_statistics_bulk_increment: false)
+ end
+
+ context 'when the counter is not undergoing refresh' do
+ it 'sets a new key by the given value' do
+ counter.bulk_increment(increments)
+
+ expect(counter.get).to eq(increments.sum(&:amount))
+ end
+
+ it 'increments an existing key by the given value' do
+ counter.increment(other_increment)
+
+ result = counter.bulk_increment(increments)
- counter.increment(123)
+ expect(result).to eq(other_increment.amount + increments.sum(&:amount))
+ end
+ end
+
+ context 'when the counter is undergoing refresh' do
+ before do
+ counter.initiate_refresh!
+ end
+
+ context 'when it is a decrement (negative amount)' do
+ let(:decrement) { Gitlab::Counters::Increment.new(amount: -123, ref: 3) }
+
+ it 'immediately decrements the counter key to negative' do
+ counter.bulk_increment([decrement])
+
+ expect(counter.get).to eq(decrement.amount)
+ end
+ end
+ end
end
end
- describe '#reset!' do
+ describe '#initiate_refresh!' do
+ let(:increment) { Gitlab::Counters::Increment.new(amount: 123) }
+
before do
allow(counter_record).to receive(:update!)
- counter.increment(123)
+ counter.increment(increment)
end
it 'removes the key from Redis' do
- counter.reset!
+ counter.initiate_refresh!
Gitlab::Redis::SharedState.with do |redis|
expect(redis.exists?(counter.key)).to eq(false)
@@ -68,7 +474,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
end
it 'resets the counter to 0' do
- counter.reset!
+ counter.initiate_refresh!
expect(counter.get).to eq(0)
end
@@ -76,7 +482,91 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
it 'resets the record to 0' do
expect(counter_record).to receive(:update!).with(attribute => 0)
- counter.reset!
+ counter.initiate_refresh!
+ end
+
+ it 'sets a refresh indicator with a long expiry' do
+ counter.initiate_refresh!
+
+ expect(redis_exists_key(counter.refresh_indicator_key)).to eq(true)
+ expect(redis_key_ttl(counter.refresh_indicator_key)).to eq(described_class::REFRESH_KEYS_TTL)
+ end
+ end
+
+ describe '#finalize_refresh' do
+ before do
+ counter.initiate_refresh!
+ end
+
+ context 'with existing amount in refresh key' do
+ let(:increment) { Gitlab::Counters::Increment.new(amount: 123, ref: 1) }
+ let(:other_increment) { Gitlab::Counters::Increment.new(amount: 100, ref: 2) }
+ let(:other_decrement) { Gitlab::Counters::Increment.new(amount: -100, ref: 2) }
+
+ before do
+ counter.bulk_increment([other_decrement, increment, other_increment])
+ end
+
+ it 'moves the deduplicated amount in the refresh key into the counter key' do
+ expect { counter.finalize_refresh }
+ .to change { counter.get }.by(increment.amount)
+ end
+
+ it 'removes the refresh counter key and the refresh indicator' do
+ expect { counter.finalize_refresh }
+ .to change { redis_exists_key(counter.refresh_key) }.from(true).to(false)
+ .and change { redis_exists_key(counter.refresh_indicator_key) }.from(true).to(false)
+ end
+
+ it 'schedules a worker to clean up the refresh tracking keys' do
+ expect(Counters::CleanupRefreshWorker).to receive(:perform_async)
+ .with(counter_record.class.to_s, counter_record.id, attribute)
+
+ counter.finalize_refresh
+ end
+ end
+
+ context 'without existing amount in refresh key' do
+ it 'does not change the counter key' do
+ expect { counter.finalize_refresh }.not_to change { counter.get }
+ end
+
+ it 'removes the refresh indicator key' do
+ expect { counter.finalize_refresh }
+ .to change { redis_exists_key(counter.refresh_indicator_key) }.from(true).to(false)
+ end
+
+ it 'schedules a worker to commit the counter key into database' do
+ expect(FlushCounterIncrementsWorker).to receive(:perform_in)
+ .with(described_class::WORKER_DELAY, counter_record.class.to_s, counter_record.id, attribute)
+
+ counter.finalize_refresh
+ end
+ end
+ end
+
+ describe '#cleanup_refresh' do
+ let(:increment) { Gitlab::Counters::Increment.new(amount: 123, ref: 67108864) }
+ let(:increment_2) { Gitlab::Counters::Increment.new(amount: 123, ref: 267108864) }
+ let(:decrement_2) { Gitlab::Counters::Increment.new(amount: -increment_2.amount, ref: increment_2.ref) }
+ let(:increment_3) { Gitlab::Counters::Increment.new(amount: 123, ref: 534217728) }
+
+ before do
+ stub_const("#{described_class}::CLEANUP_BATCH_SIZE", 2)
+ stub_const("#{described_class}::CLEANUP_INTERVAL_SECONDS", 0.001)
+
+ counter.initiate_refresh!
+ counter.increment(decrement_2)
+ counter.increment(increment)
+ counter.increment(increment_2)
+ counter.finalize_refresh
+ end
+
+ it 'removes all tracking keys' do
+ Gitlab::Redis::SharedState.with do |redis|
+ expect { counter.cleanup_refresh }
+ .to change { redis.scan_each(match: "#{counter.refresh_key}*").to_a.count }.from(4).to(0)
+ end
end
end
@@ -88,7 +578,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
end
context 'when there is an amount to commit' do
- let(:increments) { [10, -3] }
+ let(:increments) { [10, -3].map { |amt| Gitlab::Counters::Increment.new(amount: amt) } }
before do
increments.each { |i| counter.increment(i) }
@@ -96,21 +586,11 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
it 'commits the increment into the database' do
expect { counter.commit_increment! }
- .to change { counter_record.reset.read_attribute(attribute) }.by(increments.sum)
+ .to change { counter_record.reset.read_attribute(attribute) }.by(increments.sum(&:amount))
end
it 'removes the increment entry from Redis' do
- Gitlab::Redis::SharedState.with do |redis|
- key_exists = redis.exists?(counter.key)
- expect(key_exists).to be_truthy
- end
-
- counter.commit_increment!
-
- Gitlab::Redis::SharedState.with do |redis|
- key_exists = redis.exists?(counter.key)
- expect(key_exists).to be_falsey
- end
+ expect { counter.commit_increment! }.to change { redis_exists_key(counter.key) }.from(true).to(false)
end
end
@@ -171,7 +651,7 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
context 'when there are increments to flush' do
before do
- counter.increment(10)
+ counter.increment(Gitlab::Counters::Increment.new(amount: 10))
end
it 'executes the callbacks' do
@@ -223,11 +703,27 @@ RSpec.describe Gitlab::Counters::BufferedCounter, :clean_gitlab_redis_shared_sta
it 'drops the increment key and creates the flushed key if it does not exist' do
counter.amount_to_be_flushed
- Gitlab::Redis::SharedState.with do |redis|
- expect(redis.exists?(increment_key)).to eq(false)
- expect(redis.exists?(flushed_key)).to eq(flushed_key_present)
- end
+ expect(redis_exists_key(increment_key)).to eq(false)
+ expect(redis_exists_key(flushed_key)).to eq(flushed_key_present)
end
end
end
+
+ def redis_get_key(key)
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.get(key)
+ end
+ end
+
+ def redis_exists_key(key)
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.exists?(key)
+ end
+ end
+
+ def redis_key_ttl(key)
+ Gitlab::Redis::SharedState.with do |redis|
+ redis.ttl(key)
+ end
+ end
end
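The ref-based deduplication that the refresh contexts above describe can be summarised with an in-memory sketch. GitLab keeps this state in Redis; the RefreshBuffer class and apply method below are invented for illustration and only model the net-change behaviour asserted in the specs:

# Sketch: while a counter is being refreshed, each ref is counted at most once,
# and a decrement on a ref cancels (or pre-empts) its increment.
Increment = Struct.new(:amount, :ref, keyword_init: true)

class RefreshBuffer
  def initialize
    @total = 0
    @added = {}    # refs whose amount is already counted
    @removed = {}  # refs explicitly decremented; later increments are ignored
  end

  attr_reader :total

  def apply(increment)
    ref = increment.ref

    if increment.amount >= 0
      return @total if @added.key?(ref) || @removed.key?(ref)

      @added[ref] = increment.amount
      @total += increment.amount
    else
      @total -= @added.delete(ref) if @added.key?(ref)
      @removed[ref] = true
    end

    @total
  end
end

buffer = RefreshBuffer.new
buffer.apply(Increment.new(amount: 123, ref: 1))  # => 123  first increment counts
buffer.apply(Increment.new(amount: 123, ref: 1))  # => 123  duplicate ref ignored
buffer.apply(Increment.new(amount: -100, ref: 2)) # => 123  decrement on unseen ref is a no-op
buffer.apply(Increment.new(amount: 100, ref: 2))  # => 123  ref 2 already marked removed
buffer.apply(Increment.new(amount: -123, ref: 1)) # => 0    decrement removes the counted ref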
diff --git a/spec/lib/gitlab/counters/legacy_counter_spec.rb b/spec/lib/gitlab/counters/legacy_counter_spec.rb
index e66b1ce08c4..9b0ffafff67 100644
--- a/spec/lib/gitlab/counters/legacy_counter_spec.rb
+++ b/spec/lib/gitlab/counters/legacy_counter_spec.rb
@@ -5,37 +5,50 @@ require 'spec_helper'
RSpec.describe Gitlab::Counters::LegacyCounter do
subject(:counter) { described_class.new(counter_record, attribute) }
- let(:counter_record) { create(:project_statistics) }
+ let_it_be(:counter_record, reload: true) { create(:project_statistics) }
+
let(:attribute) { :snippets_size }
- let(:amount) { 123 }
+
+ let(:increment) { Gitlab::Counters::Increment.new(amount: 123) }
+ let(:other_increment) { Gitlab::Counters::Increment.new(amount: 100) }
describe '#increment' do
it 'increments the attribute in the counter record' do
- expect { counter.increment(amount) }.to change { counter_record.reload.method(attribute).call }.by(amount)
+ expect { counter.increment(increment) }
+ .to change { counter_record.reload.method(attribute).call }.by(increment.amount)
end
it 'returns the value after the increment' do
- counter.increment(100)
+ counter.increment(other_increment)
- expect(counter.increment(amount)).to eq(100 + amount)
+ expect(counter.increment(increment)).to eq(other_increment.amount + increment.amount)
end
it 'executes after counter_record after commit callback' do
expect(counter_record).to receive(:execute_after_commit_callbacks).and_call_original
- counter.increment(amount)
+ counter.increment(increment)
end
end
- describe '#reset!' do
- before do
- allow(counter_record).to receive(:update!)
+ describe '#bulk_increment' do
+ let(:increments) { [Gitlab::Counters::Increment.new(amount: 123), Gitlab::Counters::Increment.new(amount: 456)] }
+
+ it 'increments the attribute in the counter record' do
+ expect { counter.bulk_increment(increments) }
+ .to change { counter_record.reload.method(attribute).call }.by(increments.sum(&:amount))
+ end
+
+ it 'returns the value after the increment' do
+ counter.increment(other_increment)
+
+ expect(counter.bulk_increment(increments)).to eq(other_increment.amount + increments.sum(&:amount))
end
- it 'resets the record to 0' do
- expect(counter_record).to receive(:update!).with(attribute => 0)
+ it 'executes after counter_record after commit callback' do
+ expect(counter_record).to receive(:execute_after_commit_callbacks).and_call_original
- counter.reset!
+ counter.bulk_increment(increments)
end
end
end
diff --git a/spec/lib/gitlab/data_builder/build_spec.rb b/spec/lib/gitlab/data_builder/build_spec.rb
index 544b210651b..92fef93bddb 100644
--- a/spec/lib/gitlab/data_builder/build_spec.rb
+++ b/spec/lib/gitlab/data_builder/build_spec.rb
@@ -2,10 +2,11 @@
require 'spec_helper'
-RSpec.describe Gitlab::DataBuilder::Build do
+RSpec.describe Gitlab::DataBuilder::Build, feature_category: :integrations do
let_it_be(:runner) { create(:ci_runner, :instance, :tagged_only) }
let_it_be(:user) { create(:user, :public_email) }
- let_it_be(:ci_build) { create(:ci_build, :running, runner: runner, user: user) }
+ let_it_be(:pipeline) { create(:ci_pipeline, name: 'Build pipeline') }
+ let_it_be(:ci_build) { create(:ci_build, :running, pipeline: pipeline, runner: runner, user: user) }
describe '.build' do
around do |example|
@@ -33,6 +34,7 @@ RSpec.describe Gitlab::DataBuilder::Build do
it { expect(data[:project_name]).to eq(ci_build.project.full_name) }
it { expect(data[:pipeline_id]).to eq(ci_build.pipeline.id) }
it { expect(data[:retries_count]).to eq(ci_build.retries_count) }
+ it { expect(data[:commit][:name]).to eq(pipeline.name) }
it {
expect(data[:user]).to eq(
@@ -61,10 +63,10 @@ RSpec.describe Gitlab::DataBuilder::Build do
described_class.build(b) # Don't use ci_build variable here since it has all associations loaded into memory
end
- expect(control.count).to eq(13)
+ expect(control.count).to eq(14)
end
- context 'when feature flag is disabled' do
+ context 'when job_webhook_retries_count feature flag is disabled' do
before do
stub_feature_flags(job_webhook_retries_count: false)
end
@@ -79,7 +81,7 @@ RSpec.describe Gitlab::DataBuilder::Build do
described_class.build(b) # Don't use ci_build variable here since it has all associations loaded into memory
end
- expect(control.count).to eq(12)
+ expect(control.count).to eq(13)
end
end
diff --git a/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
index 7ad3eb395a9..207aedd1a38 100644
--- a/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/index_creator_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do
let(:connection) { model.connection }
let!(:lease) { stub_exclusive_lease(lease_key, :uuid, timeout: lease_timeout) }
- let(:lease_key) { "gitlab/database/async_indexes/index_creator/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
+ let(:lease_key) { "gitlab/database/indexing/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
let(:lease_timeout) { described_class::TIMEOUT_PER_ACTION }
around do |example|
@@ -39,7 +39,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexCreator do
it 'creates the index while controlling statement timeout' do
allow(connection).to receive(:execute).and_call_original
- expect(connection).to receive(:execute).with("SET statement_timeout TO '32400s'").ordered.and_call_original
+ expect(connection).to receive(:execute).with("SET statement_timeout TO '72000s'").ordered.and_call_original
expect(connection).to receive(:execute).with(async_index.definition).ordered.and_call_original
expect(connection).to receive(:execute).with("RESET statement_timeout").ordered.and_call_original
diff --git a/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb b/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb
index adb0f45706d..11039ad4f7e 100644
--- a/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb
+++ b/spec/lib/gitlab/database/async_indexes/index_destructor_spec.rb
@@ -16,7 +16,7 @@ RSpec.describe Gitlab::Database::AsyncIndexes::IndexDestructor do
let(:connection) { model.connection }
let!(:lease) { stub_exclusive_lease(lease_key, :uuid, timeout: lease_timeout) }
- let(:lease_key) { "gitlab/database/async_indexes/index_destructor/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
+ let(:lease_key) { "gitlab/database/indexing/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
let(:lease_timeout) { described_class::TIMEOUT_PER_ACTION }
before do
diff --git a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
index 983f482d464..f3a292abbae 100644
--- a/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/batched_migration_wrapper_spec.rb
@@ -152,6 +152,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::BatchedMigrationWrapper, '
it 'runs the job with the correct arguments' do
expect(job_class).to receive(:new).with(no_args).and_return(job_instance)
+ expect(Gitlab::ApplicationContext).to receive(:push).with(feature_category: :database)
expect(job_instance).to receive(:perform).with(1, 10, 'events', 'id', 1, pause_ms, 'id', 'other_id')
perform
diff --git a/spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb b/spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb
index db4383a79d4..1c0f5a0c420 100644
--- a/spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb
+++ b/spec/lib/gitlab/database/background_migration/health_status/indicators/autovacuum_active_on_table_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::AutovacuumActiveOnTable do
+RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::AutovacuumActiveOnTable,
+ feature_category: :database do
include Database::DatabaseHelpers
let(:connection) { Gitlab::Database.database_base_models[:main].connection }
@@ -17,7 +18,7 @@ RSpec.describe Gitlab::Database::BackgroundMigration::HealthStatus::Indicators::
subject { described_class.new(context).evaluate }
before do
- swapout_view_for_table(:postgres_autovacuum_activity)
+ swapout_view_for_table(:postgres_autovacuum_activity, connection: connection)
end
let(:tables) { [table] }
diff --git a/spec/lib/gitlab/database/consistency_checker_spec.rb b/spec/lib/gitlab/database/consistency_checker_spec.rb
index 2ff79d20786..c0f0c349ddd 100644
--- a/spec/lib/gitlab/database/consistency_checker_spec.rb
+++ b/spec/lib/gitlab/database/consistency_checker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::ConsistencyChecker do
+RSpec.describe Gitlab::Database::ConsistencyChecker, feature_category: :pods do
let(:batch_size) { 10 }
let(:max_batches) { 4 }
let(:max_runtime) { described_class::MAX_RUNTIME }
diff --git a/spec/lib/gitlab/database/gitlab_schema_spec.rb b/spec/lib/gitlab/database/gitlab_schema_spec.rb
index 4b37cbda047..28a087d5401 100644
--- a/spec/lib/gitlab/database/gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/gitlab_schema_spec.rb
@@ -8,13 +8,40 @@ RSpec.shared_examples 'validate path globs' do |path_globs|
end
end
+RSpec.shared_examples 'validate schema data' do |tables_and_views|
+ it 'all tables and views have a known gitlab_schema assigned' do
+ expect(tables_and_views).to all(
+ match([be_a(String), be_in(Gitlab::Database.schemas_to_base_models.keys.map(&:to_sym))])
+ )
+ end
+end
+
RSpec.describe Gitlab::Database::GitlabSchema do
- describe '.views_and_tables_to_schema' do
- it 'all tables and views have assigned a known gitlab_schema' do
- expect(described_class.views_and_tables_to_schema).to all(
- match([be_a(String), be_in(Gitlab::Database.schemas_to_base_models.keys.map(&:to_sym))])
- )
+ shared_examples 'maps table name to table schema' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:name, :classification) do
+ 'ci_builds' | :gitlab_ci
+ 'my_schema.ci_builds' | :gitlab_ci
+ 'information_schema.columns' | :gitlab_internal
+ 'audit_events_part_5fc467ac26' | :gitlab_main
+ '_test_gitlab_main_table' | :gitlab_main
+ '_test_gitlab_ci_table' | :gitlab_ci
+ '_test_my_table' | :gitlab_shared
+ 'pg_attribute' | :gitlab_internal
+ end
+
+ with_them do
+ it { is_expected.to eq(classification) }
end
+ end
+
+ describe '.deleted_views_and_tables_to_schema' do
+ include_examples 'validate schema data', described_class.deleted_views_and_tables_to_schema
+ end
+
+ describe '.views_and_tables_to_schema' do
+ include_examples 'validate schema data', described_class.views_and_tables_to_schema
# This being run across different databases indirectly also tests
# a general consistency of structure across databases
@@ -55,6 +82,14 @@ RSpec.describe Gitlab::Database::GitlabSchema do
include_examples 'validate path globs', described_class.view_path_globs
end
+ describe '.deleted_tables_path_globs' do
+ include_examples 'validate path globs', described_class.deleted_tables_path_globs
+ end
+
+ describe '.deleted_views_path_globs' do
+ include_examples 'validate path globs', described_class.deleted_views_path_globs
+ end
+
describe '.tables_to_schema' do
let(:database_models) { Gitlab::Database.database_base_models.except(:geo) }
let(:views) { database_models.flat_map { |_, m| m.connection.views }.sort.uniq }
@@ -81,25 +116,85 @@ RSpec.describe Gitlab::Database::GitlabSchema do
end
end
+ describe '.table_schemas' do
+ let(:tables) { %w[users projects ci_builds] }
+
+ subject { described_class.table_schemas(tables) }
+
+ it 'returns the matched schemas' do
+ expect(subject).to match_array %i[gitlab_main gitlab_ci].to_set
+ end
+
+ context 'when one of the tables does not have a matching table schema' do
+ let(:tables) { %w[users projects unknown ci_builds] }
+
+ context 'and undefined parameter is false' do
+ subject { described_class.table_schemas(tables, undefined: false) }
+
+ it 'includes a nil value' do
+ is_expected.to match_array [:gitlab_main, nil, :gitlab_ci].to_set
+ end
+ end
+
+ context 'and undefined parameter is true' do
+ subject { described_class.table_schemas(tables, undefined: true) }
+
+ it 'includes "undefined_<table_name>"' do
+ is_expected.to match_array [:gitlab_main, :undefined_unknown, :gitlab_ci].to_set
+ end
+ end
+
+ context 'and undefined parameter is not specified' do
+ it 'includes a nil value' do
+ is_expected.to match_array [:gitlab_main, :undefined_unknown, :gitlab_ci].to_set
+ end
+ end
+ end
+ end
+
describe '.table_schema' do
- using RSpec::Parameterized::TableSyntax
+ subject { described_class.table_schema(name) }
- where(:name, :classification) do
- 'ci_builds' | :gitlab_ci
- 'my_schema.ci_builds' | :gitlab_ci
- 'information_schema.columns' | :gitlab_internal
- 'audit_events_part_5fc467ac26' | :gitlab_main
- '_test_gitlab_main_table' | :gitlab_main
- '_test_gitlab_ci_table' | :gitlab_ci
- '_test_my_table' | :gitlab_shared
- 'pg_attribute' | :gitlab_internal
- 'my_other_table' | :undefined_my_other_table
+ it_behaves_like 'maps table name to table schema'
+
+ context 'when mapping fails' do
+ let(:name) { 'unknown_table' }
+
+ context "and parameter 'undefined' is set to true" do
+ subject { described_class.table_schema(name, undefined: true) }
+
+ it { is_expected.to eq(:undefined_unknown_table) }
+ end
+
+ context "and parameter 'undefined' is set to false" do
+ subject { described_class.table_schema(name, undefined: false) }
+
+ it { is_expected.to be_nil }
+ end
+
+ context "and parameter 'undefined' is not set" do
+ subject { described_class.table_schema(name) }
+
+ it { is_expected.to eq(:undefined_unknown_table) }
+ end
end
+ end
- with_them do
- subject { described_class.table_schema(name) }
+ describe '.table_schema!' do
+ subject { described_class.table_schema!(name) }
- it { is_expected.to eq(classification) }
+ it_behaves_like 'maps table name to table schema'
+
+ context 'when mapping fails' do
+ let(:name) { 'non_existing_table' }
+
+ it "raises error" do
+ expect { subject }.to raise_error(
+ Gitlab::Database::GitlabSchema::UnknownSchemaError,
+ "Could not find gitlab schema for table #{name}: " \
+ "Any new tables must be added to the database dictionary"
+ )
+ end
end
end
end
diff --git a/spec/lib/gitlab/database/indexing_exclusive_lease_guard_spec.rb b/spec/lib/gitlab/database/indexing_exclusive_lease_guard_spec.rb
new file mode 100644
index 00000000000..ddc9cdee92f
--- /dev/null
+++ b/spec/lib/gitlab/database/indexing_exclusive_lease_guard_spec.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Database::IndexingExclusiveLeaseGuard, feature_category: :database do
+ let(:helper_class) do
+ Class.new do
+ include Gitlab::Database::IndexingExclusiveLeaseGuard
+
+ attr_reader :connection
+
+ def initialize(connection)
+ @connection = connection
+ end
+ end
+ end
+
+ describe '#lease_key' do
+ let(:helper) { helper_class.new(connection) }
+ let(:lease_key) { "gitlab/database/indexing/actions/#{database_name}" }
+
+ context 'with CI database connection' do
+ let(:connection) { Ci::ApplicationRecord.connection }
+ let(:database_name) { Gitlab::Database::CI_DATABASE_NAME }
+
+ before do
+ skip_if_multiple_databases_not_setup
+ end
+
+ it { expect(helper.lease_key).to eq(lease_key) }
+ end
+
+ context 'with MAIN database connection' do
+ let(:connection) { ApplicationRecord.connection }
+ let(:database_name) { Gitlab::Database::MAIN_DATABASE_NAME }
+
+ it { expect(helper.lease_key).to eq(lease_key) }
+ end
+ end
+end
diff --git a/spec/lib/gitlab/database/load_balancing/resolver_spec.rb b/spec/lib/gitlab/database/load_balancing/resolver_spec.rb
index 0051cf50255..4af36693383 100644
--- a/spec/lib/gitlab/database/load_balancing/resolver_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/resolver_spec.rb
@@ -2,15 +2,16 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::LoadBalancing::Resolver do
+RSpec.describe Gitlab::Database::LoadBalancing::Resolver, :freeze_time, feature_category: :database do
describe '#resolve' do
let(:ip_addr) { IPAddr.new('127.0.0.2') }
context 'when nameserver is an IP' do
it 'returns an IPAddr object' do
service = described_class.new('127.0.0.2')
+ response = service.resolve
- expect(service.resolve).to eq(ip_addr)
+ expect(response.address).to eq(ip_addr)
end
end
@@ -22,12 +23,14 @@ RSpec.describe Gitlab::Database::LoadBalancing::Resolver do
allow(instance).to receive(:getaddress).with('localhost').and_return('127.0.0.2')
end
- expect(subject).to eq(ip_addr)
+ expect(subject.address).to eq(ip_addr)
end
context 'when nameserver is not in the hosts file' do
+ let(:raw_ttl) { 10 }
+
it 'looks the nameserver up in DNS' do
- resource = double(:resource, address: ip_addr)
+ resource = double(:resource, address: ip_addr, ttl: raw_ttl)
packet = double(:packet, answer: [resource])
allow_next_instance_of(Resolv::Hosts) do |instance|
@@ -38,7 +41,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::Resolver do
.with('localhost', Net::DNS::A)
.and_return(packet)
- expect(subject).to eq(ip_addr)
+ expect(subject.address).to eq(ip_addr)
+ expect(subject.ttl).to eq(raw_ttl.seconds.from_now)
end
context 'when nameserver is not in DNS' do
diff --git a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
index 984d60e9962..bfd9c644ffa 100644
--- a/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
+++ b/spec/lib/gitlab/database/load_balancing/service_discovery_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
+RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery, feature_category: :database do
let(:load_balancer) do
configuration = Gitlab::Database::LoadBalancing::Configuration.new(ActiveRecord::Base)
configuration.service_discovery[:record] = 'localhost'
@@ -23,6 +23,8 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
resource = double(:resource, address: IPAddr.new('127.0.0.1'))
packet = double(:packet, answer: [resource])
+ service.instance_variable_set(:@nameserver_ttl, Gitlab::Database::LoadBalancing::Resolver::FAR_FUTURE_TTL)
+
allow(Net::DNS::Resolver).to receive(:start)
.with('localhost', Net::DNS::A)
.and_return(packet)
@@ -362,4 +364,52 @@ RSpec.describe Gitlab::Database::LoadBalancing::ServiceDiscovery do
expect(service.addresses_from_load_balancer).to eq(addresses)
end
end
+
+ describe '#resolver', :freeze_time do
+ context 'without predefined resolver' do
+ it 'fetches a new resolver and assigns it to the instance variable' do
+ expect(service.instance_variable_get(:@resolver)).not_to be_present
+
+ service_resolver = service.resolver
+
+ expect(service.instance_variable_get(:@resolver)).to be_present
+ expect(service_resolver).to be_present
+ end
+ end
+
+ context 'with predefined resolver' do
+ let(:resolver) do
+ Net::DNS::Resolver.new(
+ nameservers: 'localhost',
+ port: 8600
+ )
+ end
+
+ before do
+ service.instance_variable_set(:@resolver, resolver)
+ end
+
+ context "when nameserver's TTL is in the future" do
+ it 'returns the existing resolver' do
+ expect(service.resolver).to eq(resolver)
+ end
+ end
+
+ context "when nameserver's TTL is in the past" do
+ before do
+ service.instance_variable_set(
+ :@nameserver_ttl,
+ 1.minute.ago
+ )
+ end
+
+ it 'fetches new resolver' do
+ service_resolver = service.resolver
+
+ expect(service_resolver).to be_present
+ expect(service_resolver).not_to eq(resolver)
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/lock_writes_manager_spec.rb b/spec/lib/gitlab/database/lock_writes_manager_spec.rb
index 242b2040eaa..c06c463d918 100644
--- a/spec/lib/gitlab/database/lock_writes_manager_spec.rb
+++ b/spec/lib/gitlab/database/lock_writes_manager_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::LockWritesManager do
+RSpec.describe Gitlab::Database::LockWritesManager, :delete, feature_category: :pods do
let(:connection) { ApplicationRecord.connection }
let(:test_table) { '_test_table' }
let(:logger) { instance_double(Logger) }
@@ -13,12 +13,14 @@ RSpec.describe Gitlab::Database::LockWritesManager do
table_name: test_table,
connection: connection,
database_name: 'main',
+ with_retries: true,
logger: logger,
dry_run: dry_run
)
end
before do
+ allow(connection).to receive(:execute).and_call_original
allow(logger).to receive(:info)
connection.execute(<<~SQL)
@@ -29,20 +31,24 @@ RSpec.describe Gitlab::Database::LockWritesManager do
SQL
end
+ after do
+ ApplicationRecord.connection.execute("DROP TABLE IF EXISTS #{test_table}")
+ end
+
describe "#table_locked_for_writes?" do
it 'returns false for a table that is not locked for writes' do
- expect(subject.table_locked_for_writes?(test_table)).to eq(false)
+ expect(subject.table_locked_for_writes?).to eq(false)
end
it 'returns true for a table that is locked for writes' do
- expect { subject.lock_writes }.to change { subject.table_locked_for_writes?(test_table) }.from(false).to(true)
+ expect { subject.lock_writes }.to change { subject.table_locked_for_writes? }.from(false).to(true)
end
context 'for detached partition tables in another schema' do
let(:test_table) { 'gitlab_partitions_dynamic._test_table_20220101' }
it 'returns true for a table that is locked for writes' do
- expect { subject.lock_writes }.to change { subject.table_locked_for_writes?(test_table) }.from(false).to(true)
+ expect { subject.lock_writes }.to change { subject.table_locked_for_writes? }.from(false).to(true)
end
end
end
@@ -83,21 +89,19 @@ RSpec.describe Gitlab::Database::LockWritesManager do
it 'retries again if it receives a statement_timeout a few number of times' do
error_message = "PG::QueryCanceled: ERROR: canceling statement due to statement timeout"
call_count = 0
- allow(connection).to receive(:execute) do |statement|
- if statement.include?("CREATE TRIGGER")
- call_count += 1
- raise(ActiveRecord::QueryCanceled, error_message) if call_count.even?
- end
+ expect(connection).to receive(:execute).twice.with(/^CREATE TRIGGER gitlab_schema_write_trigger_for_/) do
+ call_count += 1
+ raise(ActiveRecord::QueryCanceled, error_message) if call_count.odd?
end
subject.lock_writes
+
+ expect(call_count).to eq(2) # The first call fails, the 2nd call succeeds
end
it 'raises the exception if it happened many times' do
error_message = "PG::QueryCanceled: ERROR: canceling statement due to statement timeout"
- allow(connection).to receive(:execute) do |statement|
- if statement.include?("CREATE TRIGGER")
- raise(ActiveRecord::QueryCanceled, error_message)
- end
+ allow(connection).to receive(:execute).with(/^CREATE TRIGGER gitlab_schema_write_trigger_for_/) do
+ raise(ActiveRecord::QueryCanceled, error_message)
end
expect do
@@ -152,6 +156,7 @@ RSpec.describe Gitlab::Database::LockWritesManager do
table_name: test_table,
connection: connection,
database_name: 'main',
+ with_retries: true,
logger: logger,
dry_run: false
).lock_writes
diff --git a/spec/lib/gitlab/database/loose_foreign_keys_spec.rb b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb
index ff99f681b0c..3c2d9ca82f2 100644
--- a/spec/lib/gitlab/database/loose_foreign_keys_spec.rb
+++ b/spec/lib/gitlab/database/loose_foreign_keys_spec.rb
@@ -112,4 +112,31 @@ RSpec.describe Gitlab::Database::LooseForeignKeys do
end
end
end
+
+ describe '.build_definition' do
+ context 'when child table schema is not defined' do
+ let(:loose_foreign_keys_yaml) do
+ {
+ 'ci_unknown_table' => [
+ {
+ 'table' => 'projects',
+ 'column' => 'project_id',
+ 'on_delete' => 'async_delete'
+ }
+ ]
+ }
+ end
+
+ subject { described_class.definitions }
+
+ before do
+ described_class.instance_variable_set(:@definitions, nil)
+ described_class.instance_variable_set(:@loose_foreign_keys_yaml, loose_foreign_keys_yaml)
+ end
+
+ it 'raises Gitlab::Database::GitlabSchema::UnknownSchemaError error' do
+ expect { subject }.to raise_error(Gitlab::Database::GitlabSchema::UnknownSchemaError)
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb b/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb
index 9fd49b312eb..089c7a779f2 100644
--- a/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers/automatic_lock_writes_on_tables_spec.rb
@@ -3,27 +3,39 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables,
- :reestablished_active_record_base, query_analyzers: false do
+ :reestablished_active_record_base, :delete, query_analyzers: false, feature_category: :pods do
using RSpec::Parameterized::TableSyntax
let(:schema_class) { Class.new(Gitlab::Database::Migration[2.1]) }
+ let(:skip_automatic_lock_on_writes) { false }
let(:gitlab_main_table_name) { :_test_gitlab_main_table }
let(:gitlab_ci_table_name) { :_test_gitlab_ci_table }
let(:gitlab_geo_table_name) { :_test_gitlab_geo_table }
let(:gitlab_shared_table_name) { :_test_table }
+ let(:renamed_gitlab_main_table_name) { :_test_gitlab_main_new_table }
+ let(:renamed_gitlab_ci_table_name) { :_test_gitlab_ci_new_table }
+
before do
stub_feature_flags(automatic_lock_writes_on_table: true)
reconfigure_db_connection(model: ActiveRecord::Base, config_model: config_model)
end
+ # Drop the created test tables, because we use non-transactional tests
+ after do
+ drop_table_if_exists(gitlab_main_table_name)
+ drop_table_if_exists(gitlab_ci_table_name)
+ drop_table_if_exists(gitlab_geo_table_name)
+ drop_table_if_exists(gitlab_shared_table_name)
+ drop_table_if_exists(renamed_gitlab_main_table_name)
+ drop_table_if_exists(renamed_gitlab_ci_table_name)
+ end
+
shared_examples 'does not lock writes on table' do |config_model|
let(:config_model) { config_model }
it 'allows deleting records from the table' do
- allow_next_instance_of(Gitlab::Database::LockWritesManager) do |instance|
- expect(instance).not_to receive(:lock_writes)
- end
+ expect(Gitlab::Database::LockWritesManager).not_to receive(:new)
run_migration
@@ -37,9 +49,10 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables,
let(:config_model) { config_model }
it 'errors on deleting' do
- allow_next_instance_of(Gitlab::Database::LockWritesManager) do |instance|
+ expect_next_instance_of(Gitlab::Database::LockWritesManager) do |instance|
expect(instance).to receive(:lock_writes).and_call_original
end
+ expect(Gitlab::Database::WithLockRetries).not_to receive(:new)
run_migration
@@ -49,22 +62,35 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables,
end
end
- context 'when executing create_table migrations' do
- let(:create_gitlab_main_table_migration_class) { create_table_migration(gitlab_main_table_name) }
- let(:create_gitlab_ci_table_migration_class) { create_table_migration(gitlab_ci_table_name) }
- let(:create_gitlab_shared_table_migration_class) { create_table_migration(gitlab_shared_table_name) }
+ shared_examples 'locks writes on table using WithLockRetries' do |config_model|
+ let(:config_model) { config_model }
+
+ it 'locks the writes on the table using WithLockRetries' do
+ expect_next_instance_of(Gitlab::Database::WithLockRetries) do |instance|
+ expect(instance).to receive(:run).and_call_original
+ end
+ run_migration
+
+ expect do
+ migration_class.connection.execute("DELETE FROM #{table_name}")
+ end.to raise_error(ActiveRecord::StatementInvalid, /is write protected/)
+ end
+ end
+
+ context 'when executing create_table migrations' do
context 'when single database' do
let(:config_model) { Gitlab::Database.database_base_models[:main] }
+ let(:create_gitlab_main_table_migration_class) { create_table_migration(gitlab_main_table_name) }
+ let(:create_gitlab_ci_table_migration_class) { create_table_migration(gitlab_ci_table_name) }
+ let(:create_gitlab_shared_table_migration_class) { create_table_migration(gitlab_shared_table_name) }
before do
skip_if_multiple_databases_are_setup
end
it 'does not lock any newly created tables' do
- allow_next_instance_of(Gitlab::Database::LockWritesManager) do |instance|
- expect(instance).not_to receive(:lock_writes)
- end
+ expect(Gitlab::Database::LockWritesManager).not_to receive(:new)
create_gitlab_main_table_migration_class.migrate(:up)
create_gitlab_ci_table_migration_class.migrate(:up)
@@ -83,9 +109,12 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables,
skip_if_multiple_databases_not_setup
end
- let(:skip_automatic_lock_on_writes) { false }
let(:migration_class) { create_table_migration(table_name, skip_automatic_lock_on_writes) }
- let(:run_migration) { migration_class.migrate(:up) }
+ let(:run_migration) do
+ migration_class.connection.transaction do
+ migration_class.migrate(:up)
+ end
+ end
context 'for creating a gitlab_main table' do
let(:table_name) { gitlab_main_table_name }
@@ -95,7 +124,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables,
context 'when table listed as a deleted table' do
before do
- stub_const("Gitlab::Database::GitlabSchema::DELETED_TABLES", { table_name.to_s => :gitlab_main })
+ allow(Gitlab::Database::GitlabSchema).to receive(:deleted_tables_to_schema).and_return(
+ { table_name.to_s => :gitlab_main }
+ )
end
it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:ci]
@@ -107,6 +138,14 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables,
it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:ci]
end
+ context 'when migration does not run within a transaction' do
+ let(:run_migration) do
+ migration_class.migrate(:up)
+ end
+
+ it_behaves_like 'locks writes on table using WithLockRetries', Gitlab::Database.database_base_models[:ci]
+ end
+
context 'when the SKIP_AUTOMATIC_LOCK_ON_WRITES feature flag is set' do
before do
stub_env('SKIP_AUTOMATIC_LOCK_ON_WRITES' => 'true')
@@ -132,7 +171,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables,
context 'when table listed as a deleted table' do
before do
- stub_const("Gitlab::Database::GitlabSchema::DELETED_TABLES", { table_name.to_s => :gitlab_ci })
+ allow(Gitlab::Database::GitlabSchema).to receive(:deleted_tables_to_schema).and_return(
+ { table_name.to_s => :gitlab_ci }
+ )
end
it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:main]
@@ -202,11 +243,15 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables,
end
let(:migration_class) { rename_table_migration(old_table_name, table_name) }
- let(:run_migration) { migration_class.migrate(:up) }
+ let(:run_migration) do
+ migration_class.connection.transaction do
+ migration_class.migrate(:up)
+ end
+ end
context 'when a gitlab_main table' do
let(:old_table_name) { gitlab_main_table_name }
- let(:table_name) { :_test_gitlab_main_new_table }
+ let(:table_name) { renamed_gitlab_main_table_name }
let(:database_base_model) { Gitlab::Database.database_base_models[:main] }
it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:main]
@@ -215,7 +260,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables,
context 'when a gitlab_ci table' do
let(:old_table_name) { gitlab_ci_table_name }
- let(:table_name) { :_test_gitlab_ci_new_table }
+ let(:table_name) { renamed_gitlab_ci_table_name }
let(:database_base_model) { Gitlab::Database.database_base_models[:ci] }
it_behaves_like 'does not lock writes on table', Gitlab::Database.database_base_models[:ci]
@@ -236,9 +281,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables,
end
it 'does not lock any newly created tables' do
- allow_next_instance_of(Gitlab::Database::LockWritesManager) do |instance|
- expect(instance).not_to receive(:lock_writes)
- end
+ expect(Gitlab::Database::LockWritesManager).not_to receive(:new)
drop_gitlab_main_table_migration_class.connection.execute("CREATE TABLE #{gitlab_main_table_name}()")
drop_gitlab_ci_table_migration_class.connection.execute("CREATE TABLE #{gitlab_ci_table_name}()")
@@ -268,7 +311,11 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables,
end
let(:migration_class) { drop_table_migration(table_name) }
- let(:run_migration) { migration_class.migrate(:down) }
+ let(:run_migration) do
+ migration_class.connection.transaction do
+ migration_class.migrate(:down)
+ end
+ end
context 'for re-creating a gitlab_main table' do
let(:table_name) { gitlab_main_table_name }
@@ -293,14 +340,14 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables,
end
end
- def create_table_migration(table_name, skip_lock_on_writes = false)
+ def create_table_migration(table_name, skip_automatic_lock_on_writes = false)
migration_class = Class.new(schema_class) do
class << self; attr_accessor :table_name; end
def change
create_table self.class.table_name
end
end
- migration_class.skip_automatic_lock_on_writes = skip_lock_on_writes
+ migration_class.skip_automatic_lock_on_writes = skip_automatic_lock_on_writes
migration_class.tap { |klass| klass.table_name = table_name }
end
@@ -331,4 +378,11 @@ RSpec.describe Gitlab::Database::MigrationHelpers::AutomaticLockWritesOnTables,
def geo_configured?
!!ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: 'geo')
end
+
+ # Drops the test tables created by the test migrations
+ def drop_table_if_exists(table_name)
+ Gitlab::Database.database_base_models.each_value do |model|
+ model.connection.execute("DROP TABLE IF EXISTS #{table_name}")
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
index e8045f5afec..714fbab5aff 100644
--- a/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers/restrict_gitlab_schema_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_analyzers: false, stub_feature_flags: false do
+RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_analyzers: false,
+ stub_feature_flags: false, feature_category: :pods do
let(:schema_class) { Class.new(Gitlab::Database::Migration[1.0]).include(described_class) }
# We keep only the GitlabSchemasValidateConnection analyzer running
@@ -125,8 +126,9 @@ RSpec.describe Gitlab::Database::MigrationHelpers::RestrictGitlabSchema, query_a
"does add index to ci_builds in gitlab_main and gitlab_ci" => {
migration: ->(klass) do
def change
- # Due to running in transactin we cannot use `add_concurrent_index`
- add_index :ci_builds, :tag, where: "type = 'Ci::Build'", name: 'index_ci_builds_on_tag_and_type_eq_ci_build'
+ # Due to running in transaction we cannot use `add_concurrent_index`
+ index_name = 'index_ci_builds_on_tag_and_type_eq_ci_build'
+ add_index :ci_builds, :tag, where: "type = 'Ci::Build'", name: index_name
end
end,
query_matcher: /CREATE INDEX/,
diff --git a/spec/lib/gitlab/database/migration_helpers_spec.rb b/spec/lib/gitlab/database/migration_helpers_spec.rb
index 30eeff31326..12fa115cc4e 100644
--- a/spec/lib/gitlab/database/migration_helpers_spec.rb
+++ b/spec/lib/gitlab/database/migration_helpers_spec.rb
@@ -743,6 +743,75 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
end
+ context 'ON UPDATE statements' do
+ context 'on_update: :nullify' do
+ it 'appends ON UPDATE SET NULL statement' do
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:statement_timeout_disabled?).and_return(false)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
+ expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
+
+ expect(model).to receive(:execute).with(/ON UPDATE SET NULL/)
+
+ model.add_concurrent_foreign_key(:projects, :users,
+ column: :user_id,
+ on_update: :nullify)
+ end
+ end
+
+ context 'on_update: :cascade' do
+ it 'appends ON UPDATE CASCADE statement' do
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:statement_timeout_disabled?).and_return(false)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
+ expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
+
+ expect(model).to receive(:execute).with(/ON UPDATE CASCADE/)
+
+ model.add_concurrent_foreign_key(:projects, :users,
+ column: :user_id,
+ on_update: :cascade)
+ end
+ end
+
+ context 'on_update: nil' do
+ it 'appends no ON UPDATE statement' do
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:statement_timeout_disabled?).and_return(false)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
+ expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
+
+ expect(model).not_to receive(:execute).with(/ON UPDATE/)
+
+ model.add_concurrent_foreign_key(:projects, :users,
+ column: :user_id,
+ on_update: nil)
+ end
+ end
+
+ context 'when on_update is not provided' do
+ it 'appends no ON UPDATE statement' do
+ expect(model).to receive(:with_lock_retries).and_call_original
+ expect(model).to receive(:disable_statement_timeout).and_call_original
+ expect(model).to receive(:statement_timeout_disabled?).and_return(false)
+ expect(model).to receive(:execute).with(/SET statement_timeout TO/)
+ expect(model).to receive(:execute).ordered.with(/VALIDATE CONSTRAINT/)
+ expect(model).to receive(:execute).ordered.with(/RESET statement_timeout/)
+
+ expect(model).not_to receive(:execute).with(/ON UPDATE/)
+
+ model.add_concurrent_foreign_key(:projects, :users,
+ column: :user_id)
+ end
+ end
+ end
+
context 'when no custom key name is supplied' do
it 'creates a concurrent foreign key and validates it' do
expect(model).to receive(:with_lock_retries).and_call_original
@@ -760,6 +829,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
name = model.concurrent_foreign_key_name(:projects, :user_id)
expect(model).to receive(:foreign_key_exists?).with(:projects, :users,
column: :user_id,
+ on_update: nil,
on_delete: :cascade,
name: name,
primary_key: :id).and_return(true)
@@ -792,6 +862,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model).to receive(:foreign_key_exists?).with(:projects, :users,
name: :foo,
primary_key: :id,
+ on_update: nil,
on_delete: :cascade,
column: :user_id).and_return(true)
@@ -861,6 +932,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
"ADD CONSTRAINT fk_multiple_columns\n" \
"FOREIGN KEY \(partition_number, user_id\)\n" \
"REFERENCES users \(partition_number, id\)\n" \
+ "ON UPDATE CASCADE\n" \
"ON DELETE CASCADE\n" \
"NOT VALID;\n"
)
@@ -871,7 +943,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
column: [:partition_number, :user_id],
target_column: [:partition_number, :id],
validate: false,
- name: :fk_multiple_columns
+ name: :fk_multiple_columns,
+ on_update: :cascade
)
end
@@ -883,6 +956,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
{
column: [:partition_number, :user_id],
name: :fk_multiple_columns,
+ on_update: :cascade,
on_delete: :cascade,
primary_key: [:partition_number, :id]
}
@@ -898,6 +972,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
:users,
column: [:partition_number, :user_id],
target_column: [:partition_number, :id],
+ on_update: :cascade,
validate: false,
name: :fk_multiple_columns
)
@@ -973,58 +1048,58 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
describe '#foreign_key_exists?' do
before do
- key = ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(
- :projects, :users,
- {
- column: :non_standard_id,
- name: :fk_projects_users_non_standard_id,
- on_delete: :cascade,
- primary_key: :id
- }
- )
- allow(model).to receive(:foreign_keys).with(:projects).and_return([key])
+ model.connection.execute(<<~SQL)
+ create table referenced (
+ id bigserial primary key not null
+ );
+ create table referencing (
+ id bigserial primary key not null,
+ non_standard_id bigint not null,
+ constraint fk_referenced foreign key (non_standard_id) references referenced(id) on delete cascade
+ );
+ SQL
end
shared_examples_for 'foreign key checks' do
it 'finds existing foreign keys by column' do
- expect(model.foreign_key_exists?(:projects, target_table, column: :non_standard_id)).to be_truthy
+ expect(model.foreign_key_exists?(:referencing, target_table, column: :non_standard_id)).to be_truthy
end
it 'finds existing foreign keys by name' do
- expect(model.foreign_key_exists?(:projects, target_table, name: :fk_projects_users_non_standard_id)).to be_truthy
+ expect(model.foreign_key_exists?(:referencing, target_table, name: :fk_referenced)).to be_truthy
end
it 'finds existing foreign_keys by name and column' do
- expect(model.foreign_key_exists?(:projects, target_table, name: :fk_projects_users_non_standard_id, column: :non_standard_id)).to be_truthy
+ expect(model.foreign_key_exists?(:referencing, target_table, name: :fk_referenced, column: :non_standard_id)).to be_truthy
end
it 'finds existing foreign_keys by name, column and on_delete' do
- expect(model.foreign_key_exists?(:projects, target_table, name: :fk_projects_users_non_standard_id, column: :non_standard_id, on_delete: :cascade)).to be_truthy
+ expect(model.foreign_key_exists?(:referencing, target_table, name: :fk_referenced, column: :non_standard_id, on_delete: :cascade)).to be_truthy
end
it 'finds existing foreign keys by target table only' do
- expect(model.foreign_key_exists?(:projects, target_table)).to be_truthy
+ expect(model.foreign_key_exists?(:referencing, target_table)).to be_truthy
end
it 'compares by column name if given' do
- expect(model.foreign_key_exists?(:projects, target_table, column: :user_id)).to be_falsey
+ expect(model.foreign_key_exists?(:referencing, target_table, column: :user_id)).to be_falsey
end
it 'compares by target column name if given' do
- expect(model.foreign_key_exists?(:projects, target_table, primary_key: :user_id)).to be_falsey
- expect(model.foreign_key_exists?(:projects, target_table, primary_key: :id)).to be_truthy
+ expect(model.foreign_key_exists?(:referencing, target_table, primary_key: :user_id)).to be_falsey
+ expect(model.foreign_key_exists?(:referencing, target_table, primary_key: :id)).to be_truthy
end
it 'compares by foreign key name if given' do
- expect(model.foreign_key_exists?(:projects, target_table, name: :non_existent_foreign_key_name)).to be_falsey
+ expect(model.foreign_key_exists?(:referencing, target_table, name: :non_existent_foreign_key_name)).to be_falsey
end
it 'compares by foreign key name and column if given' do
- expect(model.foreign_key_exists?(:projects, target_table, name: :non_existent_foreign_key_name, column: :non_standard_id)).to be_falsey
+ expect(model.foreign_key_exists?(:referencing, target_table, name: :non_existent_foreign_key_name, column: :non_standard_id)).to be_falsey
end
it 'compares by foreign key name, column and on_delete if given' do
- expect(model.foreign_key_exists?(:projects, target_table, name: :fk_projects_users_non_standard_id, column: :non_standard_id, on_delete: :nullify)).to be_falsey
+ expect(model.foreign_key_exists?(:referencing, target_table, name: :fk_referenced, column: :non_standard_id, on_delete: :nullify)).to be_falsey
end
end
@@ -1035,7 +1110,7 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
end
context 'specifying a target table' do
- let(:target_table) { :users }
+ let(:target_table) { :referenced }
it_behaves_like 'foreign key checks'
end
@@ -1044,59 +1119,66 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
expect(model.foreign_key_exists?(:projects, :other_table)).to be_falsey
end
+ it 'raises an error if an invalid on_delete is specified' do
+ # The correct on_delete key is "nullify"
+ expect { model.foreign_key_exists?(:referenced, on_delete: :set_null) }.to raise_error(ArgumentError)
+ end
+
context 'with foreign key using multiple columns' do
before do
- key = ActiveRecord::ConnectionAdapters::ForeignKeyDefinition.new(
- :projects, :users,
- {
- column: [:partition_number, :id],
- name: :fk_projects_users_partition_number_id,
- on_delete: :cascade,
- primary_key: [:partition_number, :id]
- }
- )
- allow(model).to receive(:foreign_keys).with(:projects).and_return([key])
+ model.connection.execute(<<~SQL)
+ create table p_referenced (
+ id bigserial not null,
+ partition_number bigint not null default 100,
+ primary key (partition_number, id)
+ );
+ create table p_referencing (
+ id bigserial primary key not null,
+ partition_number bigint not null,
+ constraint fk_partitioning foreign key (partition_number, id) references p_referenced(partition_number, id) on delete cascade
+ );
+ SQL
end
it 'finds existing foreign keys by columns' do
- expect(model.foreign_key_exists?(:projects, :users, column: [:partition_number, :id])).to be_truthy
+ expect(model.foreign_key_exists?(:p_referencing, :p_referenced, column: [:partition_number, :id])).to be_truthy
end
it 'finds existing foreign keys by name' do
- expect(model.foreign_key_exists?(:projects, :users, name: :fk_projects_users_partition_number_id)).to be_truthy
+ expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :fk_partitioning)).to be_truthy
end
it 'finds existing foreign_keys by name and column' do
- expect(model.foreign_key_exists?(:projects, :users, name: :fk_projects_users_partition_number_id, column: [:partition_number, :id])).to be_truthy
+ expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :fk_partitioning, column: [:partition_number, :id])).to be_truthy
end
it 'finds existing foreign_keys by name, column and on_delete' do
- expect(model.foreign_key_exists?(:projects, :users, name: :fk_projects_users_partition_number_id, column: [:partition_number, :id], on_delete: :cascade)).to be_truthy
+ expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :fk_partitioning, column: [:partition_number, :id], on_delete: :cascade)).to be_truthy
end
it 'finds existing foreign keys by target table only' do
- expect(model.foreign_key_exists?(:projects, :users)).to be_truthy
+ expect(model.foreign_key_exists?(:p_referencing, :p_referenced)).to be_truthy
end
it 'compares by column name if given' do
- expect(model.foreign_key_exists?(:projects, :users, column: :id)).to be_falsey
+ expect(model.foreign_key_exists?(:p_referencing, :p_referenced, column: :id)).to be_falsey
end
it 'compares by target column name if given' do
- expect(model.foreign_key_exists?(:projects, :users, primary_key: :user_id)).to be_falsey
- expect(model.foreign_key_exists?(:projects, :users, primary_key: [:partition_number, :id])).to be_truthy
+ expect(model.foreign_key_exists?(:p_referencing, :p_referenced, primary_key: :user_id)).to be_falsey
+ expect(model.foreign_key_exists?(:p_referencing, :p_referenced, primary_key: [:partition_number, :id])).to be_truthy
end
it 'compares by foreign key name if given' do
- expect(model.foreign_key_exists?(:projects, :users, name: :non_existent_foreign_key_name)).to be_falsey
+ expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :non_existent_foreign_key_name)).to be_falsey
end
it 'compares by foreign key name and column if given' do
- expect(model.foreign_key_exists?(:projects, :users, name: :non_existent_foreign_key_name, column: [:partition_number, :id])).to be_falsey
+ expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :non_existent_foreign_key_name, column: [:partition_number, :id])).to be_falsey
end
it 'compares by foreign key name, column and on_delete if given' do
- expect(model.foreign_key_exists?(:projects, :users, name: :fk_projects_users_partition_number_id, column: [:partition_number, :id], on_delete: :nullify)).to be_falsey
+ expect(model.foreign_key_exists?(:p_referencing, :p_referenced, name: :fk_partitioning, column: [:partition_number, :id], on_delete: :nullify)).to be_falsey
end
end
end
@@ -1159,7 +1241,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
Gitlab::Database::LockWritesManager.new(
table_name: test_table,
connection: model.connection,
- database_name: 'main'
+ database_name: 'main',
+ with_retries: false
)
end
@@ -1340,7 +1423,8 @@ RSpec.describe Gitlab::Database::MigrationHelpers do
Gitlab::Database::LockWritesManager.new(
table_name: test_table,
connection: model.connection,
- database_name: 'main'
+ database_name: 'main',
+ with_retries: false
)
end
diff --git a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
index 3540a120b8f..b0bdbf5c371 100644
--- a/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
+++ b/spec/lib/gitlab/database/migrations/instrumentation_spec.rb
@@ -2,6 +2,8 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::Migrations::Instrumentation do
+ subject(:instrumentation) { described_class.new(result_dir: result_dir) }
+
let(:result_dir) { Dir.mktmpdir }
let(:connection) { ActiveRecord::Migration.connection }
@@ -9,17 +11,18 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
FileUtils.rm_rf(result_dir)
end
describe '#observe' do
- subject { described_class.new(result_dir: result_dir) }
-
def load_observation(result_dir, migration_name)
Gitlab::Json.parse(File.read(File.join(result_dir, migration_name, described_class::STATS_FILENAME)))
end
let(:migration_name) { 'test' }
let(:migration_version) { '12345' }
+ let(:migration_meta) { { 'max_batch_size' => 1, 'total_tuple_count' => 10, 'interval' => 60 } }
it 'executes the given block' do
- expect { |b| subject.observe(version: migration_version, name: migration_name, connection: connection, &b) }.to yield_control
+ expect do |b|
+ instrumentation.observe(version: migration_version, name: migration_name, connection: connection, meta: migration_meta, &b)
+ end.to yield_control
end
context 'behavior with observers' do
@@ -68,13 +71,17 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
end
context 'on successful execution' do
- subject { described_class.new(result_dir: result_dir).observe(version: migration_version, name: migration_name, connection: connection) {} }
+ subject do
+ instrumentation.observe(version: migration_version, name: migration_name,
+ connection: connection, meta: migration_meta) {}
+ end
it 'records a valid observation', :aggregate_failures do
expect(subject.walltime).not_to be_nil
expect(subject.success).to be_truthy
expect(subject.version).to eq(migration_version)
expect(subject.name).to eq(migration_name)
+ expect(subject.meta).to eq(migration_meta)
end
end
@@ -82,9 +89,10 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
where(exception: ['something went wrong', SystemStackError, Interrupt])
with_them do
- let(:instance) { described_class.new(result_dir: result_dir) }
-
- subject(:observe) { instance.observe(version: migration_version, name: migration_name, connection: connection) { raise exception } }
+ subject(:observe) do
+ instrumentation.observe(version: migration_version, name: migration_name,
+ connection: connection, meta: migration_meta) { raise exception }
+ end
it 'raises the exception' do
expect { observe }.to raise_error(exception)
@@ -106,14 +114,13 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
expect(subject['success']).to be_falsey
expect(subject['version']).to eq(migration_version)
expect(subject['name']).to eq(migration_name)
+ expect(subject['meta']).to include(migration_meta)
end
end
end
end
context 'sequence of migrations with failures' do
- subject { described_class.new(result_dir: result_dir) }
-
let(:migration1) { double('migration1', call: nil) }
let(:migration2) { double('migration2', call: nil) }
@@ -121,9 +128,9 @@ RSpec.describe Gitlab::Database::Migrations::Instrumentation do
let(:migration_version_2) { '98765' }
it 'records observations for all migrations' do
- subject.observe(version: migration_version, name: migration_name, connection: connection) {}
+ instrumentation.observe(version: migration_version, name: migration_name, connection: connection) {}
begin
- subject.observe(version: migration_version_2, name: migration_name_2, connection: connection) { raise 'something went wrong' }
+ instrumentation.observe(version: migration_version_2, name: migration_name_2, connection: connection) { raise 'something went wrong' }
rescue StandardError
nil
end
diff --git a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
index 73d69d55e5a..0b048617ce1 100644
--- a/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
+++ b/spec/lib/gitlab/database/migrations/test_batched_background_runner_spec.rb
@@ -69,12 +69,27 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
end
context 'running a real background migration' do
+ let(:interval) { 5.minutes }
+ let(:meta) { { "max_batch_size" => nil, "total_tuple_count" => nil, "interval" => interval } }
+
+ let(:params) do
+ {
+ version: nil,
+ connection: connection,
+ meta: {
+ interval: 300,
+ max_batch_size: nil,
+ total_tuple_count: nil
+ }
+ }
+ end
+
before do
queue_migration('CopyColumnUsingBackgroundMigrationJob',
table_name, :id,
:id, :data,
batch_size: 100,
- job_interval: 5.minutes) # job_interval is skipped when testing
+ job_interval: interval) # job_interval is skipped when testing
end
subject(:sample_migration) do
@@ -91,10 +106,9 @@ RSpec.describe Gitlab::Database::Migrations::TestBatchedBackgroundRunner, :freez
}.by_at_most(-1)
end
- it 'uses the correct connection to instrument the background migration' do
+ it 'uses the correct params to instrument the background migration' do
expect_next_instance_of(Gitlab::Database::Migrations::Instrumentation) do |instrumentation|
- expect(instrumentation).to receive(:observe).with(hash_including(connection: connection))
- .at_least(:once).and_call_original
+ expect(instrumentation).to receive(:observe).with(hash_including(params)).at_least(:once).and_call_original
end
subject
diff --git a/spec/lib/gitlab/database/partitioning_spec.rb b/spec/lib/gitlab/database/partitioning_spec.rb
index db5ca890155..855d0bc46a4 100644
--- a/spec/lib/gitlab/database/partitioning_spec.rb
+++ b/spec/lib/gitlab/database/partitioning_spec.rb
@@ -10,15 +10,15 @@ RSpec.describe Gitlab::Database::Partitioning do
around do |example|
previously_registered_models = described_class.registered_models.dup
- described_class.instance_variable_set('@registered_models', Set.new)
+ described_class.instance_variable_set(:@registered_models, Set.new)
previously_registered_tables = described_class.registered_tables.dup
- described_class.instance_variable_set('@registered_tables', Set.new)
+ described_class.instance_variable_set(:@registered_tables, Set.new)
example.run
- described_class.instance_variable_set('@registered_models', previously_registered_models)
- described_class.instance_variable_set('@registered_tables', previously_registered_tables)
+ described_class.instance_variable_set(:@registered_models, previously_registered_models)
+ described_class.instance_variable_set(:@registered_tables, previously_registered_tables)
end
describe '.register_models' do
diff --git a/spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb b/spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb
index c1ac8f0c9cd..f24c4559349 100644
--- a/spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb
+++ b/spec/lib/gitlab/database/postgres_autovacuum_activity_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::PostgresAutovacuumActivity, type: :model do
+RSpec.describe Gitlab::Database::PostgresAutovacuumActivity, type: :model, feature_category: :database do
include Database::DatabaseHelpers
it { is_expected.to be_a Gitlab::Database::SharedModel }
@@ -13,7 +13,7 @@ RSpec.describe Gitlab::Database::PostgresAutovacuumActivity, type: :model do
let(:tables) { %w[foo test] }
before do
- swapout_view_for_table(:postgres_autovacuum_activity)
+ swapout_view_for_table(:postgres_autovacuum_activity, connection: ApplicationRecord.connection)
# unrelated
create(:postgres_autovacuum_activity, table: 'bar')
diff --git a/spec/lib/gitlab/database/postgres_foreign_key_spec.rb b/spec/lib/gitlab/database/postgres_foreign_key_spec.rb
index b0e08ca1e67..a8dbc4be16f 100644
--- a/spec/lib/gitlab/database/postgres_foreign_key_spec.rb
+++ b/spec/lib/gitlab/database/postgres_foreign_key_spec.rb
@@ -2,28 +2,32 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model do
+RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model, feature_category: :database do
# PostgresForeignKey does not `behaves_like 'a postgres model'` because it does not correspond 1-1 with a single entry
# in pg_class
before do
- ActiveRecord::Base.connection.execute(<<~SQL)
- CREATE TABLE public.referenced_table (
- id bigserial primary key not null
- );
-
- CREATE TABLE public.other_referenced_table (
- id bigserial primary key not null
- );
-
- CREATE TABLE public.constrained_table (
- id bigserial primary key not null,
- referenced_table_id bigint not null,
- other_referenced_table_id bigint not null,
- CONSTRAINT fk_constrained_to_referenced FOREIGN KEY(referenced_table_id) REFERENCES referenced_table(id),
- CONSTRAINT fk_constrained_to_other_referenced FOREIGN KEY(other_referenced_table_id)
- REFERENCES other_referenced_table(id)
- );
+ ApplicationRecord.connection.execute(<<~SQL)
+ CREATE TABLE public.referenced_table (
+ id bigserial primary key not null,
+ id_b bigserial not null,
+ UNIQUE (id, id_b)
+ );
+
+ CREATE TABLE public.other_referenced_table (
+ id bigserial primary key not null
+ );
+
+ CREATE TABLE public.constrained_table (
+ id bigserial primary key not null,
+ referenced_table_id bigint not null,
+ referenced_table_id_b bigint not null,
+ other_referenced_table_id bigint not null,
+ CONSTRAINT fk_constrained_to_referenced FOREIGN KEY(referenced_table_id, referenced_table_id_b) REFERENCES referenced_table(id, id_b) on delete restrict,
+ CONSTRAINT fk_constrained_to_other_referenced FOREIGN KEY(other_referenced_table_id)
+ REFERENCES other_referenced_table(id)
+ );
+
SQL
end
@@ -39,6 +43,14 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model do
end
end
+ describe '#by_referenced_table_name' do
+ it 'finds the foreign keys for the referenced table' do
+ expected = described_class.find_by!(name: 'fk_constrained_to_referenced')
+
+ expect(described_class.by_referenced_table_name('referenced_table')).to contain_exactly(expected)
+ end
+ end
+
describe '#by_constrained_table_identifier' do
it 'throws an error when the identifier name is not fully qualified' do
expect { described_class.by_constrained_table_identifier('constrained_table') }.to raise_error(ArgumentError, /not fully qualified/)
@@ -50,4 +62,147 @@ RSpec.describe Gitlab::Database::PostgresForeignKey, type: :model do
expect(described_class.by_constrained_table_identifier('public.constrained_table')).to match_array(expected)
end
end
+
+ describe '#by_constrained_table_name' do
+ it 'finds the foreign keys for the constrained table' do
+ expected = described_class.where(name: %w[fk_constrained_to_referenced fk_constrained_to_other_referenced]).to_a
+
+ expect(described_class.by_constrained_table_name('constrained_table')).to match_array(expected)
+ end
+ end
+
+ describe '#by_name' do
+ it 'finds foreign keys by name' do
+ expect(described_class.by_name('fk_constrained_to_referenced').pluck(:name)).to contain_exactly('fk_constrained_to_referenced')
+ end
+ end
+
+ context 'when finding columns for foreign keys' do
+ using RSpec::Parameterized::TableSyntax
+
+ let(:fks) { described_class.by_constrained_table_name('constrained_table') }
+
+ where(:fk, :expected_constrained, :expected_referenced) do
+ lazy { described_class.find_by(name: 'fk_constrained_to_referenced') } | %w[referenced_table_id referenced_table_id_b] | %w[id id_b]
+ lazy { described_class.find_by(name: 'fk_constrained_to_other_referenced') } | %w[other_referenced_table_id] | %w[id]
+ end
+
+ with_them do
+ it 'finds the correct constrained column names' do
+ expect(fk.constrained_columns).to eq(expected_constrained)
+ end
+
+ it 'finds the correct referenced column names' do
+ expect(fk.referenced_columns).to eq(expected_referenced)
+ end
+
+ describe '#by_constrained_columns' do
+ it 'finds the correct foreign key' do
+ expect(fks.by_constrained_columns(expected_constrained)).to contain_exactly(fk)
+ end
+ end
+
+ describe '#by_referenced_columns' do
+ it 'finds the correct foreign key' do
+ expect(fks.by_referenced_columns(expected_referenced)).to contain_exactly(fk)
+ end
+ end
+ end
+ end
+
+ describe '#on_delete_action' do
+ before do
+ ApplicationRecord.connection.execute(<<~SQL)
+ create table public.referenced_table_all_on_delete_actions (
+ id bigserial primary key not null
+ );
+
+ create table public.constrained_table_all_on_delete_actions (
+ id bigserial primary key not null,
+ ref_id_no_action bigint not null constraint fk_no_action references referenced_table_all_on_delete_actions(id),
+ ref_id_restrict bigint not null constraint fk_restrict references referenced_table_all_on_delete_actions(id) on delete restrict,
+ ref_id_nullify bigint not null constraint fk_nullify references referenced_table_all_on_delete_actions(id) on delete set null,
+ ref_id_cascade bigint not null constraint fk_cascade references referenced_table_all_on_delete_actions(id) on delete cascade,
+ ref_id_set_default bigint not null constraint fk_set_default references referenced_table_all_on_delete_actions(id) on delete set default
+ )
+ SQL
+ end
+
+ let(:fks) { described_class.by_constrained_table_name('constrained_table_all_on_delete_actions') }
+
+ context 'with an invalid on_delete_action' do
+ it 'raises an error' do
+ # The correct value is :nullify, not :set_null
+ expect { fks.by_on_delete_action(:set_null) }.to raise_error(ArgumentError)
+ end
+ end
+
+ where(:fk_name, :expected_on_delete_action) do
+ [
+ %w[fk_no_action no_action],
+ %w[fk_restrict restrict],
+ %w[fk_nullify nullify],
+ %w[fk_cascade cascade],
+ %w[fk_set_default set_default]
+ ]
+ end
+
+ with_them do
+ subject(:fk) { fks.find_by(name: fk_name) }
+
+ it 'has the appropriate on delete action' do
+ expect(fk.on_delete_action).to eq(expected_on_delete_action)
+ end
+
+ describe '#by_on_delete_action' do
+ it 'finds the key by on delete action' do
+ expect(fks.by_on_delete_action(expected_on_delete_action)).to contain_exactly(fk)
+ end
+ end
+ end
+ end
+
+ context 'when supporting foreign keys to inherited tables in postgres 12' do
+ before do
+ skip('not supported before postgres 12') if ApplicationRecord.database.version.to_f < 12
+
+ ApplicationRecord.connection.execute(<<~SQL)
+ create table public.parent (
+ id bigserial primary key not null
+ ) partition by hash(id);
+
+ create table public.child partition of parent for values with (modulus 2, remainder 1);
+
+ create table public.referencing_partitioned (
+ id bigserial not null primary key,
+ constraint fk_inherited foreign key (id) references parent(id)
+ )
+ SQL
+ end
+
+ describe '#is_inherited' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:fk, :inherited) do
+ lazy { described_class.find_by(name: 'fk_inherited') } | false
+ lazy { described_class.by_referenced_table_identifier('public.child').first! } | true
+ lazy { described_class.find_by(name: 'fk_constrained_to_referenced') } | false
+ end
+
+ with_them do
+ it 'has the appropriate inheritance value' do
+ expect(fk.is_inherited).to eq(inherited)
+ end
+ end
+ end
+
+ describe '#not_inherited' do
+ let(:fks) { described_class.by_constrained_table_identifier('public.referencing_partitioned') }
+
+ it 'lists all non-inherited foreign keys' do
+ expect(fks.pluck(:referenced_table_name)).to contain_exactly('parent', 'child')
+ expect(fks.not_inherited.pluck(:referenced_table_name)).to contain_exactly('parent')
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/query_analyzer_spec.rb b/spec/lib/gitlab/database/query_analyzer_spec.rb
index 6dc9ffc4aba..0b849063562 100644
--- a/spec/lib/gitlab/database/query_analyzer_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzer_spec.rb
@@ -10,7 +10,6 @@ RSpec.describe Gitlab::Database::QueryAnalyzer, query_analyzers: false do
before do
allow(described_class.instance).to receive(:all_analyzers).and_return([analyzer, disabled_analyzer])
allow(analyzer).to receive(:enabled?).and_return(true)
- allow(analyzer).to receive(:raw?).and_return(false)
allow(analyzer).to receive(:suppressed?).and_return(false)
allow(analyzer).to receive(:begin!)
allow(analyzer).to receive(:end!)
@@ -182,13 +181,6 @@ RSpec.describe Gitlab::Database::QueryAnalyzer, query_analyzers: false do
expect { process_sql("SELECT 1 FROM projects") }.not_to raise_error
end
- it 'does call analyze with raw sql when raw? is true' do
- expect(analyzer).to receive(:raw?).and_return(true)
- expect(analyzer).to receive(:analyze).with('SELECT 1 FROM projects')
-
- expect { process_sql("SELECT 1 FROM projects") }.not_to raise_error
- end
-
def process_sql(sql)
described_class.instance.within do
ApplicationRecord.load_balancer.read_write do |connection|
diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
index 62c5ead855a..3a92f35d585 100644
--- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_metrics_spec.rb
@@ -53,6 +53,14 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasMetrics, query_ana
gitlab_schemas: "gitlab_ci",
db_config_name: "ci"
}
+ },
+ "for query accessing gitlab_main and unknown schema" => {
+ model: ApplicationRecord,
+ sql: "SELECT 1 FROM projects LEFT JOIN not_in_schema ON not_in_schema.project_id=projects.id",
+ expectations: {
+ gitlab_schemas: "gitlab_main,undefined_not_in_schema",
+ db_config_name: "main"
+ }
}
}
end
diff --git a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb
index ddf5793049d..47038bbd138 100644
--- a/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/gitlab_schemas_validate_connection_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection, query_analyzers: false do
+RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection, query_analyzers: false,
+ feature_category: :pods do
let(:analyzer) { described_class }
# We keep only the GitlabSchemasValidateConnection analyzer running
@@ -51,6 +52,12 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::GitlabSchemasValidateConnection
sql: "SELECT 1 FROM ci_builds",
expect_error: /The query tried to access \["ci_builds"\]/,
setup: -> (_) { skip_if_multiple_databases_not_setup }
+ },
+ "for query accessing unknown gitlab_schema" => {
+ model: ::ApplicationRecord,
+ sql: "SELECT 1 FROM new_table",
+ expect_error: /The query tried to access \["new_table"\] \(of undefined_new_table\)/,
+ setup: -> (_) { skip_if_multiple_databases_not_setup }
}
}
end
diff --git a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
index 22a70dc7df0..a4322689bf9 100644
--- a/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/prevent_cross_database_modification_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification, query_analyzers: false do
+RSpec.describe Gitlab::Database::QueryAnalyzers::PreventCrossDatabaseModification, query_analyzers: false,
+ feature_category: :pods do
let_it_be(:pipeline, refind: true) { create(:ci_pipeline) }
let_it_be(:project, refind: true) { create(:project) }
diff --git a/spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb b/spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb
index bcc39c0c3db..22ff66ff55e 100644
--- a/spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb
+++ b/spec/lib/gitlab/database/query_analyzers/query_recorder_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::QueryAnalyzers::QueryRecorder, query_analyzers: false do
+RSpec.describe Gitlab::Database::QueryAnalyzers::QueryRecorder, feature_category: :database, query_analyzers: false do
# We keep only the QueryRecorder analyzer running
around do |example|
described_class.with_suppressed(false) do
@@ -11,7 +11,6 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::QueryRecorder, query_analyzers:
end
context 'with query analyzer' do
- let(:query) { 'SELECT 1 FROM projects' }
let(:log_path) { Rails.root.join(described_class::LOG_PATH) }
let(:log_file) { described_class.log_file }
@@ -20,14 +19,44 @@ RSpec.describe Gitlab::Database::QueryAnalyzers::QueryRecorder, query_analyzers:
end
shared_examples_for 'an enabled query recorder' do
- it 'logs queries to a file' do
- allow(FileUtils).to receive(:mkdir_p)
- .with(log_path)
- expect(File).to receive(:write)
- .with(log_file, /^{"sql":"#{query}/, mode: 'a')
- expect(described_class).to receive(:analyze).with(/^#{query}/).and_call_original
-
- expect { ApplicationRecord.connection.execute(query) }.not_to raise_error
+ using RSpec::Parameterized::TableSyntax
+
+ normalized_query = <<~SQL.strip.tr("\n", ' ')
+ SELECT \\\\"projects\\\\".\\\\"id\\\\"
+ FROM \\\\"projects\\\\"
+ WHERE \\\\"projects\\\\".\\\\"namespace_id\\\\" = \\?
+ AND \\\\"projects\\\\".\\\\"id\\\\" IN \\(\\?,\\?,\\?\\);
+ SQL
+
+ where(:list_parameter, :bind_parameters) do
+ '$2, $3' | [1, 2, 3]
+ '$2, $3, $4' | [1, 2, 3, 4]
+ '$2 ,$3 ,$4 ,$5' | [1, 2, 3, 4, 5]
+ '$2 , $3 , $4 , $5, $6' | [1, 2, 3, 4, 5, 6]
+ '$2, $3 ,$4 , $5,$6,$7' | [1, 2, 3, 4, 5, 6, 7]
+ '$2,$3,$4,$5,$6,$7,$8' | [1, 2, 3, 4, 5, 6, 7, 8]
+ end
+
+ with_them do
+ before do
+ allow(described_class).to receive(:analyze).and_call_original
+ allow(FileUtils).to receive(:mkdir_p)
+ .with(log_path)
+ end
+
+ it 'logs normalized queries to a file' do
+ expect(File).to receive(:write)
+ .with(log_file, /^{"normalized":"#{normalized_query}/, mode: 'a')
+
+ expect do
+ ApplicationRecord.connection.exec_query(<<~SQL.strip.tr("\n", ' '), 'SQL', bind_parameters)
+ SELECT "projects"."id"
+ FROM "projects"
+ WHERE "projects"."namespace_id" = $1
+ AND "projects"."id" IN (#{list_parameter});
+ SQL
+ end.not_to raise_error
+ end
end
end
diff --git a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
index bb91617714a..bf993e85cb8 100644
--- a/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/coordinator_spec.rb
@@ -2,16 +2,18 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::Reindexing::Coordinator do
+RSpec.describe Gitlab::Database::Reindexing::Coordinator, feature_category: :database do
include Database::DatabaseHelpers
include ExclusiveLeaseHelpers
- let(:notifier) { instance_double(Gitlab::Database::Reindexing::GrafanaNotifier, notify_start: nil, notify_end: nil) }
let(:index) { create(:postgres_index) }
let(:connection) { index.connection }
+ let(:notifier) do
+ instance_double(Gitlab::Database::Reindexing::GrafanaNotifier, notify_start: nil, notify_end: nil)
+ end
let!(:lease) { stub_exclusive_lease(lease_key, uuid, timeout: lease_timeout) }
- let(:lease_key) { "gitlab/database/reindexing/coordinator/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
+ let(:lease_key) { "gitlab/database/indexing/actions/#{Gitlab::Database::PRIMARY_DATABASE_NAME}" }
let(:lease_timeout) { 1.day }
let(:uuid) { 'uuid' }
@@ -19,75 +21,83 @@ RSpec.describe Gitlab::Database::Reindexing::Coordinator do
model = Gitlab::Database.database_base_models[Gitlab::Database::PRIMARY_DATABASE_NAME]
Gitlab::Database::SharedModel.using_connection(model.connection) do
+ swapout_view_for_table(:postgres_indexes, connection: model.connection)
example.run
end
end
- before do
- swapout_view_for_table(:postgres_indexes)
- end
-
describe '#perform' do
subject { described_class.new(index, notifier).perform }
let(:reindexer) { instance_double(Gitlab::Database::Reindexing::ReindexConcurrently, perform: nil) }
let(:action) { create(:reindex_action, index: index) }
- before do
- allow(Gitlab::Database::Reindexing::ReindexConcurrently).to receive(:new).with(index).and_return(reindexer)
- allow(Gitlab::Database::Reindexing::ReindexAction).to receive(:create_for).with(index).and_return(action)
- end
+ context 'when executed during the weekend', time_travel_to: '2023-01-07T09:44:07Z' do
+ before do
+ allow(Gitlab::Database::Reindexing::ReindexConcurrently).to receive(:new).with(index).and_return(reindexer)
+ allow(Gitlab::Database::Reindexing::ReindexAction).to receive(:create_for).with(index).and_return(action)
+ end
- context 'locking' do
- it 'acquires a lock while reindexing' do
- expect(lease).to receive(:try_obtain).ordered.and_return(uuid)
+ context 'locking' do
+ it 'acquires a lock while reindexing' do
+ expect(lease).to receive(:try_obtain).ordered.and_return(uuid)
- expect(reindexer).to receive(:perform).ordered
+ expect(reindexer).to receive(:perform).ordered
- expect(Gitlab::ExclusiveLease).to receive(:cancel).ordered.with(lease_key, uuid)
+ expect(Gitlab::ExclusiveLease).to receive(:cancel).ordered.with(lease_key, uuid)
- subject
- end
+ subject
+ end
- it 'does not perform reindexing actions if lease is not granted' do
- expect(lease).to receive(:try_obtain).ordered.and_return(false)
- expect(Gitlab::Database::Reindexing::ReindexConcurrently).not_to receive(:new)
+ it 'does not perform reindexing actions if lease is not granted' do
+ expect(lease).to receive(:try_obtain).ordered.and_return(false)
+ expect(Gitlab::Database::Reindexing::ReindexConcurrently).not_to receive(:new)
- subject
+ subject
+ end
end
- end
- context 'notifications' do
- it 'sends #notify_start before reindexing' do
- expect(notifier).to receive(:notify_start).with(action).ordered
- expect(reindexer).to receive(:perform).ordered
+ context 'notifications' do
+ it 'sends #notify_start before reindexing' do
+ expect(notifier).to receive(:notify_start).with(action).ordered
+ expect(reindexer).to receive(:perform).ordered
- subject
- end
+ subject
+ end
- it 'sends #notify_end after reindexing and updating the action is done' do
- expect(action).to receive(:finish).ordered
- expect(notifier).to receive(:notify_end).with(action).ordered
+ it 'sends #notify_end after reindexing and updating the action is done' do
+ expect(action).to receive(:finish).ordered
+ expect(notifier).to receive(:notify_end).with(action).ordered
- subject
+ subject
+ end
end
- end
- context 'action tracking' do
- it 'calls #finish on the action' do
- expect(reindexer).to receive(:perform).ordered
- expect(action).to receive(:finish).ordered
+ context 'action tracking' do
+ it 'calls #finish on the action' do
+ expect(reindexer).to receive(:perform).ordered
+ expect(action).to receive(:finish).ordered
- subject
- end
+ subject
+ end
- it 'upon error, it still calls finish and raises the error' do
- expect(reindexer).to receive(:perform).ordered.and_raise('something went wrong')
- expect(action).to receive(:finish).ordered
+ it 'upon error, it still calls finish and raises the error' do
+ expect(reindexer).to receive(:perform).ordered.and_raise('something went wrong')
+ expect(action).to receive(:finish).ordered
- expect { subject }.to raise_error(/something went wrong/)
+ expect { subject }.to raise_error(/something went wrong/)
- expect(action).to be_failed
+ expect(action).to be_failed
+ end
+ end
+ end
+
+ context 'when executed during the week', time_travel_to: '2023-01-09T09:44:07Z' do
+ it 'does not start reindexing' do
+ expect(lease).not_to receive(:try_obtain)
+ expect(Gitlab::Database::Reindexing::ReindexConcurrently).not_to receive(:new)
+
+ expect(subject).to be_nil
end
end
end
@@ -97,33 +107,45 @@ RSpec.describe Gitlab::Database::Reindexing::Coordinator do
subject(:drop) { described_class.new(index, notifier).drop }
- context 'when exclusive lease is granted' do
- it 'drops the index with lock retries' do
- expect(lease).to receive(:try_obtain).ordered.and_return(uuid)
+ context 'when executed during the weekend', time_travel_to: '2023-01-07T09:44:07Z' do
+ context 'when exclusive lease is granted' do
+ it 'drops the index with lock retries' do
+ expect(lease).to receive(:try_obtain).ordered.and_return(uuid)
+
+ expect_query("SET lock_timeout TO '60000ms'")
+ expect_query("DROP INDEX CONCURRENTLY IF EXISTS \"public\".\"#{index.name}\"")
+ expect_query("RESET idle_in_transaction_session_timeout; RESET lock_timeout")
- expect_query("SET lock_timeout TO '60000ms'")
- expect_query("DROP INDEX CONCURRENTLY IF EXISTS \"public\".\"#{index.name}\"")
- expect_query("RESET idle_in_transaction_session_timeout; RESET lock_timeout")
+ expect(Gitlab::ExclusiveLease).to receive(:cancel).ordered.with(lease_key, uuid)
- expect(Gitlab::ExclusiveLease).to receive(:cancel).ordered.with(lease_key, uuid)
+ drop
+ end
- drop
+ def expect_query(sql)
+ expect(connection).to receive(:execute).ordered.with(sql).and_wrap_original do |method, sql|
+ method.call(sql.sub(/CONCURRENTLY/, ''))
+ end
+ end
end
- def expect_query(sql)
- expect(connection).to receive(:execute).ordered.with(sql).and_wrap_original do |method, sql|
- method.call(sql.sub(/CONCURRENTLY/, ''))
+ context 'when exclusive lease is not granted' do
+ it 'does not drop the index' do
+ expect(lease).to receive(:try_obtain).ordered.and_return(false)
+ expect(Gitlab::Database::WithLockRetriesOutsideTransaction).not_to receive(:new)
+ expect(connection).not_to receive(:execute)
+
+ drop
end
end
end
- context 'when exclusive lease is not granted' do
- it 'does not drop the index' do
- expect(lease).to receive(:try_obtain).ordered.and_return(false)
+ context 'when executed during the week', time_travel_to: '2023-01-09T09:44:07Z' do
+ it 'does not start reindexing' do
+ expect(lease).not_to receive(:try_obtain)
expect(Gitlab::Database::WithLockRetriesOutsideTransaction).not_to receive(:new)
expect(connection).not_to receive(:execute)
- drop
+ expect(drop).to be_nil
end
end
end
diff --git a/spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb b/spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb
index 1bccdda3be1..e67c97cbf9c 100644
--- a/spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/grafana_notifier_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe Gitlab::Database::Reindexing::GrafanaNotifier do
let(:action) { create(:reindex_action) }
before do
- swapout_view_for_table(:postgres_indexes)
+ swapout_view_for_table(:postgres_indexes, connection: ApplicationRecord.connection)
end
let(:headers) do
@@ -25,7 +25,9 @@ RSpec.describe Gitlab::Database::Reindexing::GrafanaNotifier do
let(:response) { double('response', success?: true) }
def expect_api_call(payload)
- expect(Gitlab::HTTP).to receive(:post).with("#{api_url}/api/annotations", body: payload.to_json, headers: headers, allow_local_requests: true).and_return(response)
+ expect(Gitlab::HTTP).to receive(:post).with(
+ "#{api_url}/api/annotations", body: payload.to_json, headers: headers, allow_local_requests: true
+ ).and_return(response)
end
shared_examples_for 'interacting with Grafana annotations API' do
@@ -109,7 +111,9 @@ RSpec.describe Gitlab::Database::Reindexing::GrafanaNotifier do
end
context 'additional tag is provided' do
- subject { described_class.new(api_key: api_key, api_url: api_url, additional_tag: additional_tag).notify_start(action) }
+ subject do
+ described_class.new(api_key: api_key, api_url: api_url, additional_tag: additional_tag).notify_start(action)
+ end
let(:payload) do
{
@@ -163,7 +167,9 @@ RSpec.describe Gitlab::Database::Reindexing::GrafanaNotifier do
end
context 'additional tag is provided' do
- subject { described_class.new(api_key: api_key, api_url: api_url, additional_tag: additional_tag).notify_end(action) }
+ subject do
+ described_class.new(api_key: api_key, api_url: api_url, additional_tag: additional_tag).notify_end(action)
+ end
let(:payload) do
{
diff --git a/spec/lib/gitlab/database/reindexing/index_selection_spec.rb b/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
index 2ae9037959d..e82a2ab467d 100644
--- a/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/index_selection_spec.rb
@@ -2,14 +2,16 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::Reindexing::IndexSelection do
+RSpec.describe Gitlab::Database::Reindexing::IndexSelection, feature_category: :database do
include Database::DatabaseHelpers
subject { described_class.new(Gitlab::Database::PostgresIndex.all).to_a }
+ let(:connection) { ApplicationRecord.connection }
+
before do
- swapout_view_for_table(:postgres_index_bloat_estimates)
- swapout_view_for_table(:postgres_indexes)
+ swapout_view_for_table(:postgres_index_bloat_estimates, connection: connection)
+ swapout_view_for_table(:postgres_indexes, connection: connection)
create_list(:postgres_index, 10, ondisk_size_bytes: 10.gigabytes).each_with_index do |index, i|
create(:postgres_index_bloat_estimate, index: index, bloat_size_bytes: 2.gigabyte * (i + 1))
@@ -17,7 +19,7 @@ RSpec.describe Gitlab::Database::Reindexing::IndexSelection do
end
def execute(sql)
- ActiveRecord::Base.connection.execute(sql)
+ connection.execute(sql)
end
it 'orders by highest relative bloat first' do
@@ -74,4 +76,30 @@ RSpec.describe Gitlab::Database::Reindexing::IndexSelection do
expect(subject.map(&:name).sort).to eq(not_recently_reindexed.map(&:name).sort)
end
end
+
+ context 'with restricted tables' do
+ let!(:ci_builds) do
+ create(
+ :postgres_index_bloat_estimate,
+ index: create(:postgres_index, ondisk_size_bytes: 100.gigabytes, tablename: 'ci_builds'),
+ bloat_size_bytes: 20.gigabyte
+ )
+ end
+
+ context 'when executed on Fridays', time_travel_to: '2022-12-16T09:44:07Z' do
+ it { expect(subject).not_to include(ci_builds.index) }
+ end
+
+ context 'when executed on Saturdays', time_travel_to: '2022-12-17T09:44:07Z' do
+ it { expect(subject).to include(ci_builds.index) }
+ end
+
+ context 'when executed on Sundays', time_travel_to: '2022-12-18T09:44:07Z' do
+ it { expect(subject).not_to include(ci_builds.index) }
+ end
+
+ context 'when executed on Mondays', time_travel_to: '2022-12-19T09:44:07Z' do
+ it { expect(subject).not_to include(ci_builds.index) }
+ end
+ end
end
diff --git a/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb b/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb
index 1b409924acc..06b89e08737 100644
--- a/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb
+++ b/spec/lib/gitlab/database/reindexing/reindex_action_spec.rb
@@ -2,13 +2,13 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::Reindexing::ReindexAction do
+RSpec.describe Gitlab::Database::Reindexing::ReindexAction, feature_category: :database do
include Database::DatabaseHelpers
let(:index) { create(:postgres_index) }
before_all do
- swapout_view_for_table(:postgres_indexes)
+ swapout_view_for_table(:postgres_indexes, connection: ApplicationRecord.connection)
end
it { is_expected.to be_a Gitlab::Database::SharedModel }
diff --git a/spec/lib/gitlab/database/reindexing_spec.rb b/spec/lib/gitlab/database/reindexing_spec.rb
index fa26aa59329..6575c92e313 100644
--- a/spec/lib/gitlab/database/reindexing_spec.rb
+++ b/spec/lib/gitlab/database/reindexing_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Database::Reindexing, feature_category: :database do
+RSpec.describe Gitlab::Database::Reindexing, feature_category: :database, time_travel_to: '2023-01-07T09:44:07Z' do
include ExclusiveLeaseHelpers
include Database::DatabaseHelpers
@@ -76,7 +76,7 @@ RSpec.describe Gitlab::Database::Reindexing, feature_category: :database do
let(:limit) { 5 }
before_all do
- swapout_view_for_table(:postgres_indexes)
+ swapout_view_for_table(:postgres_indexes, connection: ApplicationRecord.connection)
end
before do
@@ -147,7 +147,7 @@ RSpec.describe Gitlab::Database::Reindexing, feature_category: :database do
subject { described_class.perform_from_queue(maximum_records: limit) }
before_all do
- swapout_view_for_table(:postgres_indexes)
+ swapout_view_for_table(:postgres_indexes, connection: ApplicationRecord.connection)
end
let(:limit) { 2 }
diff --git a/spec/lib/gitlab/database/tables_truncate_spec.rb b/spec/lib/gitlab/database/tables_truncate_spec.rb
index 4d04bd67a1e..9af0b964221 100644
--- a/spec/lib/gitlab/database/tables_truncate_spec.rb
+++ b/spec/lib/gitlab/database/tables_truncate_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_base,
- :suppress_gitlab_schemas_validate_connection do
+ :suppress_gitlab_schemas_validate_connection, feature_category: :pods do
include MigrationsHelpers
let(:min_batch_size) { 1 }
@@ -18,7 +18,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
let(:main_db_shared_item_model) { table("_test_gitlab_shared_items", database: "main") }
let(:main_db_partitioned_item) { table("_test_gitlab_hook_logs", database: "main") }
let(:main_db_partitioned_item_detached) do
- table("gitlab_partitions_dynamic._test_gitlab_hook_logs_20220101", database: "main")
+ table("gitlab_partitions_dynamic._test_gitlab_hook_logs_202201", database: "main")
end
# CI Database
@@ -29,7 +29,7 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
let(:ci_db_shared_item_model) { table("_test_gitlab_shared_items", database: "ci") }
let(:ci_db_partitioned_item) { table("_test_gitlab_hook_logs", database: "ci") }
let(:ci_db_partitioned_item_detached) do
- table("gitlab_partitions_dynamic._test_gitlab_hook_logs_20220101", database: "ci")
+ table("gitlab_partitions_dynamic._test_gitlab_hook_logs_202201", database: "ci")
end
shared_examples 'truncating legacy tables on a database' do
@@ -64,19 +64,19 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
id bigserial not null,
created_at timestamptz not null,
item_id BIGINT NOT NULL,
- primary key (id, created_at),
+ PRIMARY KEY (id, created_at),
CONSTRAINT fk_constrained_1 FOREIGN KEY(item_id) REFERENCES _test_gitlab_main_items(id)
) PARTITION BY RANGE(created_at);
- CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_20220101
+ CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_202201
PARTITION OF _test_gitlab_hook_logs
FOR VALUES FROM ('20220101') TO ('20220131');
- CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_20220201
+ CREATE TABLE gitlab_partitions_dynamic._test_gitlab_hook_logs_202202
PARTITION OF _test_gitlab_hook_logs
FOR VALUES FROM ('20220201') TO ('20220228');
- ALTER TABLE _test_gitlab_hook_logs DETACH PARTITION gitlab_partitions_dynamic._test_gitlab_hook_logs_20220101;
+ ALTER TABLE _test_gitlab_hook_logs DETACH PARTITION gitlab_partitions_dynamic._test_gitlab_hook_logs_202201;
SQL
main_connection.execute(main_tables_sql)
@@ -124,14 +124,14 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
Gitlab::Database::SharedModel.using_connection(main_connection) do
Postgresql::DetachedPartition.create!(
- table_name: '_test_gitlab_hook_logs_20220101',
+ table_name: '_test_gitlab_hook_logs_202201',
drop_after: Time.current
)
end
Gitlab::Database::SharedModel.using_connection(ci_connection) do
Postgresql::DetachedPartition.create!(
- table_name: '_test_gitlab_hook_logs_20220101',
+ table_name: '_test_gitlab_hook_logs_202201',
drop_after: Time.current
)
end
@@ -176,7 +176,8 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
Gitlab::Database::LockWritesManager.new(
table_name: table,
connection: connection,
- database_name: connection.pool.db_config.name
+ database_name: connection.pool.db_config.name,
+ with_retries: false
).lock_writes
end
end
@@ -236,6 +237,25 @@ RSpec.describe Gitlab::Database::TablesTruncate, :reestablished_active_record_ba
end
end
+ context 'when one of the attached partitions happened to be locked for writes' do
+ before do
+ skip if connection.pool.db_config.name != 'ci'
+
+ Gitlab::Database::LockWritesManager.new(
+ table_name: "#{Gitlab::Database::DYNAMIC_PARTITIONS_SCHEMA}._test_gitlab_hook_logs_202202",
+ connection: connection,
+ database_name: connection.pool.db_config.name,
+ with_retries: false
+ ).lock_writes
+ end
+
+ it 'truncates the locked partition successfully' do
+ expect do
+ truncate_legacy_tables
+ end.to change { ci_db_partitioned_item.count }.from(5).to(0)
+ end
+ end
+
context 'with geo configured' do
let(:geo_connection) { Gitlab::Database.database_base_models[:geo].connection }
diff --git a/spec/lib/gitlab/database_spec.rb b/spec/lib/gitlab/database_spec.rb
index 1a482b33a92..86bc8e71fd7 100644
--- a/spec/lib/gitlab/database_spec.rb
+++ b/spec/lib/gitlab/database_spec.rb
@@ -302,6 +302,26 @@ RSpec.describe Gitlab::Database do
end
end
+ describe '.database_base_models_with_gitlab_shared' do
+ before do
+ Gitlab::Database.instance_variable_set(:@database_base_models_with_gitlab_shared, nil)
+ end
+
+ it 'memoizes the models' do
+ expect { Gitlab::Database.database_base_models_with_gitlab_shared }.to change { Gitlab::Database.instance_variable_get(:@database_base_models_with_gitlab_shared) }.from(nil)
+ end
+ end
+
+ describe '.database_base_models_using_load_balancing' do
+ before do
+ Gitlab::Database.instance_variable_set(:@database_base_models_using_load_balancing, nil)
+ end
+
+ it 'memoizes the models' do
+ expect { Gitlab::Database.database_base_models_using_load_balancing }.to change { Gitlab::Database.instance_variable_get(:@database_base_models_using_load_balancing) }.from(nil)
+ end
+ end
+
describe '#true_value' do
it 'returns correct value' do
expect(described_class.true_value).to eq "'t'"
diff --git a/spec/lib/gitlab/diff/file_collection/merge_request_diff_base_spec.rb b/spec/lib/gitlab/diff/file_collection/merge_request_diff_base_spec.rb
index 51bee6d45e4..861852d8f0b 100644
--- a/spec/lib/gitlab/diff/file_collection/merge_request_diff_base_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/merge_request_diff_base_spec.rb
@@ -26,6 +26,17 @@ RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBase do
end
end
+ describe '#diff_files' do
+ subject(:diff_files) { described_class.new(diffable, diff_options: nil).diff_files }
+
+ it 'measures diffs_highlight_cache_decorate' do
+ allow(Gitlab::Metrics).to receive(:measure).and_call_original
+ expect(Gitlab::Metrics).to receive(:measure).with(:diffs_highlight_cache_decorate).and_call_original
+
+ diff_files
+ end
+ end
+
describe '#cache_key' do
subject(:cache_key) { described_class.new(diffable, diff_options: nil).cache_key }
diff --git a/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb b/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb
index 9ac242459bf..8e14f48ae29 100644
--- a/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/merge_request_diff_batch_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch, feature_category: :code_review do
+RSpec.describe Gitlab::Diff::FileCollection::MergeRequestDiffBatch, feature_category: :code_review_workflow do
let(:merge_request) { create(:merge_request) }
let(:batch_page) { 0 }
let(:batch_size) { 10 }
diff --git a/spec/lib/gitlab/diff/file_collection/paginated_merge_request_diff_spec.rb b/spec/lib/gitlab/diff/file_collection/paginated_merge_request_diff_spec.rb
index 74e5e667702..ee956d04325 100644
--- a/spec/lib/gitlab/diff/file_collection/paginated_merge_request_diff_spec.rb
+++ b/spec/lib/gitlab/diff/file_collection/paginated_merge_request_diff_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Diff::FileCollection::PaginatedMergeRequestDiff, feature_category: :code_review do
+RSpec.describe Gitlab::Diff::FileCollection::PaginatedMergeRequestDiff, feature_category: :code_review_workflow do
let(:merge_request) { create(:merge_request) }
let(:page) { 1 }
let(:per_page) { 10 }
diff --git a/spec/lib/gitlab/error_tracking_spec.rb b/spec/lib/gitlab/error_tracking_spec.rb
index 4900547e9e9..5eedd716a4a 100644
--- a/spec/lib/gitlab/error_tracking_spec.rb
+++ b/spec/lib/gitlab/error_tracking_spec.rb
@@ -154,6 +154,32 @@ RSpec.describe Gitlab::ErrorTracking do
end
end
+ describe '.log_and_raise_exception' do
+ subject(:log_and_raise_exception) do
+ described_class.log_and_raise_exception(exception, extra)
+ end
+
+ it 'only logs and raises the exception' do
+ expect(Raven).not_to receive(:capture_exception)
+ expect(Sentry).not_to receive(:capture_exception)
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(logger_payload)
+
+ expect { log_and_raise_exception }.to raise_error(RuntimeError)
+ end
+
+ context 'when extra details are provided' do
+ let(:extra) { { test: 1, my_token: 'test' } }
+
+ it 'filters parameters' do
+ expect(Gitlab::ErrorTracking::Logger).to receive(:error).with(
+ hash_including({ 'extra.test' => 1, 'extra.my_token' => '[FILTERED]' })
+ )
+
+ expect { log_and_raise_exception }.to raise_error(RuntimeError)
+ end
+ end
+ end
+
describe '.track_exception' do
subject(:track_exception) do
described_class.track_exception(exception, extra)
diff --git a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
index ae2e343377d..14d5cef103b 100644
--- a/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/ref_service_spec.rb
@@ -409,17 +409,6 @@ RSpec.describe Gitlab::GitalyClient::RefService do
end
end
- describe '#pack_refs' do
- it 'sends a pack_refs message' do
- expect_any_instance_of(Gitaly::RefService::Stub)
- .to receive(:pack_refs)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(:pack_refs_response))
-
- client.pack_refs
- end
- end
-
describe '#find_refs_by_oid' do
let(:oid) { project.repository.commit.id }
diff --git a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
index 5aef250afac..5eb60d2caa5 100644
--- a/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
+++ b/spec/lib/gitlab/gitaly_client/repository_service_spec.rb
@@ -21,39 +21,6 @@ RSpec.describe Gitlab::GitalyClient::RepositoryService do
end
end
- describe '#garbage_collect' do
- it 'sends a garbage_collect message' do
- expect_any_instance_of(Gitaly::RepositoryService::Stub)
- .to receive(:garbage_collect)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(:garbage_collect_response))
-
- client.garbage_collect(true, prune: true)
- end
- end
-
- describe '#repack_full' do
- it 'sends a repack_full message' do
- expect_any_instance_of(Gitaly::RepositoryService::Stub)
- .to receive(:repack_full)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(:repack_full_response))
-
- client.repack_full(true)
- end
- end
-
- describe '#repack_incremental' do
- it 'sends a repack_incremental message' do
- expect_any_instance_of(Gitaly::RepositoryService::Stub)
- .to receive(:repack_incremental)
- .with(gitaly_request_with_path(storage_name, relative_path), kind_of(Hash))
- .and_return(double(:repack_incremental_response))
-
- client.repack_incremental
- end
- end
-
describe '#optimize_repository' do
it 'sends a optimize_repository message' do
expect_any_instance_of(Gitaly::RepositoryService::Stub)
diff --git a/spec/lib/gitlab/gitaly_client_spec.rb b/spec/lib/gitlab/gitaly_client_spec.rb
index 3d33bf93c23..f5e75242f40 100644
--- a/spec/lib/gitlab/gitaly_client_spec.rb
+++ b/spec/lib/gitlab/gitaly_client_spec.rb
@@ -4,13 +4,19 @@ require 'spec_helper'
# We stub Gitaly in `spec/support/gitaly.rb` for other tests. We don't want
# those stubs while testing the GitalyClient itself.
-RSpec.describe Gitlab::GitalyClient do
+RSpec.describe Gitlab::GitalyClient, feature_category: :gitaly do
def stub_repos_storages(address)
allow(Gitlab.config.repositories).to receive(:storages).and_return({
'default' => { 'gitaly_address' => address }
})
end
+ around do |example|
+ described_class.clear_stubs!
+ example.run
+ described_class.clear_stubs!
+ end
+
describe '.query_time', :request_store do
it 'increments query times' do
subject.add_query_time(0.4510004)
@@ -157,45 +163,131 @@ RSpec.describe Gitlab::GitalyClient do
end
end
+ describe '.create_channel' do
+ where(:storage, :address, :expected_target) do
+ [
+ ['default', 'unix:tmp/gitaly.sock', 'unix:tmp/gitaly.sock'],
+ ['default', 'tcp://localhost:9876', 'localhost:9876'],
+ ['default', 'tls://localhost:9876', 'localhost:9876']
+ ]
+ end
+
+ with_them do
+ before do
+ allow(Gitlab.config.repositories).to receive(:storages).and_return(
+ 'default' => { 'gitaly_address' => address },
+ 'other' => { 'gitaly_address' => address }
+ )
+ end
+
+ it 'creates channel based on storage' do
+ channel = described_class.create_channel(storage)
+
+ expect(channel).to be_a(GRPC::Core::Channel)
+ expect(channel.target).to eql(expected_target)
+ end
+
+ it 'caches channel based on storage' do
+ channel_1 = described_class.create_channel(storage)
+ channel_2 = described_class.create_channel(storage)
+
+ expect(channel_1).to equal(channel_2)
+ end
+
+ it 'returns different channels for different storages' do
+ channel_1 = described_class.create_channel(storage)
+ channel_2 = described_class.create_channel('other')
+
+ expect(channel_1).not_to equal(channel_2)
+ end
+ end
+ end
+
describe '.stub' do
- # Notice that this is referring to gRPC "stubs", not rspec stubs
- before do
- described_class.clear_stubs!
+ matcher :be_a_grpc_channel do |expected_address|
+ match { |actual| actual.is_a?(::GRPC::Core::Channel) && actual.target == expected_address }
+ end
+
+ matcher :have_same_channel do |expected|
+ match do |actual|
+ # gRPC client stub does not expose the underlying channel. We need a way
+ # to verify two stubs have the same channel. So, no way around.
+ expected_channel = expected.instance_variable_get(:@ch)
+ actual_channel = actual.instance_variable_get(:@ch)
+ expected_channel.is_a?(GRPC::Core::Channel) &&
+ actual_channel.is_a?(GRPC::Core::Channel) &&
+ expected_channel == actual_channel
+ end
end
context 'when passed a UNIX socket address' do
- it 'passes the address as-is to GRPC' do
- address = 'unix:/tmp/gitaly.sock'
- stub_repos_storages address
+ let(:address) { 'unix:/tmp/gitaly.sock' }
- expect(Gitaly::CommitService::Stub).to receive(:new).with(address, any_args)
+ before do
+ stub_repos_storages address
+ end
+ it 'passes the address as-is to GRPC' do
+ expect(Gitaly::CommitService::Stub).to receive(:new).with(
+ address, nil, channel_override: be_a_grpc_channel(address), interceptors: []
+ )
described_class.stub(:commit_service, 'default')
end
+
+ it 'shares the same channel object with other stub' do
+ stub_commit = described_class.stub(:commit_service, 'default')
+ stub_blob = described_class.stub(:blob_service, 'default')
+
+ expect(stub_commit).to have_same_channel(stub_blob)
+ end
end
context 'when passed a TLS address' do
- it 'strips tls:// prefix before passing it to GRPC::Core::Channel initializer' do
- address = 'localhost:9876'
+ let(:address) { 'localhost:9876' }
+
+ before do
prefixed_address = "tls://#{address}"
stub_repos_storages prefixed_address
+ end
- expect(Gitaly::CommitService::Stub).to receive(:new).with(address, any_args)
+ it 'strips tls:// prefix before passing it to GRPC::Core::Channel initializer' do
+ expect(Gitaly::CommitService::Stub).to receive(:new).with(
+ address, nil, channel_override: be_a(GRPC::Core::Channel), interceptors: []
+ )
described_class.stub(:commit_service, 'default')
end
+
+ it 'shares the same channel object with other stub' do
+ stub_commit = described_class.stub(:commit_service, 'default')
+ stub_blob = described_class.stub(:blob_service, 'default')
+
+ expect(stub_commit).to have_same_channel(stub_blob)
+ end
end
context 'when passed a TCP address' do
- it 'strips tcp:// prefix before passing it to GRPC::Core::Channel initializer' do
- address = 'localhost:9876'
+ let(:address) { 'localhost:9876' }
+
+ before do
prefixed_address = "tcp://#{address}"
stub_repos_storages prefixed_address
+ end
- expect(Gitaly::CommitService::Stub).to receive(:new).with(address, any_args)
+ it 'strips tcp:// prefix before passing it to GRPC::Core::Channel initializer' do
+ expect(Gitaly::CommitService::Stub).to receive(:new).with(
+ address, nil, channel_override: be_a(GRPC::Core::Channel), interceptors: []
+ )
described_class.stub(:commit_service, 'default')
end
+
+ it 'shares the same channel object with other stub' do
+ stub_commit = described_class.stub(:commit_service, 'default')
+ stub_blob = described_class.stub(:blob_service, 'default')
+
+ expect(stub_commit).to have_same_channel(stub_blob)
+ end
end
end
diff --git a/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb b/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb
index 69a4d646562..6bfbfbdeddf 100644
--- a/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb
+++ b/spec/lib/gitlab/github_gists_import/importer/gist_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_category: :importer do
+RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_category: :importers do
subject { described_class.new(gist_object, user.id).execute }
let_it_be(:user) { create(:user) }
@@ -63,7 +63,7 @@ RSpec.describe Gitlab::GithubGistsImport::Importer::GistImporter, feature_catego
expect(user.snippets.count).to eq(0)
expect(result.error?).to eq(true)
- expect(result.errors).to match_array(['Snippet max file count exceeded'])
+ expect(result.errors).to match_array(['Snippet maximum file count exceeded'])
end
end
diff --git a/spec/lib/gitlab/github_gists_import/importer/gists_importer_spec.rb b/spec/lib/gitlab/github_gists_import/importer/gists_importer_spec.rb
index 704999a99a9..d555a847ea5 100644
--- a/spec/lib/gitlab/github_gists_import/importer/gists_importer_spec.rb
+++ b/spec/lib/gitlab/github_gists_import/importer/gists_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubGistsImport::Importer::GistsImporter, feature_category: :importer do
+RSpec.describe Gitlab::GithubGistsImport::Importer::GistsImporter, feature_category: :importers do
subject(:result) { described_class.new(user, token).execute }
let_it_be(:user) { create(:user) }
diff --git a/spec/lib/gitlab/github_gists_import/representation/gist_spec.rb b/spec/lib/gitlab/github_gists_import/representation/gist_spec.rb
index 480aefb2c74..d6b47a1e837 100644
--- a/spec/lib/gitlab/github_gists_import/representation/gist_spec.rb
+++ b/spec/lib/gitlab/github_gists_import/representation/gist_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubGistsImport::Representation::Gist, feature_category: :importer do
+RSpec.describe Gitlab::GithubGistsImport::Representation::Gist, feature_category: :importers do
shared_examples 'a Gist' do
it 'returns an instance of Gist' do
expect(gist).to be_an_instance_of(described_class)
diff --git a/spec/lib/gitlab/github_gists_import/status_spec.rb b/spec/lib/gitlab/github_gists_import/status_spec.rb
index 4cbbbd430eb..d2016ef0248 100644
--- a/spec/lib/gitlab/github_gists_import/status_spec.rb
+++ b/spec/lib/gitlab/github_gists_import/status_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubGistsImport::Status, :clean_gitlab_redis_cache, feature_category: :importer do
+RSpec.describe Gitlab::GithubGistsImport::Status, :clean_gitlab_redis_cache, feature_category: :importers do
subject(:import_status) { described_class.new(user.id) }
let_it_be(:user) { create(:user) }
diff --git a/spec/lib/gitlab/github_import/bulk_importing_spec.rb b/spec/lib/gitlab/github_import/bulk_importing_spec.rb
index af31cb6c873..136ddb566aa 100644
--- a/spec/lib/gitlab/github_import/bulk_importing_spec.rb
+++ b/spec/lib/gitlab/github_import/bulk_importing_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::BulkImporting, feature_category: :importer do
+RSpec.describe Gitlab::GithubImport::BulkImporting, feature_category: :importers do
let(:project) { instance_double(Project, id: 1) }
let(:importer) { MyImporter.new(project, double) }
let(:importer_class) do
diff --git a/spec/lib/gitlab/github_import/client_spec.rb b/spec/lib/gitlab/github_import/client_spec.rb
index 526a8721ff3..d69bc4d60ee 100644
--- a/spec/lib/gitlab/github_import/client_spec.rb
+++ b/spec/lib/gitlab/github_import/client_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Client do
+RSpec.describe Gitlab::GithubImport::Client, feature_category: :importers do
subject(:client) { described_class.new('foo', parallel: parallel) }
let(:parallel) { true }
@@ -614,6 +614,46 @@ RSpec.describe Gitlab::GithubImport::Client do
client.search_repos_by_name_graphql('test')
end
+ context 'when relation type option present' do
+ context 'when relation type is owned' do
+ let(:expected_query) { 'test in:name is:public,private user:user' }
+
+ it 'searches for repositories owned by the user based on name' do
+ expect(client.octokit).to receive(:post).with(
+ '/graphql', { query: expected_graphql }.to_json
+ )
+
+ client.search_repos_by_name_graphql('test', relation_type: 'owned')
+ end
+ end
+
+ context 'when relation type is organization' do
+ let(:expected_query) { 'test in:name is:public,private org:test-login' }
+
+ it 'searches for repositories within the organization based on name' do
+ expect(client.octokit).to receive(:post).with(
+ '/graphql', { query: expected_graphql }.to_json
+ )
+
+ client.search_repos_by_name_graphql(
+ 'test', relation_type: 'organization', organization_login: 'test-login'
+ )
+ end
+ end
+
+ context 'when relation type is collaborated' do
+ let(:expected_query) { 'test in:name is:public,private repo:repo1 repo:repo2' }
+
+ it 'searches for collaborated repositories based on name' do
+ expect(client.octokit).to receive(:post).with(
+ '/graphql', { query: expected_graphql }.to_json
+ )
+
+ client.search_repos_by_name_graphql('test', relation_type: 'collaborated')
+ end
+ end
+ end
+
context 'when pagination options present' do
context 'with "first" option' do
let(:expected_graphql_params) do
diff --git a/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
index ad9ef4afddd..9e295ab215a 100644
--- a/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/labels_importer_spec.rb
@@ -2,7 +2,8 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::LabelsImporter, :clean_gitlab_redis_cache, feature_category: :importer do
+RSpec.describe Gitlab::GithubImport::Importer::LabelsImporter, :clean_gitlab_redis_cache,
+feature_category: :importers do
let(:project) { create(:project, import_source: 'foo/bar') }
let(:client) { double(:client) }
let(:importer) { described_class.new(project, client) }
diff --git a/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
index 8667729d79b..47b9a41c364 100644
--- a/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/milestones_importer_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Gitlab::GithubImport::Importer::MilestonesImporter, :clean_gitlab_redis_cache,
- feature_category: :importer do
+ feature_category: :importers do
let(:project) { create(:project, import_source: 'foo/bar') }
let(:client) { double(:client) }
let(:importer) { described_class.new(project, client) }
diff --git a/spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb b/spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb
index d6b7411e640..d999bb3a3a3 100644
--- a/spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/protected_branch_importer_spec.rb
@@ -15,6 +15,9 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
let(:expected_merge_access_level) { Gitlab::Access::MAINTAINER }
let(:expected_allow_force_push) { false }
let(:expected_code_owner_approval_required) { false }
+ let(:allowed_to_push_users) { [] }
+ let(:push_access_levels_number) { 1 }
+ let(:push_access_levels_attributes) { [{ access_level: expected_push_access_level }] }
let(:github_protected_branch) do
Gitlab::GithubImport::Representation::ProtectedBranch.new(
id: branch_name,
@@ -22,7 +25,8 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
required_conversation_resolution: required_conversation_resolution,
required_signatures: required_signatures,
required_pull_request_reviews: required_pull_request_reviews,
- require_code_owner_reviews: require_code_owner_reviews_on_github
+ require_code_owner_reviews: require_code_owner_reviews_on_github,
+ allowed_to_push_users: allowed_to_push_users
)
end
@@ -36,7 +40,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
let(:expected_ruleset) do
{
name: 'protection',
- push_access_levels_attributes: [{ access_level: expected_push_access_level }],
+ push_access_levels_attributes: push_access_levels_attributes,
merge_access_levels_attributes: [{ access_level: expected_merge_access_level }],
allow_force_push: expected_allow_force_push,
code_owner_approval_required: expected_code_owner_approval_required
@@ -56,7 +60,7 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
it 'creates protected branch and access levels for given github rule' do
expect { importer.execute }.to change(ProtectedBranch, :count).by(1)
- .and change(ProtectedBranch::PushAccessLevel, :count).by(1)
+ .and change(ProtectedBranch::PushAccessLevel, :count).by(push_access_levels_number)
.and change(ProtectedBranch::MergeAccessLevel, :count).by(1)
end
end
@@ -220,10 +224,97 @@ RSpec.describe Gitlab::GithubImport::Importer::ProtectedBranchImporter do
context 'when required_pull_request_reviews rule is enabled on GitHub' do
let(:required_pull_request_reviews) { true }
- let(:expected_push_access_level) { Gitlab::Access::NO_ACCESS }
- let(:expected_merge_access_level) { Gitlab::Access::MAINTAINER }
- it_behaves_like 'create branch protection by the strictest ruleset'
+ context 'when no user is allowed to bypass push restrictions' do
+ let(:expected_push_access_level) { Gitlab::Access::NO_ACCESS }
+ let(:expected_merge_access_level) { Gitlab::Access::MAINTAINER }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+
+ context 'when there are users who are allowed to bypass push restrictions' do
+ let(:owner_id) { project.owner.id }
+ let(:owner_username) { project.owner.username }
+ let(:other_user) { create(:user) }
+ let(:other_user_id) { other_user.id }
+ let(:other_user_username) { other_user.username }
+ let(:allowed_to_push_users) do
+ [
+ { id: owner_id, login: owner_username },
+ { id: other_user_id, login: other_user_username }
+ ]
+ end
+
+ context 'when the protected_refs_for_users feature is available', if: Gitlab.ee? do
+ let(:expected_merge_access_level) { Gitlab::Access::MAINTAINER }
+
+ before do
+ stub_licensed_features(protected_refs_for_users: true)
+ end
+
+ context 'when the users are found on GitLab' do
+ let(:push_access_levels_number) { 2 }
+ let(:push_access_levels_attributes) do
+ [
+ { user_id: owner_id },
+ { user_id: other_user_id }
+ ]
+ end
+
+ before do
+ project.add_member(other_user, :maintainer)
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(owner_id, owner_username).and_return(owner_id)
+ allow(finder).to receive(:find).with(other_user_id, other_user_username).and_return(other_user_id)
+ end
+ end
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+
+ context 'when one of the found users is not a member of the imported project' do
+ let(:push_access_levels_number) { 1 }
+ let(:push_access_levels_attributes) do
+ [
+ { user_id: owner_id }
+ ]
+ end
+
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).with(owner_id, owner_username).and_return(owner_id)
+ allow(finder).to receive(:find).with(other_user_id, other_user_username).and_return(other_user_id)
+ end
+ end
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+
+ context 'when the users are not found on GitLab' do
+ before do
+ allow_next_instance_of(Gitlab::GithubImport::UserFinder) do |finder|
+ allow(finder).to receive(:find).and_return(nil)
+ end
+ end
+
+ let(:expected_push_access_level) { Gitlab::Access::NO_ACCESS }
+ let(:expected_merge_access_level) { Gitlab::Access::MAINTAINER }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+ end
+
+ context 'when the protected_refs_for_users feature is not available' do
+ before do
+ stub_licensed_features(protected_refs_for_users: false)
+ end
+
+ let(:expected_push_access_level) { Gitlab::Access::NO_ACCESS }
+ let(:expected_merge_access_level) { Gitlab::Access::MAINTAINER }
+
+ it_behaves_like 'create branch protection by the strictest ruleset'
+ end
+ end
end
context 'when required_pull_request_reviews rule is disabled on GitHub' do
diff --git a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
index ccbe5b5fc50..fe4d3e9d90b 100644
--- a/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
+++ b/spec/lib/gitlab/github_import/importer/releases_importer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter, feature_category: :importer do
+RSpec.describe Gitlab::GithubImport::Importer::ReleasesImporter, feature_category: :importers do
let(:project) { create(:project) }
let(:client) { double(:client) }
let(:importer) { described_class.new(project, client) }
diff --git a/spec/lib/gitlab/github_import/page_counter_spec.rb b/spec/lib/gitlab/github_import/page_counter_spec.rb
index 511b19c00e5..ddb62cc8fad 100644
--- a/spec/lib/gitlab/github_import/page_counter_spec.rb
+++ b/spec/lib/gitlab/github_import/page_counter_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubImport::PageCounter, :clean_gitlab_redis_cache, feature_category: :importer do
+RSpec.describe Gitlab::GithubImport::PageCounter, :clean_gitlab_redis_cache, feature_category: :importers do
let(:project) { double(:project, id: 1) }
let(:counter) { described_class.new(project, :issues) }
diff --git a/spec/lib/gitlab/github_import/representation/protected_branch_spec.rb b/spec/lib/gitlab/github_import/representation/protected_branch_spec.rb
index 60cae79459e..e57ea31d1d2 100644
--- a/spec/lib/gitlab/github_import/representation/protected_branch_spec.rb
+++ b/spec/lib/gitlab/github_import/representation/protected_branch_spec.rb
@@ -28,6 +28,14 @@ RSpec.describe Gitlab::GithubImport::Representation::ProtectedBranch do
it 'includes the protected branch require_code_owner_reviews' do
expect(protected_branch.require_code_owner_reviews).to eq true
end
+
+ it 'includes the protected branch allowed_to_push_users' do
+ expect(protected_branch.allowed_to_push_users[0])
+ .to be_an_instance_of(Gitlab::GithubImport::Representation::User)
+
+ expect(protected_branch.allowed_to_push_users[0].id).to eq(4)
+ expect(protected_branch.allowed_to_push_users[0].login).to eq('alice')
+ end
end
end
@@ -40,7 +48,7 @@ RSpec.describe Gitlab::GithubImport::Representation::ProtectedBranch do
)
enabled_setting = Struct.new(:enabled, keyword_init: true)
required_pull_request_reviews = Struct.new(
- :url, :dismissal_restrictions, :require_code_owner_reviews,
+ :url, :dismissal_restrictions, :require_code_owner_reviews, :bypass_pull_request_allowances,
keyword_init: true
)
response.new(
@@ -57,7 +65,17 @@ RSpec.describe Gitlab::GithubImport::Representation::ProtectedBranch do
required_pull_request_reviews: required_pull_request_reviews.new(
url: 'https://example.com/branches/main/protection/required_pull_request_reviews',
dismissal_restrictions: {},
- require_code_owner_reviews: true
+ require_code_owner_reviews: true,
+ bypass_pull_request_allowances: {
+ users: [
+ {
+ login: 'alice',
+ id: 4,
+ url: 'https://api.github.com/users/cervols',
+ type: 'User'
+ }
+ ]
+ }
)
)
end
@@ -76,7 +94,8 @@ RSpec.describe Gitlab::GithubImport::Representation::ProtectedBranch do
'required_conversation_resolution' => true,
'required_signatures' => true,
'required_pull_request_reviews' => true,
- 'require_code_owner_reviews' => true
+ 'require_code_owner_reviews' => true,
+ 'allowed_to_push_users' => [{ 'id' => 4, 'login' => 'alice' }]
}
end
diff --git a/spec/lib/gitlab/http_spec.rb b/spec/lib/gitlab/http_spec.rb
index 929fd37ee40..57e4b4fc74b 100644
--- a/spec/lib/gitlab/http_spec.rb
+++ b/spec/lib/gitlab/http_spec.rb
@@ -51,10 +51,10 @@ RSpec.describe Gitlab::HTTP do
end
@original_net_http = Net.send(:remove_const, :HTTP)
- @webmock_net_http = WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_get('@webMockNetHTTP')
+ @webmock_net_http = WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_get(:@webMockNetHTTP)
Net.send(:const_set, :HTTP, mocked_http)
- WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_set('@webMockNetHTTP', mocked_http)
+ WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_set(:@webMockNetHTTP, mocked_http)
# Reload Gitlab::NetHttpAdapter
Gitlab.send(:remove_const, :NetHttpAdapter)
@@ -72,7 +72,7 @@ RSpec.describe Gitlab::HTTP do
after(:all) do
Net.send(:remove_const, :HTTP)
Net.send(:const_set, :HTTP, @original_net_http)
- WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_set('@webMockNetHTTP', @webmock_net_http)
+ WebMock::HttpLibAdapters::NetHttpAdapter.instance_variable_set(:@webMockNetHTTP, @webmock_net_http)
# Reload Gitlab::NetHttpAdapter
Gitlab.send(:remove_const, :NetHttpAdapter)
diff --git a/spec/lib/gitlab/import_export/all_models.yml b/spec/lib/gitlab/import_export/all_models.yml
index b34399d20f1..8750bf4387c 100644
--- a/spec/lib/gitlab/import_export/all_models.yml
+++ b/spec/lib/gitlab/import_export/all_models.yml
@@ -422,7 +422,9 @@ project:
- wiki_page_hooks_integrations
- deployment_hooks_integrations
- alert_hooks_integrations
+- incident_hooks_integrations
- vulnerability_hooks_integrations
+- apple_app_store_integration
- campfire_integration
- confluence_integration
- datadog_integration
@@ -482,6 +484,8 @@ project:
- project_repository
- users
- requesters
+- namespace_members
+- namespace_requesters
- deploy_keys_projects
- deploy_keys
- users_star_projects
@@ -664,6 +668,7 @@ project:
- pipeline_metadata
- disable_download_button
- dependency_list_exports
+- sbom_occurrences
award_emoji:
- awardable
- user
@@ -679,8 +684,6 @@ timelogs:
- note
push_event_payload:
- event
-issuable_severity:
-- issue
issue_assignees:
- issue
- assignee
@@ -705,6 +708,7 @@ metrics:
resource_label_events:
- user
- issue
+- work_item
- merge_request
- epic
- label
@@ -857,11 +861,13 @@ approvals:
resource_milestone_events:
- user
- issue
+ - work_item
- merge_request
- milestone
resource_state_events:
- user
- issue
+ - work_item
- merge_request
- source_merge_request
- epic
@@ -874,6 +880,7 @@ iteration:
resource_iteration_events:
- user
- issue
+ - work_item
- merge_request
- iteration
iterations_cadence:
diff --git a/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb b/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb
index 4ee825c71b6..a8b4b9a6f05 100644
--- a/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/base/relation_object_saver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver do
+RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver, feature_category: :importers do
let(:project) { create(:project) }
let(:relation_object) { build(:issue, project: project) }
let(:relation_definition) { {} }
@@ -34,6 +34,7 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver do
it 'saves relation object with subrelations' do
expect(relation_object.notes).to receive(:<<).and_call_original
+ expect(relation_object).to receive(:save).and_call_original
saver.execute
@@ -80,6 +81,7 @@ RSpec.describe Gitlab::ImportExport::Base::RelationObjectSaver do
it 'saves valid subrelations and logs invalid subrelation' do
expect(relation_object.notes).to receive(:<<).twice.and_call_original
+ expect(relation_object).to receive(:save).and_call_original
expect(Gitlab::Import::Logger)
.to receive(:info)
.with(
diff --git a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
index ce888b71d5e..f18d9e64f52 100644
--- a/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
+++ b/spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::ImportExport::FastHashSerializer do
+RSpec.describe Gitlab::ImportExport::FastHashSerializer, :with_license do
# FastHashSerializer#execute generates the hash which is not easily accessible
# and includes `JSONBatchRelation` items which are serialized at this point.
# Wrapping the result into JSON generating/parsing is for making
diff --git a/spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb b/spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb
deleted file mode 100644
index a5b03974bc0..00000000000
--- a/spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb
+++ /dev/null
@@ -1,153 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::ImportExport::Group::LegacyTreeRestorer do
- include ImportExport::CommonUtil
-
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
-
- describe 'restore group tree' do
- before_all do
- # Using an admin for import, so we can check assignment of existing members
- user = create(:admin, email: 'root@gitlabexample.com')
- create(:user, email: 'adriene.mcclure@gitlabexample.com')
- create(:user, email: 'gwendolyn_robel@gitlabexample.com')
-
- RSpec::Mocks.with_temporary_scope do
- @group = create(:group, name: 'group', path: 'group')
- @shared = Gitlab::ImportExport::Shared.new(@group)
-
- setup_import_export_config('group_exports/complex')
-
- group_tree_restorer = described_class.new(user: user, shared: @shared, group: @group, group_hash: nil)
-
- @restored_group_json = group_tree_restorer.restore
- end
- end
-
- context 'JSON' do
- it 'restores models based on JSON' do
- expect(@restored_group_json).to be_truthy
- end
-
- it 'has the group description' do
- expect(Group.find_by_path('group').description).to eq('Group Description')
- end
-
- it 'has group labels' do
- expect(@group.labels.count).to eq(10)
- end
-
- context 'issue boards' do
- it 'has issue boards' do
- expect(@group.boards.count).to eq(1)
- end
-
- it 'has board label lists' do
- lists = @group.boards.find_by(name: 'first board').lists
-
- expect(lists.count).to eq(3)
- expect(lists.first.label.title).to eq('TSL')
- expect(lists.second.label.title).to eq('Sosync')
- end
- end
-
- it 'has badges' do
- expect(@group.badges.count).to eq(1)
- end
-
- it 'has milestones' do
- expect(@group.milestones.count).to eq(5)
- end
-
- it 'has group children' do
- expect(@group.children.count).to eq(2)
- end
-
- it 'has group members' do
- expect(@group.members.map(&:user).map(&:email)).to contain_exactly('root@gitlabexample.com', 'adriene.mcclure@gitlabexample.com', 'gwendolyn_robel@gitlabexample.com')
- end
- end
- end
-
- context 'excluded attributes' do
- let!(:source_user) { create(:user, id: 123) }
- let!(:importer_user) { create(:user) }
- let(:group) { create(:group) }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: importer_user, shared: shared, group: group, group_hash: nil) }
- let(:group_json) { Gitlab::Json.parse(File.read(File.join(shared.export_path, 'group.json'))) }
-
- shared_examples 'excluded attributes' do
- excluded_attributes = %w[
- id
- owner_id
- parent_id
- created_at
- updated_at
- runners_token
- runners_token_encrypted
- saml_discovery_token
- ]
-
- before do
- group.add_owner(importer_user)
-
- setup_import_export_config('group_exports/complex')
- end
-
- excluded_attributes.each do |excluded_attribute|
- it 'does not allow override of excluded attributes' do
- expect(group_json[excluded_attribute]).not_to eq(group.public_send(excluded_attribute))
- end
- end
- end
-
- include_examples 'excluded attributes'
- end
-
- context 'group.json file access check' do
- let(:user) { create(:user) }
- let!(:group) { create(:group, name: 'group2', path: 'group2') }
- let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group, group_hash: nil) }
- let(:restored_group_json) { group_tree_restorer.restore }
-
- it 'does not read a symlink' do
- Dir.mktmpdir do |tmpdir|
- setup_symlink(tmpdir, 'group.json')
- allow(shared).to receive(:export_path).and_call_original
-
- expect(group_tree_restorer.restore).to eq(false)
- expect(shared.errors).to include('Incorrect JSON format')
- end
- end
- end
-
- context 'group visibility levels' do
- let(:user) { create(:user) }
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_restorer) { described_class.new(user: user, shared: shared, group: group, group_hash: nil) }
-
- before do
- setup_import_export_config(filepath)
-
- group_tree_restorer.restore
- end
-
- shared_examples 'with visibility level' do |visibility_level, expected_visibilities|
- context "when visibility level is #{visibility_level}" do
- let(:group) { create(:group, visibility_level) }
- let(:filepath) { "group_exports/visibility_levels/#{visibility_level}" }
-
- it "imports all subgroups as #{visibility_level}" do
- expect(group.children.map(&:visibility_level)).to match_array(expected_visibilities)
- end
- end
- end
-
- include_examples 'with visibility level', :public, [20, 10, 0]
- include_examples 'with visibility level', :private, [0, 0, 0]
- include_examples 'with visibility level', :internal, [10, 10, 0]
- end
-end
diff --git a/spec/lib/gitlab/import_export/group/legacy_tree_saver_spec.rb b/spec/lib/gitlab/import_export/group/legacy_tree_saver_spec.rb
deleted file mode 100644
index f5a4fc79c90..00000000000
--- a/spec/lib/gitlab/import_export/group/legacy_tree_saver_spec.rb
+++ /dev/null
@@ -1,159 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Gitlab::ImportExport::Group::LegacyTreeSaver do
- describe 'saves the group tree into a json object' do
- let(:shared) { Gitlab::ImportExport::Shared.new(group) }
- let(:group_tree_saver) { described_class.new(group: group, current_user: user, shared: shared) }
- let(:export_path) { "#{Dir.tmpdir}/group_tree_saver_spec" }
- let(:user) { create(:user) }
- let!(:group) { setup_group }
-
- before do
- group.add_maintainer(user)
- allow(Gitlab::ImportExport).to receive(:storage_path).and_return(export_path)
- end
-
- after do
- FileUtils.rm_rf(export_path)
- end
-
- it 'saves group successfully' do
- expect(group_tree_saver.save).to be true
- end
-
- # It is mostly duplicated in
- # `spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb`
- # except:
- # context 'with description override' do
- # context 'group members' do
- # ^ These are specific for the Group::LegacyTreeSaver
- context 'JSON' do
- let(:saved_group_json) do
- group_tree_saver.save # rubocop:disable Rails/SaveBang
- group_json(group_tree_saver.full_path)
- end
-
- it 'saves the correct json' do
- expect(saved_group_json).to include({ 'description' => 'description' })
- end
-
- it 'has milestones' do
- expect(saved_group_json['milestones']).not_to be_empty
- end
-
- it 'has labels' do
- expect(saved_group_json['labels']).not_to be_empty
- end
-
- it 'has boards' do
- expect(saved_group_json['boards']).not_to be_empty
- end
-
- it 'has board label list' do
- expect(saved_group_json['boards'].first['lists']).not_to be_empty
- end
-
- it 'has group members' do
- expect(saved_group_json['members']).not_to be_empty
- end
-
- it 'has priorities associated to labels' do
- expect(saved_group_json['labels'].first['priorities']).not_to be_empty
- end
-
- it 'has badges' do
- expect(saved_group_json['badges']).not_to be_empty
- end
-
- context 'group children' do
- let(:children) { group.children }
-
- it 'exports group children' do
- expect(saved_group_json['children'].length).to eq(children.count)
- end
-
- it 'exports group children of children' do
- expect(saved_group_json['children'].first['children'].length).to eq(children.first.children.count)
- end
- end
-
- context 'group members' do
- let(:user2) { create(:user, email: 'group@member.com') }
- let(:member_emails) do
- saved_group_json['members'].map do |pm|
- pm['user']['public_email']
- end
- end
-
- before do
- user2.update!(public_email: user2.email)
- group.add_developer(user2)
- end
-
- it 'exports group members as group owner' do
- group.add_owner(user)
-
- expect(member_emails).to include('group@member.com')
- end
-
- context 'as admin' do
- let(:user) { create(:admin) }
-
- it 'exports group members as admin' do
- expect(member_emails).to include('group@member.com')
- end
-
- it 'exports group members' do
- member_types = saved_group_json['members'].map { |pm| pm['source_type'] }
-
- expect(member_types).to all(eq('Namespace'))
- end
- end
- end
-
- context 'group attributes' do
- shared_examples 'excluded attributes' do
- excluded_attributes = %w[
- id
- owner_id
- parent_id
- created_at
- updated_at
- runners_token
- runners_token_encrypted
- saml_discovery_token
- ]
-
- excluded_attributes.each do |excluded_attribute|
- it 'does not contain excluded attribute' do
- expect(saved_group_json).not_to include(excluded_attribute => group.public_send(excluded_attribute))
- end
- end
- end
-
- include_examples 'excluded attributes'
- end
- end
- end
-
- def setup_group
- group = create(:group, description: 'description')
- sub_group = create(:group, description: 'description', parent: group)
- create(:group, description: 'description', parent: sub_group)
- create(:milestone, group: group)
- create(:group_badge, group: group)
- group_label = create(:group_label, group: group)
- create(:label_priority, label: group_label, priority: 1)
- board = create(:board, group: group, milestone_id: Milestone::Upcoming.id)
- create(:list, board: board, label: group_label)
- create(:group_badge, group: group)
-
- group
- end
-
- def group_json(filename)
- ::JSON.parse(File.read(filename))
- end
-end
diff --git a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
index 15108d28bf2..74b6e039601 100644
--- a/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
+++ b/spec/lib/gitlab/import_export/project/tree_saver_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::ImportExport::Project::TreeSaver do
+RSpec.describe Gitlab::ImportExport::Project::TreeSaver, :with_license do
let_it_be(:export_path) { "#{Dir.tmpdir}/project_tree_saver_spec" }
let_it_be(:exportable_path) { 'project' }
let_it_be(:user) { create(:user) }
diff --git a/spec/lib/gitlab/import_export/safe_model_attributes.yml b/spec/lib/gitlab/import_export/safe_model_attributes.yml
index 75d980cd5f4..e14e929faf3 100644
--- a/spec/lib/gitlab/import_export/safe_model_attributes.yml
+++ b/spec/lib/gitlab/import_export/safe_model_attributes.yml
@@ -702,7 +702,9 @@ Badge:
ProjectCiCdSetting:
- group_runners_enabled
- runner_token_expiration_interval
+- default_git_depth
ProjectSetting:
+- squash_option
- allow_merge_on_skipped_pipeline
- only_allow_merge_if_all_status_checks_passed
- has_confluence
@@ -916,6 +918,7 @@ PushRule:
- reject_unsigned_commits
- commit_committer_check
- regexp_uses_re2
+ - reject_non_dco_commits
MergeRequest::CleanupSchedule:
- id
- scheduled_at
diff --git a/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb b/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb
index ebb0d62afa0..e348e8f7991 100644
--- a/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb
+++ b/spec/lib/gitlab/import_export/snippets_repo_restorer_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::ImportExport::SnippetsRepoRestorer do
+RSpec.describe Gitlab::ImportExport::SnippetsRepoRestorer, :clean_gitlab_redis_repository_cache, feature_category: :importers do
describe 'bundle a snippet Git repo' do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, namespace: user.namespace) }
@@ -26,9 +26,18 @@ RSpec.describe Gitlab::ImportExport::SnippetsRepoRestorer do
shared_examples 'imports snippet repositories' do
before do
snippet1.snippet_repository&.delete
+ # We need to explicitly invalidate repository.exists? from cache by calling repository.expire_exists_cache.
+ # Previously, we didn't have to do this because snippet1.repository_exists? would hit Rails.cache, which is a
+ # NullStore, so cache.read would always return false.
+ # Now, since we are using a separate instance of Redis, i.e. Gitlab::Redis::RepositoryCache,
+ # snippet.repository_exists? would still be true because snippet.repository.remove doesn't invalidate the
+ # cache (snippet.repository.remove only makes a gRPC call to Gitaly).
+ # See https://gitlab.com/gitlab-org/gitlab/-/merge_requests/107232#note_1214358593 for more.
+ snippet1.repository.expire_exists_cache
snippet1.repository.remove
snippet2.snippet_repository&.delete
+ snippet2.repository.expire_exists_cache
snippet2.repository.remove
end
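
The comment added above explains why expire_exists_cache has to be called before repository.remove once exists? is cached in a real Redis-backed store instead of a NullStore. A small self-contained sketch of that staleness problem, assuming hypothetical CachedRepository and SimpleCache classes (not GitLab's implementation):

    # Hypothetical sketch: a cached exists? flag goes stale after remove
    # unless it is expired explicitly, as the comment above explains.
    class SimpleCache
      def initialize
        @store = {}
      end

      def fetch(key)
        return @store[key] if @store.key?(key)

        @store[key] = yield
      end

      def delete(key)
        @store.delete(key)
      end
    end

    class CachedRepository
      def initialize(cache)
        @cache = cache     # stands in for Gitlab::Redis::RepositoryCache
        @on_disk = true
      end

      def exists?
        @cache.fetch('exists') { @on_disk }
      end

      def remove
        @on_disk = false   # like the Gitaly gRPC call: does not touch the cache
      end

      def expire_exists_cache
        @cache.delete('exists')
      end
    end

    repo = CachedRepository.new(SimpleCache.new)
    repo.exists?              # => true, and now cached
    repo.remove
    repo.exists?              # => still true: stale cache entry
    repo.expire_exists_cache
    repo.exists?              # => false
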
diff --git a/spec/lib/gitlab/import_export/version_checker_spec.rb b/spec/lib/gitlab/import_export/version_checker_spec.rb
index 14c62edb786..b3730d85f13 100644
--- a/spec/lib/gitlab/import_export/version_checker_spec.rb
+++ b/spec/lib/gitlab/import_export/version_checker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::ImportExport::VersionChecker do
+RSpec.describe Gitlab::ImportExport::VersionChecker, feature_category: :import do
include ImportExport::CommonUtil
let!(:shared) { Gitlab::ImportExport::Shared.new(nil) }
diff --git a/spec/lib/gitlab/instrumentation_helper_spec.rb b/spec/lib/gitlab/instrumentation_helper_spec.rb
index 7d78d25f18e..ce67d1d0297 100644
--- a/spec/lib/gitlab/instrumentation_helper_spec.rb
+++ b/spec/lib/gitlab/instrumentation_helper_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
require 'rspec-parameterized'
require 'support/helpers/rails_helpers'
-RSpec.describe Gitlab::InstrumentationHelper do
+RSpec.describe Gitlab::InstrumentationHelper, :clean_gitlab_redis_repository_cache, :clean_gitlab_redis_cache,
+ feature_category: :scalability do
using RSpec::Parameterized::TableSyntax
describe '.add_instrumentation_data', :request_store do
@@ -22,19 +23,42 @@ RSpec.describe Gitlab::InstrumentationHelper do
expect(payload).to include(db_count: 0, db_cached_count: 0, db_write_count: 0)
end
- context 'when Gitaly calls are made' do
- it 'adds Gitaly data and omits Redis data' do
- project = create(:project)
- RequestStore.clear!
- project.repository.exists?
+ shared_examples 'make Gitaly calls' do
+ context 'when Gitaly calls are made' do
+ it 'adds Gitaly and Redis data' do
+ project = create(:project)
+ RequestStore.clear!
+ project.repository.exists?
- subject
+ subject
- expect(payload[:gitaly_calls]).to eq(1)
- expect(payload[:gitaly_duration_s]).to be >= 0
- expect(payload[:redis_calls]).to be_nil
- expect(payload[:redis_duration_ms]).to be_nil
+ expect(payload[:gitaly_calls]).to eq(1)
+ expect(payload[:gitaly_duration_s]).to be >= 0
+ # With MultiStore, the number of `redis_calls` depends on whether primary_store
+ # (Gitlab::Redis::RepositoryCache) and secondary_store (Gitlab::Redis::Cache) are the same instance.
+ # In GitLab.com CI, primary and secondary are the same instance, so only 1 call is made. If primary
+ # and secondary are different instances, an additional fallback read to secondary_store is made because
+ # the first `get` call is a cache miss, and the `redis_calls` expectation below would then fail.
+ expect(payload[:redis_calls]).to eq(1)
+ expect(payload[:redis_duration_ms]).to be_nil
+ end
+ end
+ end
+
+ context 'when multistore ff use_primary_and_secondary_stores_for_repository_cache is enabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: true)
end
+
+ it_behaves_like 'make Gitaly calls'
+ end
+
+ context 'when multistore ff use_primary_and_secondary_stores_for_repository_cache is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: false)
+ end
+
+ it_behaves_like 'make Gitaly calls'
end
context 'when Redis calls are made' do
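
The comment and the two feature-flag contexts added above rest on one idea: a MultiStore read hits the primary store first and only falls back to the secondary on a miss, so the instrumented redis_calls count depends on whether both stores point at the same Redis instance. A hedged standalone sketch of that counting behaviour, with a hypothetical InstrumentedMultiStore class and plain Hashes standing in for Redis:

    # Hypothetical sketch: a read against the primary store counts as one
    # call; a miss adds a fallback read against the secondary store.
    class InstrumentedMultiStore
      attr_reader :redis_calls

      def initialize(primary, secondary)
        @primary = primary
        @secondary = secondary
        @redis_calls = 0
      end

      def get(key)
        @redis_calls += 1
        value = @primary[key]
        return value unless value.nil?

        @redis_calls += 1          # only on a primary cache miss
        @secondary[key]
      end
    end

    same = { 'exists' => '1' }
    store = InstrumentedMultiStore.new(same, same)
    store.get('exists')
    store.redis_calls # => 1 (primary hit, no fallback)

    store = InstrumentedMultiStore.new({}, { 'exists' => '1' })
    store.get('exists')
    store.redis_calls # => 2 (primary miss, fallback read on the secondary)
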
diff --git a/spec/lib/gitlab/memory/reporter_spec.rb b/spec/lib/gitlab/memory/reporter_spec.rb
index 924397ceb4f..64ae740a5d7 100644
--- a/spec/lib/gitlab/memory/reporter_spec.rb
+++ b/spec/lib/gitlab/memory/reporter_spec.rb
@@ -26,15 +26,15 @@ RSpec.describe Gitlab::Memory::Reporter, :aggregate_failures, feature_category:
FileUtils.rm_rf(reports_path)
end
- describe '#run_report', time_travel_to: '2020-02-02 10:30:45 0000' do
+ describe '#run_report', time_travel_to: '2020-02-02 10:30:45 +0000' do
let(:report_duration_counter) { instance_double(::Prometheus::Client::Counter) }
let(:file_size) { 1_000_000 }
let(:report_file) { "#{reports_path}/fake_report.2020-02-02.10:30:45:000.worker_1.abc123.gz" }
-
- let(:input) { StringIO.new }
- let(:output) { StringIO.new }
+ let(:output) { File.read(report_file) }
before do
+ stub_const('Gitlab::Memory::Reporter::COMPRESS_CMD', %w[cat])
+
allow(SecureRandom).to receive(:uuid).and_return('abc123')
allow(Gitlab::Metrics).to receive(:counter).and_return(report_duration_counter)
@@ -44,22 +44,13 @@ RSpec.describe Gitlab::Memory::Reporter, :aggregate_failures, feature_category:
allow(File).to receive(:size).with(report_file).and_return(file_size)
allow(logger).to receive(:info)
-
- stub_gzip
end
shared_examples 'runs and stores reports' do
it 'runs the given report and returns true' do
expect(reporter.run_report(report)).to be(true)
- expect(output.string).to eq('I ran')
- end
-
- it 'closes read and write streams' do
- expect(input).to receive(:close).ordered.at_least(:once)
- expect(output).to receive(:close).ordered.at_least(:once)
-
- reporter.run_report(report)
+ expect(output).to eq('I ran')
end
it 'logs start and finish event' do
@@ -111,39 +102,47 @@ RSpec.describe Gitlab::Memory::Reporter, :aggregate_failures, feature_category:
end
context 'when an error occurs' do
- before do
- allow(report).to receive(:run).and_raise(RuntimeError.new('report failed'))
- end
+ shared_examples 'handles errors gracefully' do
+ it 'logs the error and returns false' do
+ expect(logger).to receive(:info).ordered.with(hash_including(message: 'started'))
+ expect(logger).to receive(:error).ordered.with(
+ hash_including(
+ message: 'failed', error: match(error_message)
+ ))
+
+ expect(reporter.run_report(report)).to be(false)
+ end
+
+ context 'when compression process is still running' do
+ it 'terminates the process' do
+ allow(logger).to receive(:info)
+ allow(logger).to receive(:error)
- it 'logs the error and returns false' do
- expect(logger).to receive(:info).ordered.with(hash_including(message: 'started'))
- expect(logger).to receive(:error).ordered.with(
- hash_including(
- message: 'failed', error: '#<RuntimeError: report failed>'
- ))
+ expect(Gitlab::ProcessManagement).to receive(:signal).with(an_instance_of(Integer), :KILL)
- expect(reporter.run_report(report)).to be(false)
+ reporter.run_report(report)
+ end
+ end
end
- it 'closes read and write streams' do
- allow(logger).to receive(:info)
- allow(logger).to receive(:error)
+ context 'when cause was an error being raised' do
+ let(:error_message) { 'report failed' }
- expect(input).to receive(:close).ordered.at_least(:once)
- expect(output).to receive(:close).ordered.at_least(:once)
+ before do
+ allow(report).to receive(:run).and_raise(RuntimeError.new('report failed'))
+ end
- reporter.run_report(report)
+ it_behaves_like 'handles errors gracefully'
end
- context 'when compression process is still running' do
- it 'terminates the process' do
- allow(logger).to receive(:info)
- allow(logger).to receive(:error)
+ context 'when cause was compression command failing' do
+ let(:error_message) { "StandardError: exit 1: cat:" }
- expect(Gitlab::ProcessManagement).to receive(:signal).with(an_instance_of(Integer), :KILL)
-
- reporter.run_report(report)
+ before do
+ stub_const('Gitlab::Memory::Reporter::COMPRESS_CMD', %w[cat --bad-flag])
end
+
+ it_behaves_like 'handles errors gracefully'
end
end
@@ -191,16 +190,4 @@ RSpec.describe Gitlab::Memory::Reporter, :aggregate_failures, feature_category:
it_behaves_like 'runs and stores reports'
end
end
-
- # We need to stub out the call into gzip. We do this by intercepting the write
- # end of the pipe and replacing it with a StringIO instead, which we can
- # easily inspect for contents.
- def stub_gzip
- pid = 42
- allow(IO).to receive(:pipe).and_return([input, output])
- allow(Process).to receive(:spawn).with(
- "gzip", "--fast", in: input, out: an_instance_of(File), err: an_instance_of(IO)
- ).and_return(pid)
- allow(Process).to receive(:waitpid).with(pid)
- end
end
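
The rewritten setup above drops the old stub_gzip helper and instead swaps COMPRESS_CMD for cat (or cat --bad-flag in the failure case), so the spec exercises the real spawn-and-pipe plumbing while keeping the written report readable. A minimal sketch of that pattern, assuming a hypothetical Compressor class rather than GitLab's Gitlab::Memory::Reporter:

    # Hypothetical sketch: pipe report text through an external command;
    # swapping gzip for cat keeps the output readable in tests.
    require 'tmpdir'

    class Compressor
      COMPRESS_CMD = %w[gzip --fast].freeze

      def self.write_report(text, path, cmd: COMPRESS_CMD)
        File.open(path, 'w') do |out|
          IO.popen(cmd, 'w', out: out) { |stdin| stdin.write(text) }
        end
        raise "exit #{$?.exitstatus}: #{cmd.join(' ')}" unless $?.success?

        path
      end
    end

    Dir.mktmpdir do |dir|
      path = File.join(dir, 'fake_report.gz')
      Compressor.write_report('I ran', path, cmd: %w[cat]) # test-friendly "compression"
      File.read(path)                                      # => "I ran"
    end
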
diff --git a/spec/lib/gitlab/memory/watchdog_spec.rb b/spec/lib/gitlab/memory/watchdog_spec.rb
index 1603dda0c39..0b2f24476d9 100644
--- a/spec/lib/gitlab/memory/watchdog_spec.rb
+++ b/spec/lib/gitlab/memory/watchdog_spec.rb
@@ -98,7 +98,8 @@ RSpec.describe Gitlab::Memory::Watchdog, :aggregate_failures, feature_category:
expect(reporter).to receive(:stopped).once
.with(
memwd_handler_class: handler.class.name,
- memwd_sleep_time_s: sleep_time_seconds
+ memwd_sleep_time_s: sleep_time_seconds,
+ memwd_reason: 'background task stopped'
)
watchdog.call
diff --git a/spec/lib/gitlab/merge_requests/message_generator_spec.rb b/spec/lib/gitlab/merge_requests/message_generator_spec.rb
index 59aaffc4377..ac9a9aa2897 100644
--- a/spec/lib/gitlab/merge_requests/message_generator_spec.rb
+++ b/spec/lib/gitlab/merge_requests/message_generator_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::MergeRequests::MessageGenerator, feature_category: :code_review do
+RSpec.describe Gitlab::MergeRequests::MessageGenerator, feature_category: :code_review_workflow do
let(:merge_commit_template) { nil }
let(:squash_commit_template) { nil }
let(:project) do
diff --git a/spec/lib/gitlab/observability_spec.rb b/spec/lib/gitlab/observability_spec.rb
index 2b1d22d9019..8068d2f8ec9 100644
--- a/spec/lib/gitlab/observability_spec.rb
+++ b/spec/lib/gitlab/observability_spec.rb
@@ -1,6 +1,6 @@
# frozen_string_literal: true
-require 'fast_spec_helper'
+require 'spec_helper'
RSpec.describe Gitlab::Observability do
describe '.observability_url' do
@@ -30,4 +30,39 @@ RSpec.describe Gitlab::Observability do
it { is_expected.to eq(observe_url) }
end
end
+
+ describe '.observability_enabled?' do
+ let_it_be(:group) { build(:group) }
+ let_it_be(:user) { build(:user) }
+
+ subject do
+ described_class.observability_enabled?(user, group)
+ end
+
+ it 'checks if read_observability ability is allowed for the given user and group' do
+ allow(Ability).to receive(:allowed?).and_return(true)
+
+ subject
+
+ expect(Ability).to have_received(:allowed?).with(user, :read_observability, group)
+ end
+
+ it 'returns true if the read_observability ability is allowed' do
+ allow(Ability).to receive(:allowed?).and_return(true)
+
+ expect(subject).to eq(true)
+ end
+
+ it 'returns false if the read_observability ability is not allowed' do
+ allow(Ability).to receive(:allowed?).and_return(false)
+
+ expect(subject).to eq(false)
+ end
+
+ it 'returns false if observability url is missing' do
+ allow(described_class).to receive(:observability_url).and_return("")
+
+ expect(subject).to eq(false)
+ end
+ end
end
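
Taken together, the new examples pin down .observability_enabled? as: false when the observability URL is blank, otherwise whatever Ability.allowed?(user, :read_observability, group) returns. A hedged standalone sketch of that shape; the Ability stand-in and the placeholder URL below are assumptions, not GitLab's code:

    # Hypothetical sketch of the behaviour the examples above describe.
    module Ability
      def self.allowed?(_user, _action, _subject)
        true # stand-in for the real permission check
      end
    end

    module Observability
      def self.observability_url
        'https://observe.gitlab.com' # placeholder; blank means the feature is off
      end

      def self.observability_enabled?(user, group)
        return false if observability_url.to_s.empty?

        Ability.allowed?(user, :read_observability, group)
      end
    end

    User  = Struct.new(:name)
    Group = Struct.new(:path)

    Observability.observability_enabled?(User.new('alice'), Group.new('gitlab-org')) # => true
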
diff --git a/spec/lib/gitlab/pages/cache_control_spec.rb b/spec/lib/gitlab/pages/cache_control_spec.rb
index d46124e0e16..dd15aa87441 100644
--- a/spec/lib/gitlab/pages/cache_control_spec.rb
+++ b/spec/lib/gitlab/pages/cache_control_spec.rb
@@ -3,20 +3,23 @@
require 'spec_helper'
RSpec.describe Gitlab::Pages::CacheControl, feature_category: :pages do
- describe '.for_namespace' do
- subject(:cache_control) { described_class.for_namespace(1) }
+ RSpec.shared_examples 'cache_control' do |type|
+ it { expect(subject.cache_key).to match(/pages_domain_for_#{type}_1_*/) }
- it { expect(subject.cache_key).to match(/pages_domain_for_namespace_1_*/) }
+ describe '#clear_cache', :use_clean_rails_redis_caching do
+ before do
+ Rails.cache.write("pages_domain_for_#{type}_1", ['settings-hash'])
+ Rails.cache.write("pages_domain_for_#{type}_1_settings-hash", 'payload')
+ end
- describe '#clear_cache' do
it 'clears the cache' do
expect(Rails.cache)
.to receive(:delete_multi)
.with(
array_including(
[
- "pages_domain_for_namespace_1",
- /pages_domain_for_namespace_1_*/
+ "pages_domain_for_#{type}_1",
+ "pages_domain_for_#{type}_1_settings-hash"
]
))
@@ -25,63 +28,53 @@ RSpec.describe Gitlab::Pages::CacheControl, feature_category: :pages do
end
end
- describe '.for_project' do
- subject(:cache_control) { described_class.for_project(1) }
+ describe '.for_namespace' do
+ subject(:cache_control) { described_class.for_namespace(1) }
- it { expect(subject.cache_key).to match(/pages_domain_for_project_1_*/) }
+ it_behaves_like 'cache_control', 'namespace'
+ end
- describe '#clear_cache' do
- it 'clears the cache' do
- expect(Rails.cache)
- .to receive(:delete_multi)
- .with(
- array_including(
- [
- "pages_domain_for_project_1",
- /pages_domain_for_project_1_*/
- ]
- ))
+ describe '.for_domain' do
+ subject(:cache_control) { described_class.for_domain(1) }
- subject.clear_cache
- end
- end
+ it_behaves_like 'cache_control', 'domain'
end
describe '#cache_key' do
it 'does not change the pages config' do
- expect { described_class.new(type: :project, id: 1).cache_key }
+ expect { described_class.new(type: :domain, id: 1).cache_key }
.not_to change(Gitlab.config, :pages)
end
it 'is based on pages settings' do
access_control = Gitlab.config.pages.access_control
- cache_key = described_class.new(type: :project, id: 1).cache_key
+ cache_key = described_class.new(type: :domain, id: 1).cache_key
stub_config(pages: { access_control: !access_control })
- expect(described_class.new(type: :project, id: 1).cache_key).not_to eq(cache_key)
+ expect(described_class.new(type: :domain, id: 1).cache_key).not_to eq(cache_key)
end
it 'is based on the force_pages_access_control settings' do
force_pages_access_control = ::Gitlab::CurrentSettings.force_pages_access_control
- cache_key = described_class.new(type: :project, id: 1).cache_key
+ cache_key = described_class.new(type: :domain, id: 1).cache_key
::Gitlab::CurrentSettings.force_pages_access_control = !force_pages_access_control
- expect(described_class.new(type: :project, id: 1).cache_key).not_to eq(cache_key)
+ expect(described_class.new(type: :domain, id: 1).cache_key).not_to eq(cache_key)
end
it 'caches the application settings hash' do
expect(Rails.cache)
.to receive(:write)
- .with("pages_domain_for_project_1", kind_of(Set))
+ .with('pages_domain_for_domain_1', kind_of(Set))
- described_class.new(type: :project, id: 1).cache_key
+ described_class.new(type: :domain, id: 1).cache_key
end
end
it 'fails with invalid type' do
expect { described_class.new(type: :unknown, id: nil) }
- .to raise_error(ArgumentError, "type must be :namespace or :project")
+ .to raise_error(ArgumentError, 'type must be :namespace or :domain')
end
end
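
The shared example above encodes the cache layout this spec relies on: a pages_domain_for_<type>_<id> key that records the settings hashes, plus one payload key per settings hash, with #clear_cache deleting both. A hedged sketch of that two-level keying, assuming a hypothetical PagesCacheControl class and a plain Hash in place of Rails.cache:

    # Hypothetical sketch of the two-level key scheme the shared example
    # exercises: a base key listing settings hashes, plus one payload key
    # per settings hash, both removed by clear_cache.
    require 'digest'

    class PagesCacheControl
      TYPES = %i[namespace domain].freeze

      def initialize(type:, id:, cache:, settings:)
        raise ArgumentError, 'type must be :namespace or :domain' unless TYPES.include?(type)

        @prefix = "pages_domain_for_#{type}_#{id}"
        @cache = cache          # a plain Hash stands in for Rails.cache
        @settings = settings
      end

      def cache_key
        hash = Digest::SHA256.hexdigest(@settings.inspect)
        (@cache[@prefix] ||= []) << hash
        "#{@prefix}_#{hash}"
      end

      def clear_cache
        hashes = @cache.delete(@prefix) || []
        hashes.each { |h| @cache.delete("#{@prefix}_#{h}") }
      end
    end

    cache = {}
    control = PagesCacheControl.new(type: :domain, id: 1, cache: cache, settings: { access_control: true })
    cache[control.cache_key] = 'payload'
    control.clear_cache
    cache.empty? # => true
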
diff --git a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
index 879c874b134..dc62fcb4478 100644
--- a/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
+++ b/spec/lib/gitlab/pagination/cursor_based_keyset_spec.rb
@@ -10,6 +10,10 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
expect(subject.available_for_type?(Group.all)).to be_truthy
end
+ it 'returns true for Ci::Build' do
+ expect(subject.available_for_type?(Ci::Build.all)).to be_truthy
+ end
+
it 'return false for other types of relations' do
expect(subject.available_for_type?(User.all)).to be_falsey
end
@@ -29,6 +33,12 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
it { is_expected.to be false }
end
+
+ context 'when relation is Ci::Build' do
+ let(:relation) { Ci::Build.all }
+
+ it { is_expected.to be false }
+ end
end
describe '.available?' do
@@ -45,6 +55,20 @@ RSpec.describe Gitlab::Pagination::CursorBasedKeyset do
it 'return false for other types of relations' do
expect(subject.available?(cursor_based_request_context, User.all)).to be_falsey
+ expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_falsey
+ end
+ end
+
+ context 'with order-by id desc' do
+ let(:order_by) { :id }
+ let(:sort) { :desc }
+
+ it 'returns true for Ci::Build' do
+ expect(subject.available?(cursor_based_request_context, Ci::Build.all)).to be_truthy
+ end
+
+ it 'returns true for AuditEvent' do
+ expect(subject.available?(cursor_based_request_context, AuditEvent.all)).to be_truthy
end
end
diff --git a/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb b/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb
index 4f1d380ab0a..e85b0354ff6 100644
--- a/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb
+++ b/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb
@@ -92,34 +92,6 @@ RSpec.describe Gitlab::Pagination::Keyset::SimpleOrderBuilder do
end
end
- context "NULLS order given as as an Arel literal" do
- context 'when NULLS LAST order is given without a tie-breaker' do
- let(:scope) { Project.order(Project.arel_table[:created_at].asc.nulls_last) }
-
- it 'sets the column definition for created_at appropriately' do
- expect(column_definition.attribute_name).to eq('created_at')
- end
-
- it 'orders by primary key' do
- expect(sql_with_order)
- .to end_with('ORDER BY "projects"."created_at" ASC NULLS LAST, "projects"."id" DESC')
- end
- end
-
- context 'when NULLS FIRST order is given with a tie-breaker' do
- let(:scope) { Issue.order(Issue.arel_table[:relative_position].desc.nulls_first).order(id: :asc) }
-
- it 'sets the column definition for created_at appropriately' do
- expect(column_definition.attribute_name).to eq('relative_position')
- end
-
- it 'orders by the given primary key' do
- expect(sql_with_order)
- .to end_with('ORDER BY "issues"."relative_position" DESC NULLS FIRST, "issues"."id" ASC')
- end
- end
- end
-
context "NULLS order given as as an Arel node" do
context 'when NULLS LAST order is given without a tie-breaker' do
let(:scope) { Project.order(Project.arel_table[:created_at].asc.nulls_last) }
diff --git a/spec/lib/gitlab/rack_attack_spec.rb b/spec/lib/gitlab/rack_attack_spec.rb
index 7ba4eab50c7..960a81b8c9d 100644
--- a/spec/lib/gitlab/rack_attack_spec.rb
+++ b/spec/lib/gitlab/rack_attack_spec.rb
@@ -35,7 +35,7 @@ RSpec.describe Gitlab::RackAttack, :aggregate_failures do
allow(fake_rack_attack).to receive(:cache).and_return(fake_cache)
allow(fake_cache).to receive(:store=)
- fake_rack_attack.const_set('Request', fake_rack_attack_request)
+ fake_rack_attack.const_set(:Request, fake_rack_attack_request)
stub_const("Rack::Attack", fake_rack_attack)
end
diff --git a/spec/lib/gitlab/redis/duplicate_jobs_spec.rb b/spec/lib/gitlab/redis/duplicate_jobs_spec.rb
index be20e6dcdaf..4d46a567032 100644
--- a/spec/lib/gitlab/redis/duplicate_jobs_spec.rb
+++ b/spec/lib/gitlab/redis/duplicate_jobs_spec.rb
@@ -14,16 +14,6 @@ RSpec.describe Gitlab::Redis::DuplicateJobs do
describe '#pool' do
subject { described_class.pool }
- before do
- redis_clear_raw_config!(Gitlab::Redis::SharedState)
- redis_clear_raw_config!(Gitlab::Redis::Queues)
- end
-
- after do
- redis_clear_raw_config!(Gitlab::Redis::SharedState)
- redis_clear_raw_config!(Gitlab::Redis::Queues)
- end
-
around do |example|
clear_pool
example.run
diff --git a/spec/lib/gitlab/redis/multi_store_spec.rb b/spec/lib/gitlab/redis/multi_store_spec.rb
index 0e7eedf66b1..f198ba90d0a 100644
--- a/spec/lib/gitlab/redis/multi_store_spec.rb
+++ b/spec/lib/gitlab/redis/multi_store_spec.rb
@@ -25,7 +25,9 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
let_it_be(:instance_name) { 'TestStore' }
let_it_be(:multi_store) { described_class.new(primary_store, secondary_store, instance_name) }
- subject { multi_store.send(name, *args) }
+ subject do
+ multi_store.send(name, *args)
+ end
before do
skip_feature_flags_yaml_validation
@@ -108,34 +110,93 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
end
+ # rubocop:disable RSpec/MultipleMemoizedHelpers
context 'with READ redis commands' do
+ subject do
+ multi_store.send(name, *args, **kwargs)
+ end
+
let_it_be(:key1) { "redis:{1}:key_a" }
let_it_be(:key2) { "redis:{1}:key_b" }
let_it_be(:value1) { "redis_value1" }
let_it_be(:value2) { "redis_value2" }
let_it_be(:skey) { "redis:set:key" }
+ let_it_be(:skey2) { "redis:set:key2" }
+ let_it_be(:smemberargs) { [skey, value1] }
+ let_it_be(:hkey) { "redis:hash:key" }
+ let_it_be(:hkey2) { "redis:hash:key2" }
+ let_it_be(:zkey) { "redis:sortedset:key" }
+ let_it_be(:zkey2) { "redis:sortedset:key2" }
+ let_it_be(:hitem1) { "item1" }
+ let_it_be(:hitem2) { "item2" }
let_it_be(:keys) { [key1, key2] }
let_it_be(:values) { [value1, value2] }
let_it_be(:svalues) { [value2, value1] }
-
- where(:case_name, :name, :args, :value, :block) do
- 'execute :get command' | :get | ref(:key1) | ref(:value1) | nil
- 'execute :mget command' | :mget | ref(:keys) | ref(:values) | nil
- 'execute :mget with block' | :mget | ref(:keys) | ref(:values) | ->(value) { value }
- 'execute :smembers command' | :smembers | ref(:skey) | ref(:svalues) | nil
- 'execute :scard command' | :scard | ref(:skey) | 2 | nil
+ let_it_be(:hgetargs) { [hkey, hitem1] }
+ let_it_be(:hmgetval) { [value1] }
+ let_it_be(:mhmgetargs) { [hkey, hitem1] }
+ let_it_be(:hvalmapped) { { "item1" => value1 } }
+ let_it_be(:sscanargs) { [skey2, 0] }
+ let_it_be(:sscanval) { ["0", [value1]] }
+ let_it_be(:sscan_eachval) { [value1] }
+ let_it_be(:sscan_each_arg) { { match: '*1*' } }
+ let_it_be(:hscan_eachval) { [[hitem1, value1]] }
+ let_it_be(:zscan_eachval) { [[value1, 1.0]] }
+ let_it_be(:scan_each_arg) { { match: 'redis*' } }
+ let_it_be(:scan_each_val) { [key1, key2, skey, skey2, hkey, hkey2, zkey, zkey2] }
+
+ # rubocop:disable Layout/LineLength
+ where(:case_name, :name, :args, :value, :kwargs, :block) do
+ 'execute :get command' | :get | ref(:key1) | ref(:value1) | {} | nil
+ 'execute :mget command' | :mget | ref(:keys) | ref(:values) | {} | nil
+ 'execute :mget with block' | :mget | ref(:keys) | ref(:values) | {} | ->(value) { value }
+ 'execute :smembers command' | :smembers | ref(:skey) | ref(:svalues) | {} | nil
+ 'execute :scard command' | :scard | ref(:skey) | 2 | {} | nil
+ 'execute :sismember command' | :sismember | ref(:smemberargs) | true | {} | nil
+ 'execute :exists command' | :exists | ref(:key1) | 1 | {} | nil
+ 'execute :exists? command' | :exists? | ref(:key1) | true | {} | nil
+ 'execute :hget command' | :hget | ref(:hgetargs) | ref(:value1) | {} | nil
+ 'execute :hlen command' | :hlen | ref(:hkey) | 1 | {} | nil
+ 'execute :hgetall command' | :hgetall | ref(:hkey) | ref(:hvalmapped) | {} | nil
+ 'execute :hexists command' | :hexists | ref(:hgetargs) | true | {} | nil
+ 'execute :hmget command' | :hmget | ref(:hgetargs) | ref(:hmgetval) | {} | nil
+ 'execute :mapped_hmget command' | :mapped_hmget | ref(:mhmgetargs) | ref(:hvalmapped) | {} | nil
+ 'execute :sscan command' | :sscan | ref(:sscanargs) | ref(:sscanval) | {} | nil
+
+ # we run *scan_each here as they are reads too
+ 'execute :scan_each command' | :scan_each | nil | ref(:scan_each_val) | ref(:scan_each_arg) | nil
+ 'execute :sscan_each command' | :sscan_each | ref(:skey2) | ref(:sscan_eachval) | {} | nil
+ 'execute :sscan_each w block' | :sscan_each | ref(:skey) | ref(:sscan_eachval) | ref(:sscan_each_arg) | nil
+ 'execute :hscan_each command' | :hscan_each | ref(:hkey) | ref(:hscan_eachval) | {} | nil
+ 'execute :hscan_each w block' | :hscan_each | ref(:hkey2) | ref(:hscan_eachval) | ref(:sscan_each_arg) | nil
+ 'execute :zscan_each command' | :zscan_each | ref(:zkey) | ref(:zscan_eachval) | {} | nil
+ 'execute :zscan_each w block' | :zscan_each | ref(:zkey2) | ref(:zscan_eachval) | ref(:sscan_each_arg) | nil
end
+ # rubocop:enable Layout/LineLength
- before(:all) do
+ before do
primary_store.set(key1, value1)
primary_store.set(key2, value2)
- primary_store.sadd?(skey, value1)
- primary_store.sadd?(skey, value2)
+ primary_store.sadd?(skey, [value1, value2])
+ primary_store.sadd?(skey2, [value1])
+ primary_store.hset(hkey, hitem1, value1)
+ primary_store.hset(hkey2, hitem1, value1, hitem2, value2)
+ primary_store.zadd(zkey, 1, value1)
+ primary_store.zadd(zkey2, [[1, value1], [2, value2]])
secondary_store.set(key1, value1)
secondary_store.set(key2, value2)
- secondary_store.sadd?(skey, value1)
- secondary_store.sadd?(skey, value2)
+ secondary_store.sadd?(skey, [value1, value2])
+ secondary_store.sadd?(skey2, [value1])
+ secondary_store.hset(hkey, hitem1, value1)
+ secondary_store.hset(hkey2, hitem1, value1, hitem2, value2)
+ secondary_store.zadd(zkey, 1, value1)
+ secondary_store.zadd(zkey2, [[1, value1], [2, value2]])
+ end
+
+ after do
+ primary_store.flushdb
+ secondary_store.flushdb
end
RSpec.shared_examples_for 'reads correct value' do
@@ -157,7 +218,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
it 'falls back and executes on the secondary instance' do
- expect(secondary_store).to receive(name).with(*args).and_call_original
+ expect(secondary_store).to receive(name).with(*expected_args).and_call_original
subject
end
@@ -181,7 +242,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when fallback read from the secondary instance raises an exception' do
before do
- allow(secondary_store).to receive(name).with(*args).and_raise(StandardError)
+ allow(secondary_store).to receive(name).with(*expected_args).and_raise(StandardError)
end
it 'fails with exception' do
@@ -192,7 +253,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
RSpec.shared_examples_for 'secondary store' do
it 'execute on the secondary instance' do
- expect(secondary_store).to receive(name).with(*args).and_call_original
+ expect(secondary_store).to receive(name).with(*expected_args).and_call_original
subject
end
@@ -208,6 +269,8 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
with_them do
describe name.to_s do
+ let(:expected_args) { kwargs&.present? ? [*args, { **kwargs }] : Array(args) }
+
before do
allow(primary_store).to receive(name).and_call_original
allow(secondary_store).to receive(name).and_call_original
@@ -215,7 +278,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when reading from the primary is successful' do
it 'returns the correct value' do
- expect(primary_store).to receive(name).with(*args).and_call_original
+ expect(primary_store).to receive(name).with(*expected_args).and_call_original
subject
end
@@ -231,7 +294,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when reading from primary instance is raising an exception' do
before do
- allow(primary_store).to receive(name).with(*args).and_raise(StandardError)
+ allow(primary_store).to receive(name).with(*expected_args).and_raise(StandardError)
allow(Gitlab::ErrorTracking).to receive(:log_exception)
end
@@ -245,9 +308,10 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
include_examples 'fallback read from the secondary store'
end
- context 'when reading from primary instance return no value' do
+ context 'when reading from empty primary instance' do
before do
- allow(primary_store).to receive(name).and_return(nil)
+ # this ensures a cache miss without having to stub primary store
+ primary_store.flushdb
end
include_examples 'fallback read from the secondary store'
@@ -256,7 +320,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
context 'when the command is executed within pipelined block' do
subject do
multi_store.pipelined do |pipeline|
- pipeline.send(name, *args)
+ pipeline.send(name, *args, **kwargs)
end
end
@@ -266,7 +330,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
2.times do
expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
- expect(pipeline).to receive(name).with(*args).once.and_call_original
+ expect(pipeline).to receive(name).with(*expected_args).once.and_call_original
end
end
@@ -276,7 +340,7 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
if params[:block]
subject do
- multi_store.send(name, *args, &block)
+ multi_store.send(name, *expected_args, &block)
end
context 'when block is provided' do
@@ -297,6 +361,115 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
it_behaves_like 'secondary store'
end
+
+ context 'when use_primary_and_secondary_stores feature flag is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'when using secondary store as default' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'executes only on secondary redis store', :aggregate_errors do
+ expect(secondary_store).to receive(name).with(*expected_args).and_call_original
+ expect(primary_store).not_to receive(name).with(*expected_args).and_call_original
+
+ subject
+ end
+ end
+
+ context 'when using primary store as default' do
+ it 'executes only on primary redis store', :aggregate_errors do
+ expect(primary_store).to receive(name).with(*expected_args).and_call_original
+ expect(secondary_store).not_to receive(name).with(*expected_args).and_call_original
+
+ subject
+ end
+ end
+ end
+ end
+ end
+ end
+ # rubocop:enable RSpec/MultipleMemoizedHelpers
+
+ context 'with nested command in block' do
+ let(:skey) { "test_set" }
+ let(:values) { %w[{x}a {x}b {x}c] }
+
+ before do
+ primary_store.set('{x}a', 1)
+ primary_store.set('{x}b', 2)
+ primary_store.set('{x}c', 3)
+
+ secondary_store.set('{x}a', 10)
+ secondary_store.set('{x}b', 20)
+ secondary_store.set('{x}c', 30)
+ end
+
+ subject do
+ multi_store.mget(values) do |v|
+ multi_store.sadd(skey, v)
+ multi_store.scard(skey)
+ v # the block's last expression is mget's return value; return v so the cache-hit check sees the read values
+ end
+ end
+
+ RSpec.shared_examples_for 'primary instance executes block' do
+ it 'ensures primary instance is executing the block' do
+ expect(primary_store).to receive(:send).with(:mget, values).and_call_original
+ expect(primary_store).to receive(:send).with(:sadd, skey, %w[1 2 3]).and_call_original
+ expect(primary_store).to receive(:send).with(:scard, skey).and_call_original
+
+ expect(secondary_store).not_to receive(:send).with(:mget, values).and_call_original
+ expect(secondary_store).not_to receive(:send).with(:sadd, skey, %w[1 2 3]).and_call_original
+ expect(secondary_store).not_to receive(:send).with(:scard, skey).and_call_original
+
+ subject
+ end
+ end
+
+ context 'when using both stores' do
+ context 'when primary instance is default store' do
+ it_behaves_like 'primary instance executes block'
+ end
+
+ context 'when secondary instance is default store' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ # multistore read still favours the primary store
+ it_behaves_like 'primary instance executes block'
+ end
+ end
+
+ context 'when using 1 store only' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'when primary instance is default store' do
+ it_behaves_like 'primary instance executes block'
+ end
+
+ context 'when secondary instance is default store' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'ensures only secondary instance is executing the block' do
+ expect(secondary_store).to receive(:send).with(:mget, values).and_call_original
+ expect(secondary_store).to receive(:send).with(:sadd, skey, %w[10 20 30]).and_call_original
+ expect(secondary_store).to receive(:send).with(:scard, skey).and_call_original
+
+ expect(primary_store).not_to receive(:send).with(:mget, values).and_call_original
+ expect(primary_store).not_to receive(:send).with(:sadd, skey, %w[10 20 30]).and_call_original
+ expect(primary_store).not_to receive(:send).with(:scard, skey).and_call_original
+
+ subject
+ end
end
end
end
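
The reworked READ table above now carries keyword arguments (the match: option for the *scan_each family), and the new expected_args helper rebuilds [*args, { **kwargs }] so the message expectations match how the command is actually dispatched; the nested-block context then checks that a block given to mget runs against the favoured (primary) store only. A hedged standalone sketch of that dispatch-with-fallback shape, using a hypothetical TinyMultiStore and plain Ruby objects instead of Redis:

    # Hypothetical sketch: forward positional and keyword arguments to the
    # primary store, fall back to the secondary on a miss, and hand the
    # result to an optional block exactly once.
    class TinyMultiStore
      def initialize(primary, secondary)
        @primary = primary
        @secondary = secondary
      end

      def read_command(name, *args, **kwargs, &block)
        value = @primary.public_send(name, *args, **kwargs)
        value = @secondary.public_send(name, *args, **kwargs) if blank?(value)
        block ? block.call(value) : value
      end

      private

      def blank?(value)
        value.nil? || (value.respond_to?(:empty?) && value.empty?)
      end
    end

    empty_store    = Class.new { def get(_key) = nil }.new
    fallback_store = Class.new { def get(_key) = 'fallback-value' }.new

    store = TinyMultiStore.new(empty_store, fallback_store)
    store.read_command(:get, 'missing-key')                  # => "fallback-value"
    store.read_command(:get, 'missing-key') { |v| v.upcase } # => "FALLBACK-VALUE"
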
@@ -316,9 +489,17 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
end
+ # rubocop:disable RSpec/MultipleMemoizedHelpers
context 'with WRITE redis commands' do
+ let_it_be(:ikey1) { "counter1" }
+ let_it_be(:ikey2) { "counter2" }
+ let_it_be(:iargs) { [ikey2, 3] }
+ let_it_be(:ivalue1) { "1" }
+ let_it_be(:ivalue2) { "3" }
let_it_be(:key1) { "redis:{1}:key_a" }
let_it_be(:key2) { "redis:{1}:key_b" }
+ let_it_be(:key3) { "redis:{1}:key_c" }
+ let_it_be(:key4) { "redis:{1}:key_d" }
let_it_be(:value1) { "redis_value1" }
let_it_be(:value2) { "redis_value2" }
let_it_be(:key1_value1) { [key1, value1] }
@@ -331,27 +512,50 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
let_it_be(:skey_value1) { [skey, [value1]] }
let_it_be(:skey_value2) { [skey, [value2]] }
let_it_be(:script) { %(redis.call("set", "#{key1}", "#{value1}")) }
-
+ let_it_be(:hkey1) { "redis:{1}:hash_a" }
+ let_it_be(:hkey2) { "redis:{1}:hash_b" }
+ let_it_be(:item) { "item" }
+ let_it_be(:hdelarg) { [hkey1, item] }
+ let_it_be(:hsetarg) { [hkey2, item, value1] }
+ let_it_be(:mhsetarg) { [hkey2, { "item" => value1 }] }
+ let_it_be(:hgetarg) { [hkey2, item] }
+ let_it_be(:expireargs) { [key3, ttl] }
+
+ # rubocop:disable Layout/LineLength
where(:case_name, :name, :args, :expected_value, :verification_name, :verification_args) do
- 'execute :set command' | :set | ref(:key1_value1) | ref(:value1) | :get | ref(:key1)
- 'execute :setnx command' | :setnx | ref(:key1_value2) | ref(:value1) | :get | ref(:key2)
- 'execute :setex command' | :setex | ref(:key1_ttl_value1) | ref(:ttl) | :ttl | ref(:key1)
- 'execute :sadd command' | :sadd | ref(:skey_value2) | ref(:svalues1) | :smembers | ref(:skey)
- 'execute :srem command' | :srem | ref(:skey_value1) | [] | :smembers | ref(:skey)
- 'execute :del command' | :del | ref(:key2) | nil | :get | ref(:key2)
- 'execute :flushdb command' | :flushdb | nil | 0 | :dbsize | nil
- 'execute :eval command' | :eval | ref(:script) | ref(:value1) | :get | ref(:key1)
+ 'execute :set command' | :set | ref(:key1_value1) | ref(:value1) | :get | ref(:key1)
+ 'execute :setnx command' | :setnx | ref(:key1_value2) | ref(:value1) | :get | ref(:key2)
+ 'execute :setex command' | :setex | ref(:key1_ttl_value1) | ref(:ttl) | :ttl | ref(:key1)
+ 'execute :sadd command' | :sadd | ref(:skey_value2) | ref(:svalues1) | :smembers | ref(:skey)
+ 'execute :srem command' | :srem | ref(:skey_value1) | [] | :smembers | ref(:skey)
+ 'execute :del command' | :del | ref(:key2) | nil | :get | ref(:key2)
+ 'execute :unlink command' | :unlink | ref(:key3) | nil | :get | ref(:key3)
+ 'execute :flushdb command' | :flushdb | nil | 0 | :dbsize | nil
+ 'execute :eval command' | :eval | ref(:script) | ref(:value1) | :get | ref(:key1)
+ 'execute :incr command' | :incr | ref(:ikey1) | ref(:ivalue1) | :get | ref(:ikey1)
+ 'execute :incrby command' | :incrby | ref(:iargs) | ref(:ivalue2) | :get | ref(:ikey2)
+ 'execute :hset command' | :hset | ref(:hsetarg) | ref(:value1) | :hget | ref(:hgetarg)
+ 'execute :hdel command' | :hdel | ref(:hdelarg) | nil | :hget | ref(:hdelarg)
+ 'execute :expire command' | :expire | ref(:expireargs) | ref(:ttl) | :ttl | ref(:key3)
+ 'execute :mapped_hmset command' | :mapped_hmset | ref(:mhsetarg) | ref(:value1) | :hget | ref(:hgetarg)
end
+ # rubocop:enable Layout/LineLength
before do
primary_store.flushdb
secondary_store.flushdb
primary_store.set(key2, value1)
+ primary_store.set(key3, value1)
+ primary_store.set(key4, value1)
primary_store.sadd?(skey, value1)
+ primary_store.hset(hkey2, item, value1)
secondary_store.set(key2, value1)
+ secondary_store.set(key3, value1)
+ secondary_store.set(key4, value1)
secondary_store.sadd?(skey, value1)
+ secondary_store.hset(hkey2, item, value1)
end
with_them do
@@ -375,6 +579,34 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
include_examples 'verify that store contains values', :secondary_store
end
+ context 'when use_primary_and_secondary_stores feature flag is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'when using secondary store as default' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'executes only on secondary redis store', :aggregate_errors do
+ expect(secondary_store).to receive(name).with(*expected_args).and_call_original
+ expect(primary_store).not_to receive(name).with(*expected_args).and_call_original
+
+ subject
+ end
+ end
+
+ context 'when using primary store as default' do
+ it 'executes only on primary redis store', :aggregate_errors do
+ expect(primary_store).to receive(name).with(*expected_args).and_call_original
+ expect(secondary_store).not_to receive(name).with(*expected_args).and_call_original
+
+ subject
+ end
+ end
+ end
+
context 'when executing on the primary instance is raising an exception' do
before do
allow(primary_store).to receive(name).with(*expected_args).and_raise(StandardError)
@@ -419,6 +651,121 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
end
end
+ # rubocop:enable RSpec/MultipleMemoizedHelpers
+
+ context 'with ENUMERATOR_COMMANDS redis commands' do
+ let_it_be(:hkey) { "redis:hash" }
+ let_it_be(:skey) { "redis:set" }
+ let_it_be(:zkey) { "redis:sortedset" }
+ let_it_be(:rvalue) { "value1" }
+ let_it_be(:scan_kwargs) { { match: 'redis:hash' } }
+
+ where(:case_name, :name, :args, :kwargs) do
+ 'execute :scan_each command' | :scan_each | nil | ref(:scan_kwargs)
+ 'execute :sscan_each command' | :sscan_each | ref(:skey) | {}
+ 'execute :hscan_each command' | :hscan_each | ref(:hkey) | {}
+ 'execute :zscan_each command' | :zscan_each | ref(:zkey) | {}
+ end
+
+ before(:all) do
+ primary_store.hset(hkey, rvalue, 1)
+ primary_store.sadd?(skey, rvalue)
+ primary_store.zadd(zkey, 1, rvalue)
+
+ secondary_store.hset(hkey, rvalue, 1)
+ secondary_store.sadd?(skey, rvalue)
+ secondary_store.zadd(zkey, 1, rvalue)
+ end
+
+ RSpec.shared_examples_for 'enumerator commands execution' do |both_stores, default_primary|
+ context 'without block passed in' do
+ subject do
+ multi_store.send(name, *args, **kwargs)
+ end
+
+ it 'returns an enumerator' do
+ expect(subject).to be_instance_of(Enumerator)
+ end
+ end
+
+ context 'with block passed in' do
+ subject do
+ multi_store.send(name, *args, **kwargs) { |key| multi_store.incr(rvalue) }
+ end
+
+ it 'returns nil' do
+ expect(subject).to eq(nil)
+ end
+
+ it 'runs block on correct Redis instance' do
+ if both_stores
+ expect(primary_store).to receive(name).with(*expected_args).and_call_original
+ expect(secondary_store).not_to receive(name)
+
+ expect(primary_store).to receive(:incr).with(rvalue)
+ expect(secondary_store).to receive(:incr).with(rvalue)
+ elsif default_primary
+ expect(primary_store).to receive(name).with(*expected_args).and_call_original
+ expect(primary_store).to receive(:incr).with(rvalue)
+
+ expect(secondary_store).not_to receive(name)
+ expect(secondary_store).not_to receive(:incr).with(rvalue)
+ else
+ expect(secondary_store).to receive(name).with(*expected_args).and_call_original
+ expect(secondary_store).to receive(:incr).with(rvalue)
+
+ expect(primary_store).not_to receive(name)
+ expect(primary_store).not_to receive(:incr).with(rvalue)
+ end
+
+ subject
+ end
+ end
+ end
+
+ with_them do
+ describe name.to_s do
+ let(:expected_args) { kwargs.present? ? [*args, { **kwargs }] : Array(args) }
+
+ before do
+ allow(primary_store).to receive(name).and_call_original
+ allow(secondary_store).to receive(name).and_call_original
+ end
+
+ context 'when only using 1 store' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'when using secondary store as default' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it_behaves_like 'enumerator commands execution', false, false
+ end
+
+ context 'when using primary store as default' do
+ it_behaves_like 'enumerator commands execution', false, true
+ end
+ end
+
+ context 'when using both stores' do
+ context 'when using secondary store as default' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it_behaves_like 'enumerator commands execution', true, false
+ end
+
+ context 'when using primary store as default' do
+ it_behaves_like 'enumerator commands execution', true, true
+ end
+ end
+ end
+ end
+ end
RSpec.shared_examples_for 'pipelined command' do |name|
let_it_be(:key1) { "redis:{1}:key_a" }
@@ -554,6 +901,34 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
end
end
end
+
+ context 'when use_primary_and_secondary_stores feature flag is disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_test_store: false)
+ end
+
+ context 'when using secondary store as default' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'executes on secondary store', :aggregate_errors do
+ expect(primary_store).not_to receive(:send).and_call_original
+ expect(secondary_store).to receive(:send).and_call_original
+
+ subject
+ end
+ end
+
+ context 'when using primary store as default' do
+ it 'executes on primary store', :aggregate_errors do
+ expect(secondary_store).not_to receive(:send).and_call_original
+ expect(primary_store).to receive(:send).and_call_original
+
+ subject
+ end
+ end
+ end
end
end
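
The enumerator-commands hunk above spells out the contract for the *scan_each family: without a block they return an Enumerator, with a block they return nil, the scan itself runs against a single store, and writes issued from inside the block still reach every store that is in use. A hedged standalone sketch of that contract, using a hypothetical ScanningMultiStore with plain Hashes in place of Redis:

    # Hypothetical sketch: enumerator-style commands scan only one store,
    # return an Enumerator without a block and nil with one, while writes
    # triggered inside the block go to both stores when both are in use.
    class ScanningMultiStore
      def initialize(primary, secondary, use_both:)
        @primary = primary
        @secondary = secondary
        @use_both = use_both
      end

      def scan_each(match: '*', &block)
        return enum_for(:scan_each, match: match) unless block

        pattern = Regexp.new("\\A#{Regexp.escape(match).gsub('\*', '.*')}\\z")
        @primary.keys.grep(pattern).each(&block)
        nil
      end

      def incr(key)
        @primary[key] = @primary.fetch(key, 0) + 1
        @secondary[key] = @secondary.fetch(key, 0) + 1 if @use_both
      end
    end

    primary   = { 'redis:hash' => 1, 'unrelated' => 2 }
    secondary = { 'redis:hash' => 1 }

    store = ScanningMultiStore.new(primary, secondary, use_both: true)
    store.scan_each(match: 'redis:*').to_a                      # => ["redis:hash"]
    store.scan_each(match: 'redis:*') { store.incr('counter') } # => nil; counter bumped in both stores
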
@@ -565,129 +940,211 @@ RSpec.describe Gitlab::Redis::MultiStore, feature_category: :redis do
include_examples 'pipelined command', :pipelined
end
- context 'with unsupported command' do
- let(:counter) { Gitlab::Metrics::NullMetric.instance }
-
- before do
- primary_store.flushdb
- secondary_store.flushdb
- allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
- end
-
- let_it_be(:key) { "redis:counter" }
+ describe '#ping' do
+ subject { multi_store.ping }
- subject { multi_store.incr(key) }
+ context 'when using both stores' do
+ before do
+ allow(multi_store).to receive(:use_primary_and_secondary_stores?).and_return(true)
+ end
- it 'responds to missing method' do
- expect(multi_store).to receive(:respond_to_missing?).and_call_original
+ context 'without message' do
+ it 'returns PONG' do
+ expect(subject).to eq('PONG')
+ end
+ end
- expect(multi_store.respond_to?(:incr)).to be(true)
- end
+ context 'with message' do
+ it 'returns the same message' do
+ expect(multi_store.ping('hello world')).to eq('hello world')
+ end
+ end
- it 'executes method missing' do
- expect(multi_store).to receive(:method_missing)
+ shared_examples 'raises an error' do
+ before do
+ allow(store).to receive(:ping).and_raise('boom')
+ end
- subject
- end
+ it 'raises the error' do
+ expect { subject }.to raise_error('boom')
+ end
+ end
- context 'when command is not in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
- it 'logs MethodMissingError' do
- expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
- an_instance_of(Gitlab::Redis::MultiStore::MethodMissingError),
- hash_including(command_name: :incr, instance_name: instance_name)
- )
+ context 'when primary store returns an error' do
+ let(:store) { primary_store }
- subject
+ it_behaves_like 'raises an error'
end
- it 'increments method missing counter' do
- expect(counter).to receive(:increment).with(command: :incr, instance_name: instance_name)
+ context 'when secondary store returns an error' do
+ let(:store) { secondary_store }
- subject
+ it_behaves_like 'raises an error'
end
end
- context 'when command is in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
- subject { multi_store.info }
+ shared_examples 'single store as default store' do
+ context 'when the store returns success' do
+ it 'returns response from the respective store' do
+ expect(store).to receive(:ping).and_return('PONG')
- it 'does not log MethodMissingError' do
- expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+ subject
- subject
+ expect(subject).to eq('PONG')
+ end
end
- it 'does not increment method missing counter' do
- expect(counter).not_to receive(:increment)
+ context 'when the store returns an error' do
+ before do
+ allow(store).to receive(:ping).and_raise('boom')
+ end
- subject
+ it 'raises the error' do
+ expect { subject }.to raise_error('boom')
+ end
end
end
- context 'with feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ context 'when using only one store' do
before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: true)
+ allow(multi_store).to receive(:use_primary_and_secondary_stores?).and_return(false)
end
- it 'fallback and executes only on the secondary store', :aggregate_errors do
- expect(primary_store).to receive(:incr).with(key).and_call_original
- expect(secondary_store).not_to receive(:incr)
+ context 'when using primary_store as default store' do
+ let(:store) { primary_store }
- subject
+ before do
+ allow(multi_store).to receive(:use_primary_store_as_default?).and_return(true)
+ end
+
+ it_behaves_like 'single store as default store'
end
- it 'correct value is stored on the secondary store', :aggregate_errors do
- subject
+ context 'when using secondary_store as default store' do
+ let(:store) { secondary_store }
- expect(secondary_store.get(key)).to be_nil
- expect(primary_store.get(key)).to eq('1')
+ before do
+ allow(multi_store).to receive(:use_primary_store_as_default?).and_return(false)
+ end
+
+ it_behaves_like 'single store as default store'
end
end
+ end
- context 'with feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ context 'with unsupported command' do
+ let(:counter) { Gitlab::Metrics::NullMetric.instance }
+
+ before do
+ primary_store.flushdb
+ secondary_store.flushdb
+ allow(Gitlab::Metrics).to receive(:counter).and_return(counter)
+ end
+
+ subject { multi_store.command }
+
+ context 'when in test environment' do
+ it 'raises error' do
+ expect { subject }.to raise_error(instance_of(Gitlab::Redis::MultiStore::MethodMissingError))
+ end
+ end
+
+ context 'when not in test environment' do
before do
- stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ stub_rails_env('production')
end
- it 'fallback and executes only on the secondary store', :aggregate_errors do
- expect(secondary_store).to receive(:incr).with(key).and_call_original
- expect(primary_store).not_to receive(:incr)
+ it 'responds to missing method' do
+ expect(multi_store).to receive(:respond_to_missing?).and_call_original
- subject
+ expect(multi_store.respond_to?(:command)).to be(true)
end
- it 'correct value is stored on the secondary store', :aggregate_errors do
+ it 'executes method missing' do
+ expect(multi_store).to receive(:method_missing)
+
subject
+ end
+
+ context 'when command is not in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
+ it 'logs MethodMissingError' do
+ expect(Gitlab::ErrorTracking).to receive(:log_exception).with(
+ an_instance_of(Gitlab::Redis::MultiStore::MethodMissingError),
+ hash_including(command_name: :command, instance_name: instance_name)
+ )
+
+ subject
+ end
+
+ it 'increments method missing counter' do
+ expect(counter).to receive(:increment).with(command: :command, instance_name: instance_name)
+
+ subject
+ end
- expect(primary_store.get(key)).to be_nil
- expect(secondary_store.get(key)).to eq('1')
+ it 'falls back and executes only on the primary store', :aggregate_failures do
+ expect(primary_store).to receive(:command).and_call_original
+ expect(secondary_store).not_to receive(:command)
+
+ subject
+ end
end
- end
- context 'when the command is executed within pipelined block' do
- subject do
- multi_store.pipelined do |pipeline|
- pipeline.incr(key)
+ context 'when command is in SKIP_LOG_METHOD_MISSING_FOR_COMMANDS' do
+ subject { multi_store.info }
+
+ it 'does not log MethodMissingError' do
+ expect(Gitlab::ErrorTracking).not_to receive(:log_exception)
+
+ subject
+ end
+
+ it 'does not increment method missing counter' do
+ expect(counter).not_to receive(:increment)
+
+ subject
end
end
- it 'is executed only 1 time on each instance', :aggregate_errors do
- expect(primary_store).to receive(:pipelined).once.and_call_original
- expect(secondary_store).to receive(:pipelined).once.and_call_original
+ context 'when feature flag :use_primary_store_as_default_for_test_store is enabled' do
+ it 'falls back and executes only on the primary store', :aggregate_failures do
+ expect(primary_store).to receive(:command).and_call_original
+ expect(secondary_store).not_to receive(:command)
- 2.times do
- expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
- expect(pipeline).to receive(:incr).with(key).once
- end
+ subject
end
+ end
- subject
+ context 'when feature flag :use_primary_store_as_default_for_test_store is disabled' do
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_test_store: false)
+ end
+
+ it 'falls back and executes only on the secondary store', :aggregate_failures do
+ expect(secondary_store).to receive(:command).and_call_original
+ expect(primary_store).not_to receive(:command)
+
+ subject
+ end
end
- it "both redis stores are containing correct values", :aggregate_errors do
- subject
+ context 'when the command is executed within pipelined block' do
+ subject do
+ multi_store.pipelined(&:command)
+ end
+
+ it 'is executed only once on each instance', :aggregate_failures do
+ expect(primary_store).to receive(:pipelined).once.and_call_original
+ expect(secondary_store).to receive(:pipelined).once.and_call_original
+
+ 2.times do
+ expect_next_instance_of(Redis::PipelinedConnection) do |pipeline|
+ expect(pipeline).to receive(:command).once
+ end
+ end
- expect(primary_store.get(key)).to eq('1')
- expect(secondary_store.get(key)).to eq('1')
+ subject
+ end
end
end
end
diff --git a/spec/lib/gitlab/redis/repository_cache_spec.rb b/spec/lib/gitlab/redis/repository_cache_spec.rb
new file mode 100644
index 00000000000..b11e9ebf1f3
--- /dev/null
+++ b/spec/lib/gitlab/redis/repository_cache_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Gitlab::Redis::RepositoryCache, feature_category: :scalability do
+ include_examples "redis_new_instance_shared_examples", 'repository_cache', Gitlab::Redis::Cache
+ include_examples "redis_shared_examples"
+
+ describe '#pool' do
+ let(:config_new_format_host) { "spec/fixtures/config/redis_new_format_host.yml" }
+ let(:config_new_format_socket) { "spec/fixtures/config/redis_new_format_socket.yml" }
+
+ subject { described_class.pool }
+
+ before do
+ allow(described_class).to receive(:config_file_name).and_return(config_new_format_host)
+ allow(Gitlab::Redis::Cache).to receive(:config_file_name).and_return(config_new_format_socket)
+ end
+
+ around do |example|
+ clear_pool
+ example.run
+ ensure
+ clear_pool
+ end
+
+ it 'instantiates an instance of MultiStore' do
+ subject.with do |redis_instance|
+ expect(redis_instance).to be_instance_of(::Gitlab::Redis::MultiStore)
+
+ expect(redis_instance.primary_store.connection[:id]).to eq("redis://test-host:6379/99")
+ expect(redis_instance.secondary_store.connection[:id]).to eq("unix:///path/to/redis.sock/0")
+
+ expect(redis_instance.instance_name).to eq('RepositoryCache')
+ end
+ end
+
+ it_behaves_like 'multi store feature flags', :use_primary_and_secondary_stores_for_repository_cache,
+ :use_primary_store_as_default_for_repository_cache
+ end
+
+ describe '#raw_config_hash' do
+ it 'has a legacy default URL' do
+ expect(subject).to receive(:fetch_config).and_return(false)
+
+ expect(subject.send(:raw_config_hash)).to eq(url: 'redis://localhost:6380')
+ end
+ end
+end
diff --git a/spec/lib/gitlab/redis/sidekiq_status_spec.rb b/spec/lib/gitlab/redis/sidekiq_status_spec.rb
index 76d130d67f7..e7cf229b494 100644
--- a/spec/lib/gitlab/redis/sidekiq_status_spec.rb
+++ b/spec/lib/gitlab/redis/sidekiq_status_spec.rb
@@ -18,18 +18,10 @@ RSpec.describe Gitlab::Redis::SidekiqStatus do
subject { described_class.pool }
before do
- redis_clear_raw_config!(Gitlab::Redis::SharedState)
- redis_clear_raw_config!(Gitlab::Redis::Queues)
-
allow(Gitlab::Redis::SharedState).to receive(:config_file_name).and_return(config_new_format_host)
allow(Gitlab::Redis::Queues).to receive(:config_file_name).and_return(config_new_format_socket)
end
- after do
- redis_clear_raw_config!(Gitlab::Redis::SharedState)
- redis_clear_raw_config!(Gitlab::Redis::Queues)
- end
-
around do |example|
clear_pool
example.run
diff --git a/spec/lib/gitlab/regex_spec.rb b/spec/lib/gitlab/regex_spec.rb
index 89ef76d246e..9532a30144f 100644
--- a/spec/lib/gitlab/regex_spec.rb
+++ b/spec/lib/gitlab/regex_spec.rb
@@ -7,7 +7,7 @@ require_relative '../../support/shared_examples/lib/gitlab/regex_shared_examples
# All specs that can be run with fast_spec_helper only
# See regex_requires_app_spec for tests that require the full spec_helper
-RSpec.describe Gitlab::Regex do
+RSpec.describe Gitlab::Regex, feature_category: :tooling do
shared_examples_for 'project/group name chars regex' do
it { is_expected.to match('gitlab-ce') }
it { is_expected.to match('GitLab CE') }
@@ -72,6 +72,59 @@ RSpec.describe Gitlab::Regex do
it { is_expected.to eq("can contain only letters, digits, emojis, '_', '.', dash, space, parenthesis. It must start with letter, digit, emoji or '_'.") }
end
+ describe '.bulk_import_namespace_path_regex' do
+ subject { described_class.bulk_import_namespace_path_regex }
+
+ it { is_expected.not_to match('?gitlab') }
+ it { is_expected.not_to match("Users's something") }
+ it { is_expected.not_to match('/source') }
+ it { is_expected.not_to match('http:') }
+ it { is_expected.not_to match('https:') }
+ it { is_expected.not_to match('example.com/?stuff=true') }
+ it { is_expected.not_to match('example.com:5000/?stuff=true') }
+ it { is_expected.not_to match('http://gitlab.example/gitlab-org/manage/import/gitlab-migration-test') }
+ it { is_expected.not_to match('_good_for_me!') }
+ it { is_expected.not_to match('good_for+you') }
+ it { is_expected.not_to match('source/') }
+ it { is_expected.not_to match('.source/full./path') }
+
+ it { is_expected.to match('source') }
+ it { is_expected.to match('.source') }
+ it { is_expected.to match('_source') }
+ it { is_expected.to match('source/full') }
+ it { is_expected.to match('source/full/path') }
+ it { is_expected.to match('.source/.full/.path') }
+ it { is_expected.to match('domain_namespace') }
+ it { is_expected.to match('gitlab-migration-test') }
+ end
+
+ describe '.group_path_regex' do
+ subject { described_class.group_path_regex }
+
+ it { is_expected.not_to match('?gitlab') }
+ it { is_expected.not_to match("Users's something") }
+ it { is_expected.not_to match('/source') }
+ it { is_expected.not_to match('http:') }
+ it { is_expected.not_to match('https:') }
+ it { is_expected.not_to match('example.com/?stuff=true') }
+ it { is_expected.not_to match('example.com:5000/?stuff=true') }
+ it { is_expected.not_to match('http://gitlab.example/gitlab-org/manage/import/gitlab-migration-test') }
+ it { is_expected.not_to match('_good_for_me!') }
+ it { is_expected.not_to match('good_for+you') }
+ it { is_expected.not_to match('source/') }
+ it { is_expected.not_to match('.source/full./path') }
+
+ it { is_expected.not_to match('source/full') }
+ it { is_expected.not_to match('source/full/path') }
+ it { is_expected.not_to match('.source/.full/.path') }
+
+ it { is_expected.to match('source') }
+ it { is_expected.to match('.source') }
+ it { is_expected.to match('_source') }
+ it { is_expected.to match('domain_namespace') }
+ it { is_expected.to match('gitlab-migration-test') }
+ end
+
describe '.environment_name_regex' do
subject { described_class.environment_name_regex }
diff --git a/spec/lib/gitlab/relative_positioning/mover_spec.rb b/spec/lib/gitlab/relative_positioning/mover_spec.rb
index cbb15ae876d..85e985b1b6f 100644
--- a/spec/lib/gitlab/relative_positioning/mover_spec.rb
+++ b/spec/lib/gitlab/relative_positioning/mover_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe RelativePositioning::Mover do
+RSpec.describe RelativePositioning::Mover, feature_category: :portfolio_management do
let_it_be(:user) { create(:user) }
let_it_be(:one_sibling, reload: true) { create(:project, creator: user, namespace: user.namespace) }
let_it_be(:one_free_space, reload: true) { create(:project, creator: user, namespace: user.namespace) }
diff --git a/spec/lib/gitlab/repository_cache/preloader_spec.rb b/spec/lib/gitlab/repository_cache/preloader_spec.rb
index 8c6618c9f8f..71244dd41ed 100644
--- a/spec/lib/gitlab/repository_cache/preloader_spec.rb
+++ b/spec/lib/gitlab/repository_cache/preloader_spec.rb
@@ -2,53 +2,80 @@
require 'spec_helper'
-RSpec.describe Gitlab::RepositoryCache::Preloader, :use_clean_rails_redis_caching do
+RSpec.describe Gitlab::RepositoryCache::Preloader, :use_clean_rails_redis_caching,
+ feature_category: :source_code_management do
let(:projects) { create_list(:project, 2, :repository) }
let(:repositories) { projects.map(&:repository) }
- describe '#preload' do
- context 'when the values are already cached' do
- before do
- # Warm the cache but use a different model so they are not memoized
- repos = Project.id_in(projects).order(:id).map(&:repository)
+ before do
+ stub_feature_flags(use_primary_store_as_default_for_repository_cache: false)
+ end
- allow(repos[0].head_tree).to receive(:readme_path).and_return('README.txt')
- allow(repos[1].head_tree).to receive(:readme_path).and_return('README.md')
+ shared_examples 'preload' do
+ describe '#preload' do
+ context 'when the values are already cached' do
+ before do
+ # Warm the cache but use a different model so they are not memoized
+ repos = Project.id_in(projects).order(:id).map(&:repository)
- repos.map(&:exists?)
- repos.map(&:readme_path)
- end
+ allow(repos[0]).to receive(:readme_path_gitaly).and_return('README.txt')
+ allow(repos[1]).to receive(:readme_path_gitaly).and_return('README.md')
- it 'prevents individual cache reads for cached methods' do
- expect(Rails.cache).to receive(:read_multi).once.and_call_original
+ repos.map(&:exists?)
+ repos.map(&:readme_path)
+ end
- described_class.new(repositories).preload(
- %i[exists? readme_path]
- )
+ it 'prevents individual cache reads for cached methods' do
+ expect(cache).to receive(:read_multi).once.and_call_original
- expect(Rails.cache).not_to receive(:read)
- expect(Rails.cache).not_to receive(:write)
+ described_class.new(repositories).preload(
+ %i[exists? readme_path]
+ )
- expect(repositories[0].exists?).to eq(true)
- expect(repositories[0].readme_path).to eq('README.txt')
+ expect(cache).not_to receive(:read)
+ expect(cache).not_to receive(:write)
- expect(repositories[1].exists?).to eq(true)
- expect(repositories[1].readme_path).to eq('README.md')
+ expect(repositories[0].exists?).to eq(true)
+ expect(repositories[0].readme_path).to eq('README.txt')
+
+ expect(repositories[1].exists?).to eq(true)
+ expect(repositories[1].readme_path).to eq('README.md')
+ end
end
- end
- context 'when values are not cached' do
- it 'reads and writes from cache individually' do
- described_class.new(repositories).preload(
- %i[exists? has_visible_content?]
- )
+ context 'when values are not cached' do
+ it 'reads and writes from cache individually' do
+ described_class.new(repositories).preload(
+ %i[exists? has_visible_content?]
+ )
- expect(Rails.cache).to receive(:read).exactly(4).times
- expect(Rails.cache).to receive(:write).exactly(4).times
+ expect(cache).to receive(:read).exactly(4).times
+ expect(cache).to receive(:write).exactly(4).times
- repositories.each(&:exists?)
- repositories.each(&:has_visible_content?)
+ repositories.each(&:exists?)
+ repositories.each(&:has_visible_content?)
+ end
end
end
end
+
+ context 'when use_primary_and_secondary_stores_for_repository_cache feature flag is enabled' do
+ let(:cache) { Gitlab::RepositoryCache.store }
+
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: true)
+ end
+
+ it_behaves_like 'preload'
+ end
+
+ context 'when use_primary_and_secondary_stores_for_repository_cache feature flag is disabled' do
+ let(:cache) { Rails.cache }
+
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: false)
+ end
+
+ it_behaves_like 'preload'
+ end
end
diff --git a/spec/lib/gitlab/repository_hash_cache_spec.rb b/spec/lib/gitlab/repository_hash_cache_spec.rb
index 6b52c315a70..d41bf45f72e 100644
--- a/spec/lib/gitlab/repository_hash_cache_spec.rb
+++ b/spec/lib/gitlab/repository_hash_cache_spec.rb
@@ -69,20 +69,35 @@ RSpec.describe Gitlab::RepositoryHashCache, :clean_gitlab_redis_cache do
end
end
- describe "#key?" do
- subject { cache.key?(:example, "test") }
+ shared_examples "key?" do
+ describe "#key?" do
+ subject { cache.key?(:example, "test") }
- context "key exists" do
- before do
- cache.write(:example, test_hash)
+ context "key exists" do
+ before do
+ cache.write(:example, test_hash)
+ end
+
+ it { is_expected.to be(true) }
end
- it { is_expected.to be(true) }
+ context "key doesn't exist" do
+ it { is_expected.to be(false) }
+ end
end
+ end
- context "key doesn't exist" do
- it { is_expected.to be(false) }
+ context "when both multistore FF is enabled" do
+ it_behaves_like "key?"
+ end
+
+ context "when both multistore FF is disabled" do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: false)
+ stub_feature_flags(use_primary_store_as_default_for_repository_cache: false)
end
+
+ it_behaves_like "key?"
end
describe "#read_members" do
diff --git a/spec/lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder_spec.rb b/spec/lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder_spec.rb
new file mode 100644
index 00000000000..2862bcc9719
--- /dev/null
+++ b/spec/lib/gitlab/seeders/ci/runner/runner_fleet_pipeline_seeder_spec.rb
@@ -0,0 +1,58 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+NULL_LOGGER = Gitlab::JsonLogger.new('/dev/null')
+TAG_LIST = Gitlab::Seeders::Ci::Runner::RunnerFleetSeeder::TAG_LIST.to_set
+
+RSpec.describe ::Gitlab::Seeders::Ci::Runner::RunnerFleetPipelineSeeder, feature_category: :runner_fleet do
+ subject(:seeder) do
+ described_class.new(NULL_LOGGER, projects_to_runners: projects_to_runners, job_count: job_count)
+ end
+
+ def runner_ids_for_project(runner_count, project)
+ create_list(:ci_runner, runner_count, :project, projects: [project], tag_list: TAG_LIST.to_a.sample(5)).map(&:id)
+ end
+
+ let_it_be(:projects) { create_list(:project, 4) }
+ let_it_be(:projects_to_runners) do
+ [
+ { project_id: projects[0].id, runner_ids: runner_ids_for_project(2, projects[0]) },
+ { project_id: projects[1].id, runner_ids: runner_ids_for_project(1, projects[1]) },
+ { project_id: projects[2].id, runner_ids: runner_ids_for_project(2, projects[2]) },
+ { project_id: projects[3].id, runner_ids: runner_ids_for_project(1, projects[3]) }
+ ]
+ end
+
+ describe '#seed' do
+ context 'with job_count specified' do
+ let(:job_count) { 20 }
+
+ it 'creates expected jobs', :aggregate_failures do
+ expect { seeder.seed }.to change { Ci::Build.count }.by(job_count)
+ .and change { Ci::Pipeline.count }.by(4)
+
+ expect(Ci::Pipeline.where.not(started_at: nil).map(&:queued_duration)).to all(be < 5.minutes)
+ expect(Ci::Build.where.not(started_at: nil).map(&:queued_duration)).to all(be < 5.minutes)
+
+ projects_to_runners.first(3).each do |project|
+ expect(Ci::Build.where(runner_id: project[:runner_ids])).not_to be_empty
+ end
+ end
+ end
+
+ context 'with nil job_count' do
+ let(:job_count) { nil }
+
+ before do
+ stub_const('Gitlab::Seeders::Ci::Runner::RunnerFleetPipelineSeeder::DEFAULT_JOB_COUNT', 2)
+ end
+
+ it 'creates expected jobs', :aggregate_failures do
+ expect { seeder.seed }.to change { Ci::Build.count }.by(2)
+ .and change { Ci::Pipeline.count }.by(2)
+ expect(Ci::Build.last(2).map(&:tag_list).map(&:to_set)).to all satisfy { |r| r.subset?(TAG_LIST) }
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/seeders/ci/runner/runner_fleet_seeder_spec.rb b/spec/lib/gitlab/seeders/ci/runner/runner_fleet_seeder_spec.rb
new file mode 100644
index 00000000000..fe52b586d49
--- /dev/null
+++ b/spec/lib/gitlab/seeders/ci/runner/runner_fleet_seeder_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+NULL_LOGGER = Gitlab::JsonLogger.new('/dev/null')
+
+RSpec.describe ::Gitlab::Seeders::Ci::Runner::RunnerFleetSeeder, feature_category: :runner_fleet do
+ let_it_be(:user) { create(:user, :admin, username: 'test-admin') }
+
+ subject(:seeder) do
+ described_class.new(NULL_LOGGER,
+ username: user.username,
+ registration_prefix: registration_prefix,
+ runner_count: runner_count)
+ end
+
+ describe '#seed', :enable_admin_mode do
+ subject(:seed) { seeder.seed }
+
+ let(:runner_count) { 20 }
+ let(:registration_prefix) { 'prefix-' }
+ let(:runner_releases_url) do
+ ::Gitlab::CurrentSettings.current_application_settings.public_runner_releases_url
+ end
+
+ before do
+ WebMock.stub_request(:get, runner_releases_url).to_return(
+ body: '[]',
+ status: 200,
+ headers: { 'Content-Type' => 'application/json' }
+ )
+ end
+
+ it 'creates expected hierarchy', :aggregate_failures do
+ expect { seed }.to change { Ci::Runner.count }.by(runner_count)
+ .and change { Ci::Runner.instance_type.count }.by(1)
+ .and change { Project.count }.by(3)
+ .and change { Group.count }.by(6)
+
+ expect(Group.search(registration_prefix)).to contain_exactly(
+ an_object_having_attributes(name: "#{registration_prefix}top-level group 1"),
+ an_object_having_attributes(name: "#{registration_prefix}top-level group 2"),
+ an_object_having_attributes(name: "#{registration_prefix}group 1.1"),
+ an_object_having_attributes(name: "#{registration_prefix}group 1.1.1"),
+ an_object_having_attributes(name: "#{registration_prefix}group 1.1.2"),
+ an_object_having_attributes(name: "#{registration_prefix}group 2.1")
+ )
+
+ expect(Project.search(registration_prefix)).to contain_exactly(
+ an_object_having_attributes(name: "#{registration_prefix}project 1.1.1.1"),
+ an_object_having_attributes(name: "#{registration_prefix}project 1.1.2.1"),
+ an_object_having_attributes(name: "#{registration_prefix}project 2.1.1")
+ )
+
+ project_1_1_1_1 = Project.find_by_name("#{registration_prefix}project 1.1.1.1")
+ project_1_1_2_1 = Project.find_by_name("#{registration_prefix}project 1.1.2.1")
+ project_2_1_1 = Project.find_by_name("#{registration_prefix}project 2.1.1")
+ expect(seed).to contain_exactly(
+ { project_id: project_1_1_1_1.id, runner_ids: an_instance_of(Array) },
+ { project_id: project_1_1_2_1.id, runner_ids: an_instance_of(Array) },
+ { project_id: project_2_1_1.id, runner_ids: an_instance_of(Array) }
+ )
+ seed.each do |project|
+ expect(project[:runner_ids].length).to be_between(0, 5)
+ expect(Project.find(project[:project_id]).all_available_runners.ids).to include(*project[:runner_ids])
+ expect(::Ci::Pipeline.for_project(project[:runner_ids])).to be_empty
+ expect(::Ci::Build.where(runner_id: project[:runner_ids])).to be_empty
+ end
+ end
+ end
+end
diff --git a/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb b/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
index 5baeec93036..6f46a5aea3b 100644
--- a/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
+++ b/spec/lib/gitlab/sidekiq_daemon/memory_killer_spec.rb
@@ -307,10 +307,10 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
end
describe '#signal_and_wait' do
- let(:time) { 0 }
+ let(:time) { 0.1 }
let(:signal) { 'my-signal' }
let(:explanation) { 'my-explanation' }
- let(:check_interval_seconds) { 2 }
+ let(:check_interval_seconds) { 0.1 }
subject { memory_killer.send(:signal_and_wait, time, signal, explanation) }
@@ -318,37 +318,19 @@ RSpec.describe Gitlab::SidekiqDaemon::MemoryKiller do
stub_const("#{described_class}::CHECK_INTERVAL_SECONDS", check_interval_seconds)
end
- context 'when all jobs are finished' do
- let(:running_jobs) { {} }
-
- it 'send signal and return when all jobs finished' do
- expect(Process).to receive(:kill).with(signal, pid).ordered
- expect(Gitlab::Metrics::System).to receive(:monotonic_time).and_call_original
-
- expect(memory_killer).to receive(:enabled?).and_return(true)
-
- expect(memory_killer).not_to receive(:sleep)
-
- subject
- end
- end
+ it 'sends signal and waits until deadline' do
+ expect(Process).to receive(:kill)
+ .with(signal, pid)
+ .ordered
- context 'when there are still running jobs' do
- let(:running_jobs) { { 'jid1' => { worker_class: DummyWorker } } }
-
- it 'send signal and wait till deadline if any job not finished' do
- expect(Process).to receive(:kill)
- .with(signal, pid)
- .ordered
-
- expect(Gitlab::Metrics::System).to receive(:monotonic_time)
- .and_call_original
- .at_least(:once)
+ expect(Gitlab::Metrics::System).to receive(:monotonic_time)
+ .and_call_original
+ .at_least(3)
- expect(memory_killer).to receive(:enabled?).and_return(true).at_least(:once)
+ expect(memory_killer).to receive(:enabled?).and_return(true).at_least(:twice)
+ expect(memory_killer).to receive(:sleep).at_least(:once).and_call_original
- subject
- end
+ subject
end
end
diff --git a/spec/lib/gitlab/ssh/commit_spec.rb b/spec/lib/gitlab/ssh/commit_spec.rb
index cc977a80f95..77f37857c82 100644
--- a/spec/lib/gitlab/ssh/commit_spec.rb
+++ b/spec/lib/gitlab/ssh/commit_spec.rb
@@ -1,9 +1,10 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Gitlab::Ssh::Commit do
+RSpec.describe Gitlab::Ssh::Commit, feature_category: :source_code_management do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:signed_by_key) { create(:key) }
+ let_it_be(:fingerprint) { signed_by_key.fingerprint_sha256 }
let(:commit) { create(:commit, project: project) }
let(:signature_text) { 'signature_text' }
@@ -19,8 +20,11 @@ RSpec.describe Gitlab::Ssh::Commit do
.with(Gitlab::Git::Repository, commit.sha)
.and_return(signature_data)
- allow(verifier).to receive(:verification_status).and_return(verification_status)
- allow(verifier).to receive(:signed_by_key).and_return(signed_by_key)
+ allow(verifier).to receive_messages({
+ verification_status: verification_status,
+ signed_by_key: signed_by_key,
+ key_fingerprint: fingerprint
+ })
allow(Gitlab::Ssh::Signature).to receive(:new)
.with(signature_text, signed_text, commit.committer_email)
@@ -44,6 +48,8 @@ RSpec.describe Gitlab::Ssh::Commit do
commit_sha: commit.sha,
project: project,
key_id: signed_by_key.id,
+ key_fingerprint_sha256: signed_by_key.fingerprint_sha256,
+ user_id: signed_by_key.user_id,
verification_status: 'verified'
)
end
@@ -51,6 +57,7 @@ RSpec.describe Gitlab::Ssh::Commit do
context 'when signed_by_key is nil' do
let_it_be(:signed_by_key) { nil }
+ let_it_be(:fingerprint) { nil }
let(:verification_status) { :unknown_key }
@@ -59,6 +66,8 @@ RSpec.describe Gitlab::Ssh::Commit do
commit_sha: commit.sha,
project: project,
key_id: nil,
+ key_fingerprint_sha256: nil,
+ user_id: nil,
verification_status: 'unknown_key'
)
end
diff --git a/spec/lib/gitlab/ssh/signature_spec.rb b/spec/lib/gitlab/ssh/signature_spec.rb
index 5149972dbf9..ee9b38cae7d 100644
--- a/spec/lib/gitlab/ssh/signature_spec.rb
+++ b/spec/lib/gitlab/ssh/signature_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::Ssh::Signature do
+RSpec.describe Gitlab::Ssh::Signature, feature_category: :source_code_management do
# ssh-keygen -t ed25519
let_it_be(:committer_email) { 'ssh-commit-test@example.com' }
let_it_be(:public_key_text) { 'ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIHZ8NHEnCIpC4mnot+BRxv6L+fq+TnN1CgsRrHWLmfwb' }
@@ -267,4 +267,10 @@ RSpec.describe Gitlab::Ssh::Signature do
end
end
end
+
+ describe '#key_fingerprint' do
+ it 'returns the pubkey sha256 fingerprint' do
+ expect(signature.key_fingerprint).to eq('dw7gPSvYtkCBU+BbTolbbckUEX3sL6NsGIJTQ4PYEnM')
+ end
+ end
end
diff --git a/spec/lib/gitlab/submodule_links_spec.rb b/spec/lib/gitlab/submodule_links_spec.rb
index e2bbda81780..12c322ea914 100644
--- a/spec/lib/gitlab/submodule_links_spec.rb
+++ b/spec/lib/gitlab/submodule_links_spec.rb
@@ -51,7 +51,7 @@ RSpec.describe Gitlab::SubmoduleLinks do
expect(subject.compare).to be_nil
end
- cache_store = links.instance_variable_get("@cache_store")
+ cache_store = links.instance_variable_get(:@cache_store)
expect(cache_store[ref]).to eq({ "gitlab-foss" => "git@gitlab.com:gitlab-org/gitlab-foss.git" })
end
diff --git a/spec/lib/gitlab/tracking_spec.rb b/spec/lib/gitlab/tracking_spec.rb
index 99ca402616a..e79bb2ef129 100644
--- a/spec/lib/gitlab/tracking_spec.rb
+++ b/spec/lib/gitlab/tracking_spec.rb
@@ -10,11 +10,11 @@ RSpec.describe Gitlab::Tracking do
stub_application_setting(snowplow_cookie_domain: '.gitfoo.com')
stub_application_setting(snowplow_app_id: '_abc123_')
- described_class.instance_variable_set("@tracker", nil)
+ described_class.instance_variable_set(:@tracker, nil)
end
after do
- described_class.instance_variable_set("@tracker", nil)
+ described_class.instance_variable_set(:@tracker, nil)
end
describe '.options' do
diff --git a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
index 10e336e9235..8be0769a379 100644
--- a/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
+++ b/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb
@@ -58,6 +58,50 @@ RSpec.describe Gitlab::Usage::Metrics::Aggregates::Aggregate, :clean_gitlab_redi
end
end
+ # EE version has validation that doesn't allow undefined events
+ # On CE, we detect EE events as undefined
+ context 'when configuration includes undefined events', unless: Gitlab.ee? do
+ let(:number_of_days) { 28 }
+
+ before do
+ allow(Gitlab::UsageDataCounters::HLLRedisCounter).to receive(:known_event?).with('event3').and_return(false)
+ end
+
+ where(:operator, :datasource, :expected_method, :expected_events) do
+ 'AND' | 'redis_hll' | :calculate_metrics_intersections | %w[event1 event2]
+ 'AND' | 'database' | :calculate_metrics_intersections | %w[event1 event2 event3]
+ 'OR' | 'redis_hll' | :calculate_metrics_union | %w[event1 event2]
+ 'OR' | 'database' | :calculate_metrics_union | %w[event1 event2 event3]
+ end
+
+ with_them do
+ let(:time_frame) { "#{number_of_days}d" }
+ let(:start_date) { number_of_days.days.ago.to_date }
+ let(:params) { { start_date: start_date, end_date: end_date, recorded_at: recorded_at } }
+ let(:aggregate) do
+ {
+ source: datasource,
+ operator: operator,
+ events: %w[event1 event2 event3]
+ }
+ end
+
+ subject(:calculate_count_for_aggregation) do
+ described_class
+ .new(recorded_at)
+ .calculate_count_for_aggregation(aggregation: aggregate, time_frame: time_frame)
+ end
+
+ it 'returns the number of unique events for aggregation', :aggregate_failures do
+ expect(namespace::SOURCES[datasource])
+ .to receive(expected_method)
+ .with(params.merge(metric_names: expected_events))
+ .and_return(5)
+ expect(calculate_count_for_aggregation).to eq(5)
+ end
+ end
+ end
+
context 'with invalid configuration' do
where(:time_frame, :operator, :datasource, :expected_error) do
'28d' | 'SUM' | 'redis_hll' | namespace::UnknownAggregationOperator
diff --git a/spec/lib/gitlab/usage/service_ping/legacy_metric_timing_decorator_spec.rb b/spec/lib/gitlab/usage/service_ping/legacy_metric_metadata_decorator_spec.rb
index 46592379b3d..c8c2feda234 100644
--- a/spec/lib/gitlab/usage/service_ping/legacy_metric_timing_decorator_spec.rb
+++ b/spec/lib/gitlab/usage/service_ping/legacy_metric_metadata_decorator_spec.rb
@@ -2,26 +2,31 @@
require 'spec_helper'
-RSpec.describe Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator do
+RSpec.describe Gitlab::Usage::ServicePing::LegacyMetricMetadataDecorator, feature_category: :service_ping do
using RSpec::Parameterized::TableSyntax
let(:duration) { 123 }
- where(:metric_value, :metric_class) do
- 1 | Integer
- "value" | String
- true | TrueClass
- false | FalseClass
- nil | NilClass
+ where(:metric_value, :error, :metric_class) do
+ 1 | nil | Integer
+ "value" | nil | String
+ true | nil | TrueClass
+ false | nil | FalseClass
+ nil | nil | NilClass
+ nil | StandardError.new | NilClass
end
with_them do
- let(:decorated_object) { described_class.new(metric_value, duration) }
+ let(:decorated_object) { described_class.new(metric_value, duration, error: error) }
it 'exposes a duration with the correct value' do
expect(decorated_object.duration).to eq(duration)
end
+ it 'exposes error with the correct value' do
+ expect(decorated_object.error).to eq(error)
+ end
+
it 'imitates wrapped class', :aggregate_failures do
expect(decorated_object).to eq metric_value
expect(decorated_object.class).to eq metric_class
diff --git a/spec/lib/gitlab/usage_data_metrics_spec.rb b/spec/lib/gitlab/usage_data_metrics_spec.rb
index 5d58933f1fd..34f8e5b2a2f 100644
--- a/spec/lib/gitlab/usage_data_metrics_spec.rb
+++ b/spec/lib/gitlab/usage_data_metrics_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::UsageDataMetrics do
+RSpec.describe Gitlab::UsageDataMetrics, :with_license do
describe '.uncached_data' do
subject { described_class.uncached_data }
diff --git a/spec/lib/gitlab/usage_data_queries_spec.rb b/spec/lib/gitlab/usage_data_queries_spec.rb
index 2fe43c11d27..30588324adf 100644
--- a/spec/lib/gitlab/usage_data_queries_spec.rb
+++ b/spec/lib/gitlab/usage_data_queries_spec.rb
@@ -11,9 +11,9 @@ RSpec.describe Gitlab::UsageDataQueries do
end
end
- describe '.with_duration' do
+ describe '.with_metadata' do
it 'yields passed block' do
- expect { |block| described_class.with_duration(&block) }.to yield_with_no_args
+ expect { |block| described_class.with_metadata(&block) }.to yield_with_no_args
end
end
diff --git a/spec/lib/gitlab/usage_data_spec.rb b/spec/lib/gitlab/usage_data_spec.rb
index 214331e15e8..592ac280d32 100644
--- a/spec/lib/gitlab/usage_data_spec.rb
+++ b/spec/lib/gitlab/usage_data_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::UsageData, :aggregate_failures do
+RSpec.describe Gitlab::UsageData, :aggregate_failures, feature_category: :service_ping do
include UsageDataHelpers
before do
@@ -1122,12 +1122,20 @@ RSpec.describe Gitlab::UsageData, :aggregate_failures do
end
end
- describe ".with_duration" do
+ describe ".with_metadata" do
it 'records duration' do
- expect(::Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator)
- .to receive(:new).with(2, kind_of(Float))
+ result = described_class.with_metadata { 1 + 1 }
- described_class.with_duration { 1 + 1 }
+ expect(result.duration).to be_an(Float)
+ end
+
+ it 'records error and returns nil', :aggregate_failures do
+ allow(Gitlab::ErrorTracking).to receive(:track_and_raise_for_dev_exception)
+
+ result = described_class.with_metadata { raise }
+
+ expect(result.error).to be_an(StandardError)
+ expect(result).to be_nil
end
end
diff --git a/spec/lib/gitlab/utils/lazy_attributes_spec.rb b/spec/lib/gitlab/utils/lazy_attributes_spec.rb
index 1ebc9b0d711..430b79c3063 100644
--- a/spec/lib/gitlab/utils/lazy_attributes_spec.rb
+++ b/spec/lib/gitlab/utils/lazy_attributes_spec.rb
@@ -47,9 +47,9 @@ RSpec.describe Gitlab::Utils::LazyAttributes do
end
it 'only calls the block once even if it returned `nil`', :aggregate_failures do
- expect(instance.instance_variable_get('@number')).to receive(:call).once.and_call_original
- expect(instance.instance_variable_get('@accessor_2')).to receive(:call).once.and_call_original
- expect(instance.instance_variable_get('@incorrect_type')).to receive(:call).once.and_call_original
+ expect(instance.instance_variable_get(:@number)).to receive(:call).once.and_call_original
+ expect(instance.instance_variable_get(:@accessor_2)).to receive(:call).once.and_call_original
+ expect(instance.instance_variable_get(:@incorrect_type)).to receive(:call).once.and_call_original
2.times do
instance.number
diff --git a/spec/lib/gitlab/utils/strong_memoize_spec.rb b/spec/lib/gitlab/utils/strong_memoize_spec.rb
index 287858579d6..71f2502b91c 100644
--- a/spec/lib/gitlab/utils/strong_memoize_spec.rb
+++ b/spec/lib/gitlab/utils/strong_memoize_spec.rb
@@ -2,12 +2,13 @@
require 'fast_spec_helper'
require 'rspec-benchmark'
+require 'rspec-parameterized'
RSpec.configure do |config|
config.include RSpec::Benchmark::Matchers
end
-RSpec.describe Gitlab::Utils::StrongMemoize do
+RSpec.describe Gitlab::Utils::StrongMemoize, feature_category: :not_owned do
let(:klass) do
strong_memoize_class = described_class
@@ -35,15 +36,10 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
end
strong_memoize_attr :method_name_attr
- def different_method_name_attr
+ def enabled?
trace << value
value
end
- strong_memoize_attr :different_method_name_attr, :different_member_name_attr
-
- def enabled?
- true
- end
strong_memoize_attr :enabled?
def method_name_with_args(*args)
@@ -80,6 +76,8 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
subject(:object) { klass.new(value) }
shared_examples 'caching the value' do
+ let(:member_name) { described_class.normalize_key(method_name) }
+
it 'only calls the block once' do
value0 = object.send(method_name)
value1 = object.send(method_name)
@@ -103,7 +101,6 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
context "with value #{value}" do
let(:value) { value }
let(:method_name) { :method_name }
- let(:member_name) { :method_name }
it_behaves_like 'caching the value'
@@ -176,31 +173,44 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
end
describe '#strong_memoized?' do
- let(:value) { :anything }
+ shared_examples 'memoization check' do |method_name|
+ context "for #{method_name}" do
+ let(:value) { :anything }
- subject { object.strong_memoized?(:method_name) }
+ subject { object.strong_memoized?(method_name) }
- it 'returns false if the value is uncached' do
- is_expected.to be(false)
- end
+ it 'returns false if the value is uncached' do
+ is_expected.to be(false)
+ end
- it 'returns true if the value is cached' do
- object.method_name
+ it 'returns true if the value is cached' do
+ object.public_send(method_name)
- is_expected.to be(true)
+ is_expected.to be(true)
+ end
+ end
end
+
+ it_behaves_like 'memoization check', :method_name
+ it_behaves_like 'memoization check', :enabled?
end
describe '#clear_memoization' do
- let(:value) { 'mepmep' }
+ shared_examples 'clearing memoization' do |method_name|
+ let(:member_name) { described_class.normalize_key(method_name) }
+ let(:value) { 'mepmep' }
- it 'removes the instance variable' do
- object.method_name
+ it 'removes the instance variable' do
+ object.public_send(method_name)
- object.clear_memoization(:method_name)
+ object.clear_memoization(method_name)
- expect(object.instance_variable_defined?(:@method_name)).to be(false)
+ expect(object.instance_variable_defined?(:"@#{member_name}")).to be(false)
+ end
end
+
+ it_behaves_like 'clearing memoization', :method_name
+ it_behaves_like 'clearing memoization', :enabled?
end
describe '.strong_memoize_attr' do
@@ -209,7 +219,6 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
context "memoized after method definition with value #{value}" do
let(:method_name) { :method_name_attr }
- let(:member_name) { :method_name_attr }
it_behaves_like 'caching the value'
@@ -218,30 +227,7 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
end
it 'retains method arity' do
- expect(klass.instance_method(member_name).arity).to eq(0)
- end
- end
-
- context "memoized before method definition with different member name and value #{value}" do
- let(:method_name) { :different_method_name_attr }
- let(:member_name) { :different_member_name_attr }
-
- it_behaves_like 'caching the value'
-
- it 'calls the existing .method_added' do
- expect(klass.method_added_list).to include(:different_method_name_attr)
- end
- end
-
- context 'with valid method name' do
- let(:method_name) { :enabled? }
-
- context 'with invalid member name' do
- let(:member_name) { :enabled? }
-
- it 'is invalid' do
- expect { object.send(method_name) { value } }.to raise_error /is not allowed as an instance variable name/
- end
+ expect(klass.instance_method(method_name).arity).to eq(0)
end
end
end
@@ -299,4 +285,41 @@ RSpec.describe Gitlab::Utils::StrongMemoize do
end
end
end
+
+ describe '.normalize_key' do
+ using RSpec::Parameterized::TableSyntax
+
+ subject { described_class.normalize_key(input) }
+
+ where(:input, :output, :valid) do
+ :key | :key | true
+ "key" | "key" | true
+ :key? | "key?" | true
+ "key?" | "key?" | true
+ :key! | "key!" | true
+ "key!" | "key!" | true
+ # invalid cases caught elsewhere
+ :"ke?y" | :"ke?y" | false
+ "ke?y" | "ke?y" | false
+ :"ke!y" | :"ke!y" | false
+ "ke!y" | "ke!y" | false
+ end
+
+ with_them do
+ let(:ivar) { "@#{output}" }
+
+ it { is_expected.to eq(output) }
+
+ if params[:valid]
+ it 'is a valid ivar name' do
+ expect { instance_variable_defined?(ivar) }.not_to raise_error
+ end
+ else
+ it 'raises a NameError error' do
+ expect { instance_variable_defined?(ivar) }
+ .to raise_error(NameError, /not allowed as an instance/)
+ end
+ end
+ end
+ end
end
diff --git a/spec/lib/gitlab/utils/usage_data_spec.rb b/spec/lib/gitlab/utils/usage_data_spec.rb
index 13d046b0816..2925ceef256 100644
--- a/spec/lib/gitlab/utils/usage_data_spec.rb
+++ b/spec/lib/gitlab/utils/usage_data_spec.rb
@@ -31,9 +31,9 @@ RSpec.describe Gitlab::Utils::UsageData do
end
end
- describe '.with_duration' do
+ describe '.with_metadata' do
it 'yields passed block' do
- expect { |block| described_class.with_duration(&block) }.to yield_with_no_args
+ expect { |block| described_class.with_metadata(&block) }.to yield_with_no_args
end
end
@@ -55,7 +55,7 @@ RSpec.describe Gitlab::Utils::UsageData do
end
it 'records duration' do
- expect(described_class).to receive(:with_duration)
+ expect(described_class).to receive(:with_metadata)
allow(relation).to receive(:count).and_return(1)
described_class.count(relation, batch: false)
@@ -82,7 +82,7 @@ RSpec.describe Gitlab::Utils::UsageData do
end
it 'records duration' do
- expect(described_class).to receive(:with_duration)
+ expect(described_class).to receive(:with_metadata)
allow(relation).to receive(:distinct_count_by).and_return(1)
described_class.distinct_count(relation, batch: false)
@@ -242,7 +242,7 @@ RSpec.describe Gitlab::Utils::UsageData do
end
it 'records duration' do
- expect(described_class).to receive(:with_duration)
+ expect(described_class).to receive(:with_metadata)
allow(Gitlab::Database::BatchCount).to receive(:batch_sum).and_return(1)
described_class.sum(relation, :column)
@@ -272,7 +272,7 @@ RSpec.describe Gitlab::Utils::UsageData do
end
it 'records duration' do
- expect(described_class).to receive(:with_duration)
+ expect(described_class).to receive(:with_metadata)
allow(Gitlab::Database::BatchCount).to receive(:batch_average).and_return(1)
@@ -367,14 +367,14 @@ RSpec.describe Gitlab::Utils::UsageData do
end
it 'records duration' do
- expect(described_class).to receive(:with_duration)
+ expect(described_class).to receive(:with_metadata)
described_class.histogram(relation, column, buckets: 1..100)
end
context 'when query timeout' do
subject do
- with_statement_timeout(0.001) do
+ with_statement_timeout(0.001, connection: ApplicationRecord.connection) do
relation = AlertManagement::HttpIntegration.select('pg_sleep(0.002)')
described_class.histogram(relation, column, buckets: 1..100)
end
@@ -425,7 +425,7 @@ RSpec.describe Gitlab::Utils::UsageData do
end
it 'records duration' do
- expect(described_class).to receive(:with_duration)
+ expect(described_class).to receive(:with_metadata)
described_class.add
end
@@ -455,7 +455,7 @@ RSpec.describe Gitlab::Utils::UsageData do
end
it 'records duration' do
- expect(described_class).to receive(:with_duration)
+ expect(described_class).to receive(:with_metadata)
described_class.alt_usage_data
end
@@ -471,7 +471,7 @@ RSpec.describe Gitlab::Utils::UsageData do
describe '#redis_usage_data' do
it 'records duration' do
- expect(described_class).to receive(:with_duration)
+ expect(described_class).to receive(:with_metadata)
described_class.redis_usage_data
end
@@ -520,7 +520,7 @@ RSpec.describe Gitlab::Utils::UsageData do
describe '#with_prometheus_client' do
it 'records duration' do
- expect(described_class).to receive(:with_duration)
+ expect(described_class).to receive(:with_metadata)
described_class.with_prometheus_client { |client| client }
end
diff --git a/spec/lib/gitlab/version_info_spec.rb b/spec/lib/gitlab/version_info_spec.rb
index 078f952afad..99c7a762392 100644
--- a/spec/lib/gitlab/version_info_spec.rb
+++ b/spec/lib/gitlab/version_info_spec.rb
@@ -92,6 +92,8 @@ RSpec.describe Gitlab::VersionInfo do
it { expect(described_class.parse("1.0.0-rc1-ee")).to eq(@v1_0_0) }
it { expect(described_class.parse("git 1.0.0b1")).to eq(@v1_0_0) }
it { expect(described_class.parse("git 1.0b1")).not_to be_valid }
+ it { expect(described_class.parse("1.1.#{'1' * described_class::MAX_VERSION_LENGTH}")).not_to be_valid }
+ it { expect(described_class.parse(nil)).not_to be_valid }
context 'with parse_suffix: true' do
let(:versions) do
@@ -182,4 +184,10 @@ RSpec.describe Gitlab::VersionInfo do
it { expect(@v1_0_1.without_patch).to eq(@v1_0_0) }
it { expect(@v1_0_1_rc1.without_patch).to eq(@v1_0_0) }
end
+
+ describe 'MAX_VERSION_LENGTH' do
+ subject { described_class::MAX_VERSION_LENGTH }
+
+ it { is_expected.to eq(128) }
+ end
end
diff --git a/spec/lib/google_api/cloud_platform/client_spec.rb b/spec/lib/google_api/cloud_platform/client_spec.rb
index 82ab6c089da..4ea395830ad 100644
--- a/spec/lib/google_api/cloud_platform/client_spec.rb
+++ b/spec/lib/google_api/cloud_platform/client_spec.rb
@@ -6,7 +6,6 @@ require 'google/apis/sqladmin_v1beta4'
RSpec.describe GoogleApi::CloudPlatform::Client do
let(:token) { 'token' }
let(:client) { described_class.new(token, nil) }
- let(:user_agent_options) { client.instance_eval { user_agent_header } }
let(:gcp_project_id) { String('gcp_proj_id') }
let(:operation) { true }
let(:database_instance) { Google::Apis::SqladminV1beta4::DatabaseInstance.new(state: 'RUNNABLE') }
@@ -77,150 +76,6 @@ RSpec.describe GoogleApi::CloudPlatform::Client do
end
end
- describe '#projects_zones_clusters_get' do
- subject { client.projects_zones_clusters_get(spy, spy, spy) }
-
- let(:gke_cluster) { double }
-
- before do
- allow_any_instance_of(Google::Apis::ContainerV1::ContainerService)
- .to receive(:get_zone_cluster).with(any_args, options: user_agent_options)
- .and_return(gke_cluster)
- end
-
- it { is_expected.to eq(gke_cluster) }
- end
-
- describe '#projects_zones_clusters_create' do
- subject do
- client.projects_zones_clusters_create(
- project_id, zone, cluster_name, cluster_size, machine_type: machine_type, legacy_abac: legacy_abac, enable_addons: enable_addons)
- end
-
- let(:project_id) { 'project-123' }
- let(:zone) { 'us-central1-a' }
- let(:cluster_name) { 'test-cluster' }
- let(:cluster_size) { 1 }
- let(:machine_type) { 'n1-standard-2' }
- let(:legacy_abac) { true }
- let(:enable_addons) { [] }
-
- let(:addons_config) do
- enable_addons.index_with do
- { disabled: false }
- end
- end
-
- let(:cluster_options) do
- {
- cluster: {
- name: cluster_name,
- initial_node_count: cluster_size,
- node_config: {
- machine_type: machine_type,
- oauth_scopes: [
- "https://www.googleapis.com/auth/devstorage.read_only",
- "https://www.googleapis.com/auth/logging.write",
- "https://www.googleapis.com/auth/monitoring"
- ]
- },
- master_auth: {
- client_certificate_config: {
- issue_client_certificate: true
- }
- },
- legacy_abac: {
- enabled: legacy_abac
- },
- ip_allocation_policy: {
- use_ip_aliases: true,
- cluster_ipv4_cidr_block: '/16'
- },
- addons_config: addons_config
- }
- }
- end
-
- let(:create_cluster_request_body) { double('Google::Apis::ContainerV1beta1::CreateClusterRequest') }
- let(:operation) { double }
-
- before do
- allow_any_instance_of(Google::Apis::ContainerV1beta1::ContainerService)
- .to receive(:create_cluster).with(any_args)
- .and_return(operation)
- end
-
- it 'sets corresponded parameters' do
- expect_any_instance_of(Google::Apis::ContainerV1beta1::ContainerService)
- .to receive(:create_cluster).with(project_id, zone, create_cluster_request_body, options: user_agent_options)
-
- expect(Google::Apis::ContainerV1beta1::CreateClusterRequest)
- .to receive(:new).with(cluster_options).and_return(create_cluster_request_body)
-
- expect(subject).to eq operation
- end
-
- context 'create without legacy_abac' do
- let(:legacy_abac) { false }
-
- it 'sets corresponded parameters' do
- expect_any_instance_of(Google::Apis::ContainerV1beta1::ContainerService)
- .to receive(:create_cluster).with(project_id, zone, create_cluster_request_body, options: user_agent_options)
-
- expect(Google::Apis::ContainerV1beta1::CreateClusterRequest)
- .to receive(:new).with(cluster_options).and_return(create_cluster_request_body)
-
- expect(subject).to eq operation
- end
- end
-
- context 'create with enable_addons for cloud_run' do
- let(:enable_addons) { [:http_load_balancing, :istio_config, :cloud_run_config] }
-
- it 'sets corresponded parameters' do
- expect_any_instance_of(Google::Apis::ContainerV1beta1::ContainerService)
- .to receive(:create_cluster).with(project_id, zone, create_cluster_request_body, options: user_agent_options)
-
- expect(Google::Apis::ContainerV1beta1::CreateClusterRequest)
- .to receive(:new).with(cluster_options).and_return(create_cluster_request_body)
-
- expect(subject).to eq operation
- end
- end
- end
-
- describe '#projects_zones_operations' do
- subject { client.projects_zones_operations(spy, spy, spy) }
-
- let(:operation) { double }
-
- before do
- allow_any_instance_of(Google::Apis::ContainerV1::ContainerService)
- .to receive(:get_zone_operation).with(any_args, options: user_agent_options)
- .and_return(operation)
- end
-
- it { is_expected.to eq(operation) }
- end
-
- describe '#parse_operation_id' do
- subject { client.parse_operation_id(self_link) }
-
- context 'when expected url' do
- let(:self_link) do
- 'projects/gcp-project-12345/zones/us-central1-a/operations/ope-123'
- end
-
- it { is_expected.to eq('ope-123') }
- end
-
- context 'when unexpected url' do
- let(:self_link) { '???' }
-
- it { is_expected.to be_nil }
- end
- end
-
describe '#user_agent_header' do
subject { client.instance_eval { user_agent_header } }
diff --git a/spec/lib/object_storage/direct_upload_spec.rb b/spec/lib/object_storage/direct_upload_spec.rb
index c2201fb60ac..569e6a3a7c6 100644
--- a/spec/lib/object_storage/direct_upload_spec.rb
+++ b/spec/lib/object_storage/direct_upload_spec.rb
@@ -234,6 +234,7 @@ RSpec.describe ObjectStorage::DirectUpload do
expect(subject[:GetURL]).to start_with(storage_url)
expect(subject[:StoreURL]).to start_with(storage_url)
expect(subject[:DeleteURL]).to start_with(storage_url)
+ expect(subject[:SkipDelete]).to eq(false)
expect(subject[:CustomPutHeaders]).to be_truthy
expect(subject[:PutHeaders]).to eq({})
end
diff --git a/spec/lib/sidebars/groups/menus/observability_menu_spec.rb b/spec/lib/sidebars/groups/menus/observability_menu_spec.rb
index 3a91b1aea2f..5b993cd6f28 100644
--- a/spec/lib/sidebars/groups/menus/observability_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/observability_menu_spec.rb
@@ -20,23 +20,25 @@ RSpec.describe Sidebars::Groups::Menus::ObservabilityMenu do
allow(menu).to receive(:can?).and_call_original
end
- context 'when user can :read_observability' do
+ context 'when observability is enabled' do
before do
- allow(menu).to receive(:can?).with(user, :read_observability, group).and_return(true)
+ allow(Gitlab::Observability).to receive(:observability_enabled?).and_return(true)
end
it 'returns true' do
expect(menu.render?).to eq true
+ expect(Gitlab::Observability).to have_received(:observability_enabled?).with(user, group)
end
end
- context 'when user cannot :read_observability' do
+ context 'when observability is disabled' do
before do
- allow(menu).to receive(:can?).with(user, :read_observability, group).and_return(false)
+ allow(Gitlab::Observability).to receive(:observability_enabled?).and_return(false)
end
it 'returns false' do
expect(menu.render?).to eq false
+ expect(Gitlab::Observability).to have_received(:observability_enabled?).with(user, group)
end
end
end
diff --git a/spec/lib/sidebars/groups/menus/settings_menu_spec.rb b/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
index 4e3c639672b..c5246fe93dd 100644
--- a/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
+++ b/spec/lib/sidebars/groups/menus/settings_menu_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Sidebars::Groups::Menus::SettingsMenu do
+RSpec.describe Sidebars::Groups::Menus::SettingsMenu, :with_license do
let_it_be(:owner) { create(:user) }
let_it_be_with_refind(:group) do
diff --git a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
index 0733e0c6521..c7aca0fb97e 100644
--- a/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
+++ b/spec/lib/sidebars/projects/menus/settings_menu_spec.rb
@@ -10,6 +10,10 @@ RSpec.describe Sidebars::Projects::Menus::SettingsMenu do
subject { described_class.new(context) }
+ before do
+ stub_feature_flags(show_pages_in_deployments_menu: false)
+ end
+
describe '#render?' do
it 'returns false when menu does not have any menu items' do
allow(subject).to receive(:has_renderable_items?).and_return(false)
diff --git a/spec/lib/sidebars/your_work/menus/issues_menu_spec.rb b/spec/lib/sidebars/your_work/menus/issues_menu_spec.rb
new file mode 100644
index 00000000000..a1206c0bc1c
--- /dev/null
+++ b/spec/lib/sidebars/your_work/menus/issues_menu_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::YourWork::Menus::IssuesMenu, feature_category: :navigation do
+ let(:user) { create(:user) }
+ let(:context) { Sidebars::Context.new(current_user: user, container: nil) }
+
+ subject { described_class.new(context) }
+
+ include_examples 'menu item shows pill based on count', :assigned_open_issues_count
+end
diff --git a/spec/lib/sidebars/your_work/menus/merge_requests_menu_spec.rb b/spec/lib/sidebars/your_work/menus/merge_requests_menu_spec.rb
new file mode 100644
index 00000000000..b3251a54178
--- /dev/null
+++ b/spec/lib/sidebars/your_work/menus/merge_requests_menu_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::YourWork::Menus::MergeRequestsMenu, feature_category: :navigation do
+ let(:user) { create(:user) }
+ let(:context) { Sidebars::Context.new(current_user: user, container: nil) }
+
+ subject { described_class.new(context) }
+
+ include_examples 'menu item shows pill based on count', :assigned_open_merge_requests_count
+end
diff --git a/spec/lib/sidebars/your_work/menus/todos_menu_spec.rb b/spec/lib/sidebars/your_work/menus/todos_menu_spec.rb
new file mode 100644
index 00000000000..a8177a6a01b
--- /dev/null
+++ b/spec/lib/sidebars/your_work/menus/todos_menu_spec.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Sidebars::YourWork::Menus::TodosMenu, feature_category: :navigation do
+ let(:user) { create(:user) }
+ let(:context) { Sidebars::Context.new(current_user: user, container: nil) }
+
+ subject { described_class.new(context) }
+
+ include_examples 'menu item shows pill based on count', :todos_pending_count
+end
diff --git a/spec/lib/unnested_in_filters/rewriter_spec.rb b/spec/lib/unnested_in_filters/rewriter_spec.rb
index fe34fba579b..bba27276037 100644
--- a/spec/lib/unnested_in_filters/rewriter_spec.rb
+++ b/spec/lib/unnested_in_filters/rewriter_spec.rb
@@ -69,15 +69,21 @@ RSpec.describe UnnestedInFilters::Rewriter do
let(:recorded_queries) { ActiveRecord::QueryRecorder.new { rewriter.rewrite.load } }
let(:relation) { User.where(state: :active, user_type: %i(support_bot alert_bot)).limit(2) }
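+ # The columns from User.default_select_columns, quoted and comma-joined as they appear in the generated SQL.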
+ let(:users_default_select_fields) do
+ User.default_select_columns
+ .map { |field| "\"users\".\"#{field.name}\"" }
+ .join(',')
+ end
+
let(:expected_query) do
<<~SQL
SELECT
- "users".*
+ #{users_default_select_fields}
FROM
unnest('{1,2}'::smallint[]) AS "user_types"("user_type"),
LATERAL (
SELECT
- "users".*
+ #{users_default_select_fields}
FROM
"users"
WHERE
@@ -101,13 +107,13 @@ RSpec.describe UnnestedInFilters::Rewriter do
let(:expected_query) do
<<~SQL
SELECT
- "users".*
+ #{users_default_select_fields}
FROM
unnest(ARRAY(SELECT "users"."state" FROM "users")::character varying[]) AS "states"("state"),
unnest('{1,2}'::smallint[]) AS "user_types"("user_type"),
LATERAL (
SELECT
- "users".*
+ #{users_default_select_fields}
FROM
"users"
WHERE
@@ -129,12 +135,12 @@ RSpec.describe UnnestedInFilters::Rewriter do
let(:expected_query) do
<<~SQL
SELECT
- "users".*
+ #{users_default_select_fields}
FROM
unnest('{active,blocked,banned}'::character varying[]) AS "states"("state"),
LATERAL (
SELECT
- "users".*
+ #{users_default_select_fields}
FROM
"users"
WHERE
@@ -181,8 +187,6 @@ RSpec.describe UnnestedInFilters::Rewriter do
let(:expected_query) do
<<~SQL
- SELECT
- "users".*
FROM
"users"
WHERE
@@ -217,7 +221,7 @@ RSpec.describe UnnestedInFilters::Rewriter do
end
it 'changes the query' do
- expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+ expect(issued_query.gsub(/\s/, '')).to include(expected_query.gsub(/\s/, ''))
end
end
@@ -226,8 +230,6 @@ RSpec.describe UnnestedInFilters::Rewriter do
let(:expected_query) do
<<~SQL
- SELECT
- "users".*
FROM
"users"
WHERE
@@ -257,7 +259,7 @@ RSpec.describe UnnestedInFilters::Rewriter do
end
it 'does not rewrite the in statement for the joined table' do
- expect(issued_query.gsub(/\s/, '')).to start_with(expected_query.gsub(/\s/, ''))
+ expect(issued_query.gsub(/\s/, '')).to include(expected_query.gsub(/\s/, ''))
end
end
diff --git a/spec/mailers/devise_mailer_spec.rb b/spec/mailers/devise_mailer_spec.rb
index b8e768b7a5f..6eb0e817803 100644
--- a/spec/mailers/devise_mailer_spec.rb
+++ b/spec/mailers/devise_mailer_spec.rb
@@ -131,6 +131,10 @@ RSpec.describe DeviseMailer do
it 'includes a link to reset the password' do
is_expected.to have_link("Reset password", href: "#{Gitlab.config.gitlab.url}/users/password/edit?reset_password_token=faketoken")
end
+
+ it 'has the mailgun suppression bypass header' do
+ is_expected.to have_header 'X-Mailgun-Suppressions-Bypass', 'true'
+ end
end
describe '#email_changed' do
diff --git a/spec/mailers/emails/imports_spec.rb b/spec/mailers/emails/imports_spec.rb
new file mode 100644
index 00000000000..039113d3098
--- /dev/null
+++ b/spec/mailers/emails/imports_spec.rb
@@ -0,0 +1,29 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require 'email_spec'
+
+RSpec.describe Emails::Imports, feature_category: :importers do
+ include EmailSpec::Matchers
+
+ let(:errors) { { 'gist_id1' => "Title can't be blank", 'gist_id2' => 'Snippet maximum file count exceeded' } }
+ let(:user) { build_stubbed(:user) }
+
+ describe '#github_gists_import_errors_email' do
+ subject { Notify.github_gists_import_errors_email('user_id', errors) }
+
+ before do
+ allow(User).to receive(:find).and_return(user)
+ end
+
+ it 'sends the errors email' do
+ expect(subject).to have_subject('GitHub Gists import finished with errors')
+ expect(subject).to have_content('GitHub gists that were not imported:')
+ expect(subject).to have_content("Gist with id gist_id1 failed due to error: Title can't be blank.")
+ expect(subject).to have_content('Gist with id gist_id2 failed due to error: Snippet maximum file count exceeded.')
+ end
+
+ it_behaves_like 'appearance header and footer enabled'
+ it_behaves_like 'appearance header and footer not enabled'
+ end
+end
diff --git a/spec/mailers/emails/profile_spec.rb b/spec/mailers/emails/profile_spec.rb
index cdc298d685e..1fd2a92866d 100644
--- a/spec/mailers/emails/profile_spec.rb
+++ b/spec/mailers/emails/profile_spec.rb
@@ -198,9 +198,10 @@ RSpec.describe Emails::Profile do
describe 'user personal access token has expired' do
let_it_be(:user) { create(:user) }
+ let_it_be(:pat) { create(:personal_access_token, user: user) }
context 'when valid' do
- subject { Notify.access_token_expired_email(user) }
+ subject { Notify.access_token_expired_email(user, [pat.name]) }
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
@@ -211,11 +212,12 @@ RSpec.describe Emails::Profile do
end
it 'has the correct subject' do
- is_expected.to have_subject /Your personal access token has expired/
+ is_expected.to have_subject /Your personal access tokens have expired/
end
it 'mentions the access token has expired' do
- is_expected.to have_body_text /One or more of your personal access tokens has expired/
+ is_expected.to have_body_text /The following personal access tokens have expired:/
+ is_expected.to have_body_text /#{pat.name}/
end
it 'includes a link to personal access tokens page' do
@@ -279,7 +281,7 @@ RSpec.describe Emails::Profile do
end
context 'when source is provided' do
- subject { Notify.access_token_revoked_email(user, token.name, 'secret_detection') }
+ subject { Notify.access_token_revoked_email(user, token.name, :secret_detection) }
it_behaves_like 'an email sent from GitLab'
it_behaves_like 'it should not have Gmail Actions links'
diff --git a/spec/mailers/emails/service_desk_spec.rb b/spec/mailers/emails/service_desk_spec.rb
index 1523d9b986b..e753bf2c76c 100644
--- a/spec/mailers/emails/service_desk_spec.rb
+++ b/spec/mailers/emails/service_desk_spec.rb
@@ -58,7 +58,7 @@ RSpec.describe Emails::ServiceDesk do
end
end
- shared_examples 'handle template content' do |template_key|
+ shared_examples 'handle template content' do |template_key, attachments_count|
before do
expect(Gitlab::Template::ServiceDeskTemplate).to receive(:find)
.with(template_key, issue.project)
@@ -69,6 +69,7 @@ RSpec.describe Emails::ServiceDesk do
aggregate_failures do
is_expected.to have_referable_subject(issue, include_project: false, reply: reply_in_subject)
is_expected.to have_body_text(expected_body)
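+ # attachments_count is optional; nil.to_i yields 0 when the shared example is included without it.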
+ expect(subject.attachments.count).to eq(attachments_count.to_i)
expect(subject.content_type).to include('text/html')
end
end
@@ -195,13 +196,102 @@ RSpec.describe Emails::ServiceDesk do
end
context 'with upload link in the note' do
- let_it_be(:upload_path) { '/uploads/e90decf88d8f96fe9e1389afc2e4a91f/test.jpg' }
- let_it_be(:note) { create(:note_on_issue, noteable: issue, project: project, note: "a new comment with [file](#{upload_path})") }
-
- let(:template_content) { 'some text %{ NOTE_TEXT }' }
- let(:expected_body) { %Q(some text a new comment with <a href="#{project.web_url}#{upload_path}" data-canonical-src="#{upload_path}" data-link="true" class="gfm">file</a>) }
-
- it_behaves_like 'handle template content', 'new_note'
+ let_it_be(:secret) { 'e90decf88d8f96fe9e1389afc2e4a91f' }
+ let_it_be(:filename) { 'test.jpg' }
+ let_it_be(:path) { "#{secret}/#{filename}" }
+ let_it_be(:upload_path) { "/uploads/#{path}" }
+ let_it_be(:template_content) { 'some text %{ NOTE_TEXT }' }
+ let_it_be(:note) { create(:note_on_issue, noteable: issue, project: project, note: "a new comment with [#{filename}](#{upload_path})") }
+ let!(:upload) { create(:upload, :issuable_upload, :with_file, model: note.project, path: path, secret: secret) }
+
+ context 'when total uploads size is more than 10mb' do
+ before do
+ allow_next_instance_of(FileUploader) do |instance|
+ allow(instance).to receive(:size).and_return(10.1.megabytes)
+ end
+ end
+
+ let_it_be(:expected_body) { %Q(some text a new comment with <a href="#{project.web_url}#{upload_path}" data-canonical-src="#{upload_path}" data-link="true" class="gfm">#{filename}</a>) }
+
+ it_behaves_like 'handle template content', 'new_note'
+ end
+
+ context 'when total uploads size is less than or equal to 10mb' do
+ context 'when it has only one upload' do
+ before do
+ allow_next_instance_of(FileUploader) do |instance|
+ allow(instance).to receive(:size).and_return(10.megabytes)
+ end
+ end
+
+ context 'when upload name is not changed in markdown' do
+ let_it_be(:expected_body) { %Q(some text a new comment with <strong>#{filename}</strong>) }
+
+ it_behaves_like 'handle template content', 'new_note', 1
+ end
+
+ context 'when upload name is changed in markdown' do
+ let_it_be(:upload_name_in_markdown) { 'Custom name' }
+ let_it_be(:note) { create(:note_on_issue, noteable: issue, project: project, note: "a new comment with [#{upload_name_in_markdown}](#{upload_path})") }
+ let_it_be(:expected_body) { %Q(some text a new comment with <strong>#{upload_name_in_markdown} (#{filename})</strong>) }
+
+ it_behaves_like 'handle template content', 'new_note', 1
+ end
+ end
+
+ context 'when it has more than one upload' do
+ before do
+ allow_next_instance_of(FileUploader) do |instance|
+ allow(instance).to receive(:size).and_return(5.megabytes)
+ end
+ end
+
+ let_it_be(:secret_1) { '17817c73e368777e6f743392e334fb8a' }
+ let_it_be(:filename_1) { 'test1.jpg' }
+ let_it_be(:path_1) { "#{secret_1}/#{filename_1}" }
+ let_it_be(:upload_path_1) { "/uploads/#{path_1}" }
+ let_it_be(:note) { create(:note_on_issue, noteable: issue, project: project, note: "a new comment with [#{filename}](#{upload_path}) [#{filename_1}](#{upload_path_1})") }
+
+ context 'when all uploads are processed correctly' do
+ let_it_be(:upload_1) { create(:upload, :issuable_upload, :with_file, model: note.project, path: path_1, secret: secret_1) }
+ let_it_be(:expected_body) { %Q(some text a new comment with <strong>#{filename}</strong> <strong>#{filename_1}</strong>) }
+
+ it_behaves_like 'handle template content', 'new_note', 2
+ end
+
+ context 'when not all uploads are processed correctly' do
+ let_it_be(:expected_body) { %Q(some text a new comment with <strong>#{filename}</strong> <a href="#{project.web_url}#{upload_path_1}" data-canonical-src="#{upload_path_1}" data-link="true" class="gfm">#{filename_1}</a>) }
+
+ it_behaves_like 'handle template content', 'new_note', 1
+ end
+ end
+ end
+
+ context 'when UploaderFinder raises an error' do
+ before do
+ allow_next_instance_of(UploaderFinder) do |instance|
+ allow(instance).to receive(:execute).and_raise(StandardError)
+ end
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(StandardError, project_id: note.project_id)
+ end
+
+ let_it_be(:expected_body) { %Q(some text a new comment with <a href="#{project.web_url}#{upload_path}" data-canonical-src="#{upload_path}" data-link="true" class="gfm">#{filename}</a>) }
+
+ it_behaves_like 'handle template content', 'new_note'
+ end
+
+ context 'when FileUploader raises an error' do
+ before do
+ allow_next_instance_of(FileUploader) do |instance|
+ allow(instance).to receive(:read).and_raise(StandardError)
+ end
+ expect(Gitlab::ErrorTracking).to receive(:track_exception).with(StandardError, project_id: note.project_id)
+ end
+
+ let_it_be(:expected_body) { %Q(some text a new comment with <a href="#{project.web_url}#{upload_path}" data-canonical-src="#{upload_path}" data-link="true" class="gfm">#{filename}</a>) }
+
+ it_behaves_like 'handle template content', 'new_note'
+ end
end
context 'with all-user reference in an external author comment' do
diff --git a/spec/metrics_server/metrics_server_spec.rb b/spec/metrics_server/metrics_server_spec.rb
index 58577d4d633..efa716754f1 100644
--- a/spec/metrics_server/metrics_server_spec.rb
+++ b/spec/metrics_server/metrics_server_spec.rb
@@ -99,20 +99,22 @@ RSpec.describe MetricsServer, feature_category: :application_performance do # ru
context 'for Golang server' do
let(:log_enabled) { false }
let(:settings) do
- {
- 'web_exporter' => {
- 'enabled' => true,
- 'address' => 'localhost',
- 'port' => '8083',
- 'log_enabled' => log_enabled
- },
- 'sidekiq_exporter' => {
- 'enabled' => true,
- 'address' => 'localhost',
- 'port' => '8082',
- 'log_enabled' => log_enabled
+ Settingslogic.new(
+ {
+ 'web_exporter' => {
+ 'enabled' => true,
+ 'address' => 'localhost',
+ 'port' => '8083',
+ 'log_enabled' => log_enabled
+ },
+ 'sidekiq_exporter' => {
+ 'enabled' => true,
+ 'address' => 'localhost',
+ 'port' => '8082',
+ 'log_enabled' => log_enabled
+ }
}
- }
+ )
end
let(:expected_port) { target == 'puma' ? '8083' : '8082' }
@@ -120,7 +122,7 @@ RSpec.describe MetricsServer, feature_category: :application_performance do # ru
{
'GOGC' => '10',
'GME_MMAP_METRICS_DIR' => metrics_dir,
- 'GME_PROBES' => 'self,mmap',
+ 'GME_PROBES' => 'self,mmap,mmap_stats',
'GME_SERVER_HOST' => 'localhost',
'GME_SERVER_PORT' => expected_port,
'GME_LOG_LEVEL' => 'quiet'
@@ -175,11 +177,13 @@ RSpec.describe MetricsServer, feature_category: :application_performance do # ru
context 'when TLS settings are present' do
before do
- %w(web_exporter sidekiq_exporter).each do |key|
- settings[key]['tls_enabled'] = true
- settings[key]['tls_cert_path'] = '/path/to/cert.pem'
- settings[key]['tls_key_path'] = '/path/to/key.pem'
- end
+ settings.web_exporter['tls_enabled'] = true
+ settings.web_exporter['tls_cert_path'] = '/path/to/cert.pem'
+ settings.web_exporter['tls_key_path'] = '/path/to/key.pem'
+
+ settings.sidekiq_exporter['tls_enabled'] = true
+ settings.sidekiq_exporter['tls_cert_path'] = '/path/to/cert.pem'
+ settings.sidekiq_exporter['tls_key_path'] = '/path/to/key.pem'
end
it 'sets the correct environment variables' do
@@ -300,12 +304,12 @@ RSpec.describe MetricsServer, feature_category: :application_performance do # ru
end
context 'for sidekiq' do
- let(:settings) { { "sidekiq_exporter" => { "enabled" => true } } }
+ let(:settings) { Settingslogic.new({ "sidekiq_exporter" => { "enabled" => true } }) }
before do
allow(::Settings).to receive(:monitoring).and_return(settings)
allow(Gitlab::Metrics::Exporter::SidekiqExporter).to receive(:instance).with(
- settings['sidekiq_exporter'], gc_requests: true, synchronous: true
+ settings.sidekiq_exporter, gc_requests: true, synchronous: true
).and_return(exporter_double)
end
@@ -358,4 +362,28 @@ RSpec.describe MetricsServer, feature_category: :application_performance do # ru
end
end
end
+
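+ # .name maps the metrics target ('puma' or 'sidekiq') to its exporter settings key.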
+ describe '.name' do
+ subject { described_class.name(target) }
+
+ context 'for puma' do
+ let(:target) { 'puma' }
+
+ it { is_expected.to eq 'web_exporter' }
+ end
+
+ context 'for sidekiq' do
+ let(:target) { 'sidekiq' }
+
+ it { is_expected.to eq 'sidekiq_exporter' }
+ end
+
+ context 'for invalid target' do
+ let(:target) { 'invalid' }
+
+ it 'raises error' do
+ expect { subject }.to raise_error(RuntimeError, "Target must be one of [puma,sidekiq]")
+ end
+ end
+ end
end
diff --git a/spec/migrations/20210406144743_backfill_total_tuple_count_for_batched_migrations_spec.rb b/spec/migrations/20210406144743_backfill_total_tuple_count_for_batched_migrations_spec.rb
deleted file mode 100644
index 18aa8e92560..00000000000
--- a/spec/migrations/20210406144743_backfill_total_tuple_count_for_batched_migrations_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillTotalTupleCountForBatchedMigrations, :migration, schema: 20210406140057,
- feature_category: :database do
- let!(:table_name) { 'projects' }
-
- let!(:migrations) { table(:batched_background_migrations) }
-
- let!(:migration) do
- migrations.create!(
- created_at: Time.now,
- updated_at: Time.now,
- min_value: 1,
- max_value: 10_000,
- batch_size: 1_000,
- sub_batch_size: 100,
- interval: 120,
- status: 0,
- job_class_name: 'Foo',
- table_name: table_name,
- column_name: :id,
- total_tuple_count: nil
- )
- end
-
- describe '#up' do
- before do
- expect(Gitlab::Database::PgClass).to receive(:for_table).with(table_name).and_return(estimate)
- end
-
- let(:estimate) { double('estimate', cardinality_estimate: 42) }
-
- it 'updates total_tuple_count attribute' do
- migrate!
-
- migrations.all.each do |migration|
- expect(migration.total_tuple_count).to eq(estimate.cardinality_estimate)
- end
- end
- end
-end
diff --git a/spec/migrations/20210423160427_schedule_drop_invalid_vulnerabilities_spec.rb b/spec/migrations/20210423160427_schedule_drop_invalid_vulnerabilities_spec.rb
deleted file mode 100644
index 258bf7a3e69..00000000000
--- a/spec/migrations/20210423160427_schedule_drop_invalid_vulnerabilities_spec.rb
+++ /dev/null
@@ -1,114 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleDropInvalidVulnerabilities, :migration, feature_category: :value_stream_management do
- let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let!(:users) { table(:users) }
- let!(:user) { create_user! }
- let!(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
-
- let!(:scanners) { table(:vulnerability_scanners) }
- let!(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
- let!(:different_scanner) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
-
- let!(:vulnerabilities) { table(:vulnerabilities) }
- let!(:vulnerability_with_finding) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let!(:vulnerability_without_finding) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let!(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
- let!(:primary_identifier) do
- vulnerability_identifiers.create!(
- project_id: project.id,
- external_type: 'uuid-v5',
- external_id: 'uuid-v5',
- fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
- name: 'Identifier for UUIDv5')
- end
-
- let!(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
- let!(:finding) do
- create_finding!(
- vulnerability_id: vulnerability_with_finding.id,
- project_id: project.id,
- scanner_id: scanner.id,
- primary_identifier_id: primary_identifier.id
- )
- end
-
- before do
- stub_const("#{described_class}::BATCH_SIZE", 1)
- end
-
- around do |example|
- freeze_time { Sidekiq::Testing.fake! { example.run } }
- end
-
- it 'schedules background migrations' do
- migrate!
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- expect(described_class::MIGRATION).to be_scheduled_migration(vulnerability_with_finding.id, vulnerability_with_finding.id)
- expect(described_class::MIGRATION).to be_scheduled_migration(vulnerability_without_finding.id, vulnerability_without_finding.id)
- end
-
- private
-
- def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
- vulnerabilities.create!(
- project_id: project_id,
- author_id: author_id,
- title: title,
- severity: severity,
- confidence: confidence,
- report_type: report_type
- )
- end
-
- # rubocop:disable Metrics/ParameterLists
- def create_finding!(
- vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:,
- name: "test", severity: 7, confidence: 7, report_type: 0,
- project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
- metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid)
- vulnerabilities_findings.create!(
- vulnerability_id: vulnerability_id,
- project_id: project_id,
- name: name,
- severity: severity,
- confidence: confidence,
- report_type: report_type,
- project_fingerprint: project_fingerprint,
- scanner_id: scanner_id,
- primary_identifier_id: primary_identifier_id,
- location_fingerprint: location_fingerprint,
- metadata_version: metadata_version,
- raw_metadata: raw_metadata,
- uuid: uuid
- )
- end
- # rubocop:enable Metrics/ParameterLists
-
- def create_user!(name: "Example User", email: "user@example.com", user_type: nil)
- users.create!(
- name: name,
- email: email,
- username: name,
- projects_limit: 0,
- user_type: user_type,
- confirmed_at: Time.current
- )
- end
-end
diff --git a/spec/migrations/20210430134202_copy_adoption_snapshot_namespace_spec.rb b/spec/migrations/20210430134202_copy_adoption_snapshot_namespace_spec.rb
deleted file mode 100644
index 688fc5eb23a..00000000000
--- a/spec/migrations/20210430134202_copy_adoption_snapshot_namespace_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-#
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe CopyAdoptionSnapshotNamespace, :migration, schema: 20210430124630, feature_category: :devops_reports do
- let(:namespaces_table) { table(:namespaces) }
- let(:segments_table) { table(:analytics_devops_adoption_segments) }
- let(:snapshots_table) { table(:analytics_devops_adoption_snapshots) }
-
- it 'updates all snapshots without namespace set' do
- namespaces_table.create!(id: 123, name: 'group1', path: 'group1')
- namespaces_table.create!(id: 124, name: 'group2', path: 'group2')
-
- segments_table.create!(id: 1, namespace_id: 123)
- segments_table.create!(id: 2, namespace_id: 124)
-
- create_snapshot(id: 1, segment_id: 1)
- create_snapshot(id: 2, segment_id: 2)
- create_snapshot(id: 3, segment_id: 2, namespace_id: 123)
-
- migrate!
-
- expect(snapshots_table.find(1).namespace_id).to eq 123
- expect(snapshots_table.find(2).namespace_id).to eq 124
- expect(snapshots_table.find(3).namespace_id).to eq 123
- end
-
- def create_snapshot(**additional_params)
- defaults = {
- recorded_at: Time.zone.now,
- issue_opened: true,
- merge_request_opened: true,
- merge_request_approved: true,
- runner_configured: true,
- pipeline_succeeded: true,
- deploy_succeeded: true,
- end_time: Time.zone.now.end_of_month
- }
-
- snapshots_table.create!(defaults.merge(additional_params))
- end
-end
diff --git a/spec/migrations/20210430135954_copy_adoption_segments_namespace_spec.rb b/spec/migrations/20210430135954_copy_adoption_segments_namespace_spec.rb
deleted file mode 100644
index 0fb3029ec6a..00000000000
--- a/spec/migrations/20210430135954_copy_adoption_segments_namespace_spec.rb
+++ /dev/null
@@ -1,25 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe CopyAdoptionSegmentsNamespace, :migration, feature_category: :devops_reports do
- let(:namespaces_table) { table(:namespaces) }
- let(:segments_table) { table(:analytics_devops_adoption_segments) }
-
- before do
- namespaces_table.create!(id: 123, name: 'group1', path: 'group1')
- namespaces_table.create!(id: 124, name: 'group2', path: 'group2')
-
- segments_table.create!(id: 1, namespace_id: 123, display_namespace_id: nil)
- segments_table.create!(id: 2, namespace_id: 124, display_namespace_id: 123)
- end
-
- it 'updates all segments without display namespace' do
- migrate!
-
- expect(segments_table.find(1).display_namespace_id).to eq 123
- expect(segments_table.find(2).display_namespace_id).to eq 123
- end
-end
diff --git a/spec/migrations/20210503105845_add_project_value_stream_id_to_project_stages_spec.rb b/spec/migrations/20210503105845_add_project_value_stream_id_to_project_stages_spec.rb
deleted file mode 100644
index 07a90c2d276..00000000000
--- a/spec/migrations/20210503105845_add_project_value_stream_id_to_project_stages_spec.rb
+++ /dev/null
@@ -1,42 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe AddProjectValueStreamIdToProjectStages, schema: 20210503105022,
- feature_category: :value_stream_management do
- let(:stages) { table(:analytics_cycle_analytics_project_stages) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- let(:namespace) { table(:namespaces).create!(name: 'ns1', path: 'nsq1') }
-
- before do
- project = projects.create!(name: 'p1', namespace_id: namespace.id)
-
- stages.create!(
- project_id: project.id,
- created_at: Time.now,
- updated_at: Time.now,
- start_event_identifier: 1,
- end_event_identifier: 2,
- name: 'stage 1'
- )
-
- stages.create!(
- project_id: project.id,
- created_at: Time.now,
- updated_at: Time.now,
- start_event_identifier: 3,
- end_event_identifier: 4,
- name: 'stage 2'
- )
- end
-
- it 'deletes the existing rows' do
- migrate!
-
- expect(stages.count).to eq(0)
- end
-end
diff --git a/spec/migrations/20210511142748_schedule_drop_invalid_vulnerabilities2_spec.rb b/spec/migrations/20210511142748_schedule_drop_invalid_vulnerabilities2_spec.rb
deleted file mode 100644
index b514c92c52d..00000000000
--- a/spec/migrations/20210511142748_schedule_drop_invalid_vulnerabilities2_spec.rb
+++ /dev/null
@@ -1,120 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleDropInvalidVulnerabilities2, :migration, feature_category: :value_stream_management do
- let!(:background_migration_jobs) { table(:background_migration_jobs) }
-
- let!(:namespace) { table(:namespaces).create!(name: 'user', path: 'user') }
- let!(:users) { table(:users) }
- let!(:user) { create_user! }
- let!(:project) { table(:projects).create!(id: 123, namespace_id: namespace.id) }
-
- let!(:scanners) { table(:vulnerability_scanners) }
- let!(:scanner) { scanners.create!(project_id: project.id, external_id: 'test 1', name: 'test scanner 1') }
- let!(:different_scanner) { scanners.create!(project_id: project.id, external_id: 'test 2', name: 'test scanner 2') }
-
- let!(:vulnerabilities) { table(:vulnerabilities) }
- let!(:vulnerability_with_finding) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let!(:vulnerability_without_finding) do
- create_vulnerability!(
- project_id: project.id,
- author_id: user.id
- )
- end
-
- let!(:vulnerability_identifiers) { table(:vulnerability_identifiers) }
- let!(:primary_identifier) do
- vulnerability_identifiers.create!(
- project_id: project.id,
- external_type: 'uuid-v5',
- external_id: 'uuid-v5',
- fingerprint: '7e394d1b1eb461a7406d7b1e08f057a1cf11287a',
- name: 'Identifier for UUIDv5')
- end
-
- let!(:vulnerabilities_findings) { table(:vulnerability_occurrences) }
- let!(:finding) do
- create_finding!(
- vulnerability_id: vulnerability_with_finding.id,
- project_id: project.id,
- scanner_id: scanner.id,
- primary_identifier_id: primary_identifier.id
- )
- end
-
- before do
- stub_const("#{described_class}::BATCH_SIZE", 1)
- end
-
- around do |example|
- freeze_time { Sidekiq::Testing.fake! { example.run } }
- end
-
- it 'schedules background migrations' do
- migrate!
-
- expect(background_migration_jobs.count).to eq(2)
- expect(background_migration_jobs.first.arguments).to eq([vulnerability_with_finding.id, vulnerability_with_finding.id])
- expect(background_migration_jobs.second.arguments).to eq([vulnerability_without_finding.id, vulnerability_without_finding.id])
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, vulnerability_with_finding.id, vulnerability_with_finding.id)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, vulnerability_without_finding.id, vulnerability_without_finding.id)
- end
-
- private
-
- def create_vulnerability!(project_id:, author_id:, title: 'test', severity: 7, confidence: 7, report_type: 0)
- vulnerabilities.create!(
- project_id: project_id,
- author_id: author_id,
- title: title,
- severity: severity,
- confidence: confidence,
- report_type: report_type
- )
- end
-
- # rubocop:disable Metrics/ParameterLists
- def create_finding!(
- vulnerability_id:, project_id:, scanner_id:, primary_identifier_id:,
- name: "test", severity: 7, confidence: 7, report_type: 0,
- project_fingerprint: '123qweasdzxc', location_fingerprint: 'test',
- metadata_version: 'test', raw_metadata: 'test', uuid: SecureRandom.uuid)
- vulnerabilities_findings.create!(
- vulnerability_id: vulnerability_id,
- project_id: project_id,
- name: name,
- severity: severity,
- confidence: confidence,
- report_type: report_type,
- project_fingerprint: project_fingerprint,
- scanner_id: scanner_id,
- primary_identifier_id: primary_identifier_id,
- location_fingerprint: location_fingerprint,
- metadata_version: metadata_version,
- raw_metadata: raw_metadata,
- uuid: uuid
- )
- end
- # rubocop:enable Metrics/ParameterLists
-
- def create_user!(name: "Example User", email: "user@example.com", user_type: nil)
- users.create!(
- name: name,
- email: email,
- username: name,
- projects_limit: 0,
- user_type: user_type,
- confirmed_at: Time.current
- )
- end
-end
diff --git a/spec/migrations/20210514063252_schedule_cleanup_orphaned_lfs_objects_projects_spec.rb b/spec/migrations/20210514063252_schedule_cleanup_orphaned_lfs_objects_projects_spec.rb
deleted file mode 100644
index 8a76f0847e9..00000000000
--- a/spec/migrations/20210514063252_schedule_cleanup_orphaned_lfs_objects_projects_spec.rb
+++ /dev/null
@@ -1,37 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleCleanupOrphanedLfsObjectsProjects, schema: 20210511165250, feature_category: :git_lfs do
- let(:lfs_objects_projects) { table(:lfs_objects_projects) }
- let(:projects) { table(:projects) }
- let(:namespaces) { table(:namespaces) }
- let(:lfs_objects) { table(:lfs_objects) }
-
- let(:namespace) { namespaces.create!(name: 'namespace', path: 'namespace') }
- let(:project) { projects.create!(namespace_id: namespace.id) }
- let(:another_project) { projects.create!(namespace_id: namespace.id) }
- let(:lfs_object) { lfs_objects.create!(oid: 'abcdef', size: 1) }
- let(:another_lfs_object) { lfs_objects.create!(oid: '1abcde', size: 2) }
-
- describe '#up' do
- it 'schedules CleanupOrphanedLfsObjectsProjects background jobs' do
- stub_const("#{described_class}::BATCH_SIZE", 2)
-
- lfs_objects_project1 = lfs_objects_projects.create!(project_id: project.id, lfs_object_id: lfs_object.id)
- lfs_objects_project2 = lfs_objects_projects.create!(project_id: another_project.id, lfs_object_id: lfs_object.id)
- lfs_objects_project3 = lfs_objects_projects.create!(project_id: project.id, lfs_object_id: another_lfs_object.id)
- lfs_objects_project4 = lfs_objects_projects.create!(project_id: another_project.id, lfs_object_id: another_lfs_object.id)
-
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, lfs_objects_project1.id, lfs_objects_project2.id)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, lfs_objects_project3.id, lfs_objects_project4.id)
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/20210601073400_fix_total_stage_in_vsa_spec.rb b/spec/migrations/20210601073400_fix_total_stage_in_vsa_spec.rb
deleted file mode 100644
index 24a71e48035..00000000000
--- a/spec/migrations/20210601073400_fix_total_stage_in_vsa_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe FixTotalStageInVsa, :migration, schema: 20210518001450, feature_category: :devops_reports do
- let(:namespaces) { table(:namespaces) }
- let(:group_value_streams) { table(:analytics_cycle_analytics_group_value_streams) }
- let(:group_stages) { table(:analytics_cycle_analytics_group_stages) }
-
- let!(:group) { namespaces.create!(name: 'ns1', path: 'ns1', type: 'Group') }
- let!(:group_vs_1) { group_value_streams.create!(name: 'default', group_id: group.id) }
- let!(:group_vs_2) { group_value_streams.create!(name: 'other', group_id: group.id) }
- let!(:group_vs_3) { group_value_streams.create!(name: 'another', group_id: group.id) }
- let!(:group_stage_total) { group_stages.create!(name: 'Total', custom: false, group_id: group.id, group_value_stream_id: group_vs_1.id, start_event_identifier: 1, end_event_identifier: 2) }
- let!(:group_stage_different_name) { group_stages.create!(name: 'Issue', custom: false, group_id: group.id, group_value_stream_id: group_vs_2.id, start_event_identifier: 1, end_event_identifier: 2) }
- let!(:group_stage_total_custom) { group_stages.create!(name: 'Total', custom: true, group_id: group.id, group_value_stream_id: group_vs_3.id, start_event_identifier: 1, end_event_identifier: 2) }
-
- it 'deduplicates issue_metrics table' do
- migrate!
-
- group_stage_total.reload
- group_stage_different_name.reload
- group_stage_total_custom.reload
-
- expect(group_stage_total.custom).to eq(true)
- expect(group_stage_different_name.custom).to eq(false)
- expect(group_stage_total_custom.custom).to eq(true)
- end
-end
diff --git a/spec/migrations/20210601080039_group_protected_environments_add_index_and_constraint_spec.rb b/spec/migrations/20210601080039_group_protected_environments_add_index_and_constraint_spec.rb
deleted file mode 100644
index 592497805de..00000000000
--- a/spec/migrations/20210601080039_group_protected_environments_add_index_and_constraint_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe GroupProtectedEnvironmentsAddIndexAndConstraint, feature_category: :continuous_delivery do
- let(:migration) { described_class.new }
- let(:protected_environments) { table(:protected_environments) }
- let(:group) { table(:namespaces).create!(name: 'group', path: 'group') }
- let(:project) { table(:projects).create!(name: 'project', path: 'project', namespace_id: group.id) }
-
- describe '#down' do
- it 'deletes only group-level configurations' do
- migration.up
-
- project_protections = [
- protected_environments.create!(project_id: project.id, name: 'production'),
- protected_environments.create!(project_id: project.id, name: 'staging')
- ]
- protected_environments.create!(group_id: group.id, name: 'production')
- protected_environments.create!(group_id: group.id, name: 'staging')
-
- migration.down
-
- expect(protected_environments.pluck(:id))
- .to match_array project_protections.map(&:id)
- end
- end
-end
diff --git a/spec/migrations/20210708130419_reschedule_merge_request_diff_users_background_migration_spec.rb b/spec/migrations/20210708130419_reschedule_merge_request_diff_users_background_migration_spec.rb
index 604504d2206..0f202129e82 100644
--- a/spec/migrations/20210708130419_reschedule_merge_request_diff_users_background_migration_spec.rb
+++ b/spec/migrations/20210708130419_reschedule_merge_request_diff_users_background_migration_spec.rb
@@ -3,7 +3,8 @@
require 'spec_helper'
require_migration!
-RSpec.describe RescheduleMergeRequestDiffUsersBackgroundMigration, :migration, feature_category: :code_review do
+RSpec.describe RescheduleMergeRequestDiffUsersBackgroundMigration,
+ :migration, feature_category: :code_review_workflow do
let(:migration) { described_class.new }
describe '#up' do
diff --git a/spec/migrations/20210804150320_create_base_work_item_types_spec.rb b/spec/migrations/20210804150320_create_base_work_item_types_spec.rb
index 5626b885626..e7f76eb0ae0 100644
--- a/spec/migrations/20210804150320_create_base_work_item_types_spec.rb
+++ b/spec/migrations/20210804150320_create_base_work_item_types_spec.rb
@@ -17,7 +17,8 @@ RSpec.describe CreateBaseWorkItemTypes, :migration, feature_category: :team_plan
}
end
- after(:all) do
+ # We use append_after to make sure this runs after the schema was reset to its latest state
+ append_after(:all) do
# Make sure base types are recreated after running the migration
# because migration specs are not run in a transaction
reset_work_item_types
diff --git a/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb b/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb
index 2a19dc025a7..4c7ef9ac1e8 100644
--- a/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb
+++ b/spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb
@@ -17,7 +17,7 @@ RSpec.describe UpsertBaseWorkItemTypes, :migration, feature_category: :team_plan
}
end
- after(:all) do
+ append_after(:all) do
# Make sure base types are recreated after running the migration
# because migration specs are not run in a transaction
reset_work_item_types
diff --git a/spec/migrations/20211012134316_clean_up_migrate_merge_request_diff_commit_users_spec.rb b/spec/migrations/20211012134316_clean_up_migrate_merge_request_diff_commit_users_spec.rb
index f627ea825b3..a61e450d9ab 100644
--- a/spec/migrations/20211012134316_clean_up_migrate_merge_request_diff_commit_users_spec.rb
+++ b/spec/migrations/20211012134316_clean_up_migrate_merge_request_diff_commit_users_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration! 'clean_up_migrate_merge_request_diff_commit_users'
-RSpec.describe CleanUpMigrateMergeRequestDiffCommitUsers, :migration, feature_category: :code_review do
+RSpec.describe CleanUpMigrateMergeRequestDiffCommitUsers, :migration, feature_category: :code_review_workflow do
describe '#up' do
context 'when there are pending jobs' do
it 'processes the jobs immediately' do
diff --git a/spec/migrations/20211028155449_schedule_fix_merge_request_diff_commit_users_migration_spec.rb b/spec/migrations/20211028155449_schedule_fix_merge_request_diff_commit_users_migration_spec.rb
index c7a0b938ca1..968d9cf176c 100644
--- a/spec/migrations/20211028155449_schedule_fix_merge_request_diff_commit_users_migration_spec.rb
+++ b/spec/migrations/20211028155449_schedule_fix_merge_request_diff_commit_users_migration_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration! 'schedule_fix_merge_request_diff_commit_users_migration'
-RSpec.describe ScheduleFixMergeRequestDiffCommitUsersMigration, :migration, feature_category: :code_review do
+RSpec.describe ScheduleFixMergeRequestDiffCommitUsersMigration, :migration, feature_category: :code_review_workflow do
let(:migration) { described_class.new }
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
diff --git a/spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb b/spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb
index 32edd3615ff..db68e895b61 100644
--- a/spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb
+++ b/spec/migrations/20211126204445_add_task_to_work_item_types_spec.rb
@@ -18,7 +18,7 @@ RSpec.describe AddTaskToWorkItemTypes, :migration, feature_category: :team_plann
}
end
- after(:all) do
+ append_after(:all) do
# Make sure base types are recreated after running the migration
# because migration specs are not run in a transaction
reset_work_item_types
diff --git a/spec/migrations/20220315171129_cleanup_draft_data_from_faulty_regex_spec.rb b/spec/migrations/20220315171129_cleanup_draft_data_from_faulty_regex_spec.rb
index 1760535e66f..85fe3d712a2 100644
--- a/spec/migrations/20220315171129_cleanup_draft_data_from_faulty_regex_spec.rb
+++ b/spec/migrations/20220315171129_cleanup_draft_data_from_faulty_regex_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe CleanupDraftDataFromFaultyRegex, feature_category: :code_review do
+RSpec.describe CleanupDraftDataFromFaultyRegex, feature_category: :code_review_workflow do
let(:merge_requests) { table(:merge_requests) }
let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
diff --git a/spec/migrations/20220502015011_clean_up_fix_merge_request_diff_commit_users_spec.rb b/spec/migrations/20220502015011_clean_up_fix_merge_request_diff_commit_users_spec.rb
index e316ad25214..47d407618d2 100644
--- a/spec/migrations/20220502015011_clean_up_fix_merge_request_diff_commit_users_spec.rb
+++ b/spec/migrations/20220502015011_clean_up_fix_merge_request_diff_commit_users_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration! 'clean_up_fix_merge_request_diff_commit_users'
-RSpec.describe CleanUpFixMergeRequestDiffCommitUsers, :migration, feature_category: :code_review do
+RSpec.describe CleanUpFixMergeRequestDiffCommitUsers, :migration, feature_category: :code_review_workflow do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:project_namespace) { namespaces.create!(name: 'project2', path: 'project2', type: 'Project') }
diff --git a/spec/migrations/20221018050323_add_objective_and_keyresult_to_work_item_types_spec.rb b/spec/migrations/20221018050323_add_objective_and_keyresult_to_work_item_types_spec.rb
index 3ab33367303..6284608becb 100644
--- a/spec/migrations/20221018050323_add_objective_and_keyresult_to_work_item_types_spec.rb
+++ b/spec/migrations/20221018050323_add_objective_and_keyresult_to_work_item_types_spec.rb
@@ -20,7 +20,7 @@ RSpec.describe AddObjectiveAndKeyresultToWorkItemTypes, :migration, feature_cate
}
end
- after(:all) do
+ append_after(:all) do
# Make sure base types are recreated after running the migration
# because migration specs are not run in a transaction
reset_work_item_types
diff --git a/spec/migrations/20221209235940_cleanup_o_auth_access_tokens_with_null_expires_in_spec.rb b/spec/migrations/20221209235940_cleanup_o_auth_access_tokens_with_null_expires_in_spec.rb
new file mode 100644
index 00000000000..da6532a822a
--- /dev/null
+++ b/spec/migrations/20221209235940_cleanup_o_auth_access_tokens_with_null_expires_in_spec.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe CleanupOAuthAccessTokensWithNullExpiresIn, feature_category: :authentication_and_authorization do
+ let(:batched_migration) { described_class::MIGRATION }
+
+ it 'schedules background jobs for each batch of oauth_access_tokens' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :oauth_access_tokens,
+ column_name: :id,
+ interval: described_class::INTERVAL
+ )
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20221215151822_schedule_backfill_releases_author_id_spec.rb b/spec/migrations/20221215151822_schedule_backfill_releases_author_id_spec.rb
new file mode 100644
index 00000000000..d7aa53ec35b
--- /dev/null
+++ b/spec/migrations/20221215151822_schedule_backfill_releases_author_id_spec.rb
@@ -0,0 +1,53 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe ScheduleBackfillReleasesAuthorId, feature_category: :release_orchestration do
+ context 'when there are releases without author' do
+ let(:releases_table) { table(:releases) }
+ let(:user_table) { table(:users) }
+ let(:date_time) { DateTime.now }
+ let!(:batched_migration) { described_class::MIGRATION }
+ let!(:test_user) do
+ user_table.create!(name: 'test',
+ email: 'test@example.com',
+ username: 'test',
+ projects_limit: 10)
+ end
+
+ before do
+ releases_table.create!(tag: 'tag1', name: 'tag1',
+ released_at: (date_time - 1.minute), author_id: test_user.id)
+ releases_table.create!(tag: 'tag2', name: 'tag2',
+ released_at: (date_time - 2.minutes), author_id: test_user.id)
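+ # save!(validate: false) allows rows with author_id: nil, which is what the backfill targets.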
+ releases_table.new(tag: 'tag3', name: 'tag3',
+ released_at: (date_time - 3.minutes), author_id: nil).save!(validate: false)
+ releases_table.new(tag: 'tag4', name: 'tag4',
+ released_at: (date_time - 4.minutes), author_id: nil).save!(validate: false)
+ end
+
+ it 'schedules a new batched migration' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(batched_migration).not_to have_scheduled_batched_migration
+ }
+
+ migration.after -> {
+ expect(batched_migration).to have_scheduled_batched_migration(
+ table_name: :releases,
+ column_name: :id,
+ interval: described_class::JOB_DELAY_INTERVAL,
+ job_arguments: [User.find_by(user_type: :ghost)&.id]
+ )
+ }
+ end
+ end
+ end
+
+ context 'when there are no releases without author' do
+ it 'does not schedule batched migration' do
+ expect(described_class.new.up).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/20221220131020_bump_default_partition_id_value_for_ci_tables_spec.rb b/spec/migrations/20221220131020_bump_default_partition_id_value_for_ci_tables_spec.rb
new file mode 100644
index 00000000000..c4bd243e79f
--- /dev/null
+++ b/spec/migrations/20221220131020_bump_default_partition_id_value_for_ci_tables_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe BumpDefaultPartitionIdValueForCiTables, :migration, feature_category: :continuous_integration do
+ context 'when on SaaS' do
+ before do
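+ # The new default is only applied on GitLab.com (compare the self-managed context below).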
+ allow(Gitlab).to receive(:com?).and_return(true)
+ end
+
+ it 'changes default values' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(default_values).not_to include(101)
+ }
+
+ migration.after -> {
+ expect(default_values).to match_array([101])
+ }
+ end
+ end
+
+ context 'with tables already changed' do
+ before do
+ active_record_base.connection.execute(<<~SQL)
+ ALTER TABLE ci_builds ALTER COLUMN partition_id SET DEFAULT 101
+ SQL
+ end
+
+ after do
+ schema_migrate_down!
+ end
+
+ let(:alter_query) do
+ /ALTER TABLE "ci_builds" ALTER COLUMN "partition_id" SET DEFAULT 101/
+ end
+
+ it 'skips updating already changed tables' do
+ recorder = ActiveRecord::QueryRecorder.new { migrate! }
+
+ expect(recorder.log.any?(alter_query)).to be_falsey
+ expect(default_values).to match_array([101])
+ end
+ end
+ end
+
+ context 'when self-managed' do
+ before do
+ allow(Gitlab).to receive(:com?).and_return(false)
+ end
+
+ it 'does not change default values' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(default_values).not_to include(101)
+ }
+
+ migration.after -> {
+ expect(default_values).not_to include(101)
+ }
+ end
+ end
+ end
+
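+ # Returns the distinct column defaults for every table/column pair listed in described_class::TABLES.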
+ def default_values
+ values = described_class::TABLES.flat_map do |table_name, columns|
+ active_record_base
+ .connection
+ .columns(table_name)
+ .select { |column| columns.include?(column.name.to_sym) }
+ .map { |column| column.default&.to_i }
+ end
+
+ values.uniq
+ end
+end
diff --git a/spec/migrations/20221221110733_remove_temp_index_for_project_statistics_upload_size_migration_spec.rb b/spec/migrations/20221221110733_remove_temp_index_for_project_statistics_upload_size_migration_spec.rb
new file mode 100644
index 00000000000..6f9cfe4764a
--- /dev/null
+++ b/spec/migrations/20221221110733_remove_temp_index_for_project_statistics_upload_size_migration_spec.rb
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe RemoveTempIndexForProjectStatisticsUploadSizeMigration,
+feature_category: :subscription_cost_management do
+ let(:table_name) { 'project_statistics' }
+ let(:index_name) { described_class::INDEX_NAME }
+
+ it 'correctly migrates up and down' do
+ reversible_migration do |migration|
+ migration.before -> {
+ expect(subject.index_exists_by_name?(table_name, index_name)).to be_truthy
+ }
+
+ migration.after -> {
+ expect(subject.index_exists_by_name?(table_name, index_name)).to be_falsy
+ }
+ end
+ end
+end
diff --git a/spec/migrations/20221222092958_sync_new_amount_used_with_amount_used_spec.rb b/spec/migrations/20221222092958_sync_new_amount_used_with_amount_used_spec.rb
new file mode 100644
index 00000000000..158560a2432
--- /dev/null
+++ b/spec/migrations/20221222092958_sync_new_amount_used_with_amount_used_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe SyncNewAmountUsedWithAmountUsed, migration: :gitlab_ci, feature_category: :continuous_integration do
+ let(:project_usages) { table(:ci_project_monthly_usages) }
+ let(:migration) { described_class.new }
+
+ before do
+ # Temporarily disable the trigger so records can be created with out-of-sync
+ # `new_amount_used` and `amount_used`, simulating rows that existed before
+ # the trigger was added.
+ ActiveRecord::Base.connection
+ .execute("ALTER TABLE ci_project_monthly_usages DISABLE TRIGGER sync_projects_amount_used_columns")
+
+ this_month = Time.now.utc.beginning_of_month
+ last_month = 1.month.ago.utc.beginning_of_month
+ last_year = 1.year.ago.utc.beginning_of_month
+
+ project_usages.create!(project_id: 1, date: last_year)
+ project_usages.create!(project_id: 1, date: this_month, amount_used: 10, new_amount_used: 0)
+ project_usages.create!(project_id: 1, date: last_month, amount_used: 20, new_amount_used: 0)
+
+ project_usages.create!(project_id: 2, date: last_year)
+ project_usages.create!(project_id: 2, date: this_month, amount_used: 30, new_amount_used: 0)
+ project_usages.create!(project_id: 2, date: last_month, amount_used: 40, new_amount_used: 0)
+
+ ActiveRecord::Base.connection
+ .execute("ALTER TABLE ci_project_monthly_usages ENABLE TRIGGER sync_projects_amount_used_columns")
+ end
+
+ describe '#up' do
+ it "doesnt change new_amount_used values" do
+ data = project_usages.all
+ data.each do |item|
+ expect { migration.up }.to not_change { item.new_amount_used }
+ end
+ end
+ end
+
+ describe '#down' do
+ it 'updates `new_amount_used` with values from `amount_used`' do
+ expect(project_usages.where(new_amount_used: 0).count).to eq(6)
+
+ migration.down
+
+ expect(project_usages.where(new_amount_used: 0).count).to eq(2)
+ expect(project_usages.order(:id).pluck(:new_amount_used))
+ .to contain_exactly(0, 0, 10, 20, 30, 40)
+ end
+ end
+end
diff --git a/spec/migrations/20221223123019_delete_queued_jobs_for_vulnerabilities_feedback_migration_spec.rb b/spec/migrations/20221223123019_delete_queued_jobs_for_vulnerabilities_feedback_migration_spec.rb
new file mode 100644
index 00000000000..c5e1a255653
--- /dev/null
+++ b/spec/migrations/20221223123019_delete_queued_jobs_for_vulnerabilities_feedback_migration_spec.rb
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe DeleteQueuedJobsForVulnerabilitiesFeedbackMigration, feature_category: :vulnerability_management do
+ let!(:migration) { described_class.new }
+ let(:batched_background_migrations) { table(:batched_background_migrations) }
+
+ before do
+ batched_background_migrations.create!(
+ max_value: 10,
+ batch_size: 250,
+ sub_batch_size: 50,
+ interval: 300,
+ job_class_name: 'MigrateVulnerabilitiesFeedbackToVulnerabilitiesStateTransition',
+ table_name: 'vulnerability_feedback',
+ column_name: 'id',
+ job_arguments: [],
+ gitlab_schema: "gitlab_main"
+ )
+ end
+
+ describe "#up" do
+ it "deletes all batched migration records" do
+ expect(batched_background_migrations.count).to eq(1)
+
+ migration.up
+
+ expect(batched_background_migrations.count).to eq(0)
+ end
+ end
+end
diff --git a/spec/migrations/20230105172120_sync_new_amount_used_with_amount_used_on_ci_namespace_monthly_usages_table_spec.rb b/spec/migrations/20230105172120_sync_new_amount_used_with_amount_used_on_ci_namespace_monthly_usages_table_spec.rb
new file mode 100644
index 00000000000..aa82ca2661b
--- /dev/null
+++ b/spec/migrations/20230105172120_sync_new_amount_used_with_amount_used_on_ci_namespace_monthly_usages_table_spec.rb
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+require_migration!
+
+RSpec.describe SyncNewAmountUsedWithAmountUsedOnCiNamespaceMonthlyUsagesTable, migration: :gitlab_ci,
+feature_category: :continuous_integration do
+ let(:namespace_usages) { table(:ci_namespace_monthly_usages) }
+ let(:migration) { described_class.new }
+
+ before do
+ # Temporarily disable the trigger so records can be created with out-of-sync
+ # `new_amount_used` and `amount_used`, simulating rows that existed before
+ # the trigger was added.
+ ActiveRecord::Base.connection
+ .execute("ALTER TABLE ci_namespace_monthly_usages DISABLE TRIGGER sync_namespaces_amount_used_columns")
+
+ this_month = Time.now.utc.beginning_of_month
+ last_month = 1.month.ago.utc.beginning_of_month
+ last_year = 1.year.ago.utc.beginning_of_month
+
+ namespace_usages.create!(namespace_id: 1, date: last_year)
+ namespace_usages.create!(namespace_id: 1, date: this_month, amount_used: 10, new_amount_used: 0)
+ namespace_usages.create!(namespace_id: 1, date: last_month, amount_used: 20, new_amount_used: 0)
+
+ namespace_usages.create!(namespace_id: 2, date: last_year)
+ namespace_usages.create!(namespace_id: 2, date: this_month, amount_used: 30, new_amount_used: 0)
+ namespace_usages.create!(namespace_id: 2, date: last_month, amount_used: 40, new_amount_used: 0)
+
+ ActiveRecord::Base.connection
+ .execute("ALTER TABLE ci_namespace_monthly_usages ENABLE TRIGGER sync_namespaces_amount_used_columns")
+ end
+
+ describe '#up' do
+ it "doesnt change new_amount_used values" do
+ data = namespace_usages.all
+ data.each do |item|
+ expect { migration.up }.to not_change { item.new_amount_used }
+ end
+ end
+ end
+
+ describe '#down' do
+ it 'updates `new_amount_used` with values from `amount_used`' do
+ expect(namespace_usages.where(new_amount_used: 0).count).to eq(6)
+
+ migration.down
+
+ expect(namespace_usages.where(new_amount_used: 0).count).to eq(2)
+ expect(namespace_usages.order(:id).pluck(:new_amount_used))
+ .to contain_exactly(0, 0, 10, 20, 30, 40)
+ end
+ end
+end
diff --git a/spec/migrations/20230116111252_finalize_todo_sanitization_spec.rb b/spec/migrations/20230116111252_finalize_todo_sanitization_spec.rb
new file mode 100644
index 00000000000..cd7828bbae4
--- /dev/null
+++ b/spec/migrations/20230116111252_finalize_todo_sanitization_spec.rb
@@ -0,0 +1,57 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe FinalizeTodoSanitization, :migration, feature_category: :portfolio_management do
+ let(:batched_migrations) { table(:batched_background_migrations) }
+
+ let!(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ let!(:sanitize_todos_migration) do
+ batched_migrations.create!(
+ job_class_name: 'SanitizeConfidentialTodos',
+ table_name: :notes,
+ column_name: :id,
+ job_arguments: [],
+ interval: 2.minutes,
+ min_value: 1,
+ max_value: 2,
+ batch_size: 1000,
+ sub_batch_size: 200,
+ gitlab_schema: :gitlab_main,
+ status: 3 # finished
+ )
+ end
+
+ context 'when migration finished successfully' do
+ it 'does not raise exception' do
+ expect { migrate! }.not_to raise_error
+ end
+ end
+
+ context 'with different migration statuses' do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:status, :description) do
+ 0 | 'paused'
+ 1 | 'active'
+ 4 | 'failed'
+ 5 | 'finalizing'
+ end
+
+ with_them do
+ before do
+ sanitize_todos_migration.update!(status: status)
+ end
+
+ it 'finalizes the migration' do
+ allow_next_instance_of(Gitlab::Database::BackgroundMigration::BatchedMigrationRunner) do |runner|
+ expect(runner).to receive(:finalize).with('SanitizeConfidentialTodos', :notes, :id, [])
+ end
+ end
+ end
+ end
+ end
+end
diff --git a/spec/migrations/add_new_trail_plans_spec.rb b/spec/migrations/add_new_trail_plans_spec.rb
deleted file mode 100644
index 6f8de8435c6..00000000000
--- a/spec/migrations/add_new_trail_plans_spec.rb
+++ /dev/null
@@ -1,95 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe AddNewTrailPlans, :migration, feature_category: :purchase do
- describe '#up' do
- before do
- allow(Gitlab).to receive(:com?).and_return true
- end
-
- it 'creates 2 entries within the plans table' do
- expect { migrate! }.to change { AddNewTrailPlans::Plan.count }.by 2
- expect(AddNewTrailPlans::Plan.last(2).pluck(:name)).to match_array(%w(ultimate_trial premium_trial))
- end
-
- it 'creates 2 entries for plan limits' do
- expect { migrate! }.to change { AddNewTrailPlans::PlanLimits.count }.by 2
- end
-
- context 'when the plan limits for gold and silver exists' do
- before do
- table(:plans).create!(id: 1, name: 'gold', title: 'Gold')
- table(:plan_limits).create!(id: 1, plan_id: 1, storage_size_limit: 2000)
- table(:plans).create!(id: 2, name: 'silver', title: 'Silver')
- table(:plan_limits).create!(id: 2, plan_id: 2, storage_size_limit: 1000)
- end
-
- it 'duplicates the gold and silvers plan limits entries' do
- migrate!
-
- ultimate_plan_limits = AddNewTrailPlans::Plan.find_by(name: 'ultimate_trial').limits
- expect(ultimate_plan_limits.storage_size_limit).to be 2000
-
- premium_plan_limits = AddNewTrailPlans::Plan.find_by(name: 'premium_trial').limits
- expect(premium_plan_limits.storage_size_limit).to be 1000
- end
- end
-
- context 'when the instance is not SaaS' do
- before do
- allow(Gitlab).to receive(:com?).and_return false
- end
-
- it 'does not create plans and plan limits and returns' do
- expect { migrate! }.not_to change { AddNewTrailPlans::Plan.count }
- expect { migrate! }.not_to change { AddNewTrailPlans::Plan.count }
- end
- end
- end
-
- describe '#down' do
- before do
- table(:plans).create!(id: 3, name: 'other')
- table(:plan_limits).create!(plan_id: 3)
- end
-
- context 'when the instance is SaaS' do
- before do
- allow(Gitlab).to receive(:com?).and_return true
- end
-
- it 'removes the newly added ultimate and premium trial entries' do
- migrate!
-
- expect { described_class.new.down }.to change { AddNewTrailPlans::Plan.count }.by(-2)
- expect(AddNewTrailPlans::Plan.find_by(name: 'premium_trial')).to be_nil
- expect(AddNewTrailPlans::Plan.find_by(name: 'ultimate_trial')).to be_nil
-
- other_plan = AddNewTrailPlans::Plan.find_by(name: 'other')
- expect(other_plan).to be_persisted
- expect(AddNewTrailPlans::PlanLimits.count).to eq(1)
- expect(AddNewTrailPlans::PlanLimits.first.plan_id).to eq(other_plan.id)
- end
- end
-
- context 'when the instance is not SaaS' do
- before do
- allow(Gitlab).to receive(:com?).and_return false
- table(:plans).create!(id: 1, name: 'ultimate_trial', title: 'Ultimate Trial')
- table(:plans).create!(id: 2, name: 'premium_trial', title: 'Premium Trial')
- table(:plan_limits).create!(id: 1, plan_id: 1)
- table(:plan_limits).create!(id: 2, plan_id: 2)
- end
-
- it 'does not delete plans and plan limits and returns' do
- migrate!
-
- expect { described_class.new.down }.not_to change { AddNewTrailPlans::Plan.count }
- expect(AddNewTrailPlans::PlanLimits.count).to eq(3)
- end
- end
- end
-end
diff --git a/spec/migrations/backfill_clusters_integration_prometheus_enabled_spec.rb b/spec/migrations/backfill_clusters_integration_prometheus_enabled_spec.rb
deleted file mode 100644
index 1c7745a64ef..00000000000
--- a/spec/migrations/backfill_clusters_integration_prometheus_enabled_spec.rb
+++ /dev/null
@@ -1,80 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillClustersIntegrationPrometheusEnabled, :migration, feature_category: :clusters_applications_prometheus do
- def create_cluster!(label = rand(2**64).to_s)
- table(:clusters).create!(
- name: "cluster: #{label}",
- created_at: 1.day.ago,
- updated_at: 1.day.ago
- )
- end
-
- def create_clusters_applications_prometheus!(label, status:, cluster_id: nil)
- table(:clusters_applications_prometheus).create!(
- cluster_id: cluster_id || create_cluster!(label).id,
- status: status,
- version: "#{label}: version",
- created_at: 1.day.ago, # artificially aged
- updated_at: 1.day.ago, # artificially aged
- encrypted_alert_manager_token: "#{label}: token",
- encrypted_alert_manager_token_iv: "#{label}: iv"
- )
- end
-
- def create_clusters_integration_prometheus!
- table(:clusters_integration_prometheus).create!(
- cluster_id: create_cluster!.id,
- enabled: false,
- created_at: 1.day.ago,
- updated_at: 1.day.ago
- )
- end
-
- RSpec::Matchers.define :be_enabled_and_match_application_values do |application|
- match do |actual|
- actual.enabled == true &&
- actual.encrypted_alert_manager_token == application.encrypted_alert_manager_token &&
- actual.encrypted_alert_manager_token_iv == application.encrypted_alert_manager_token_iv
- end
- end
-
- describe '#up' do
- it 'backfills the enabled status and alert manager credentials from clusters_applications_prometheus' do
- status_installed = 3
- status_externally_installed = 11
- status_installable = 0
-
- existing_integration = create_clusters_integration_prometheus!
- unaffected_existing_integration = create_clusters_integration_prometheus!
- app_installed = create_clusters_applications_prometheus!('installed', status: status_installed)
- app_installed_existing_integration = create_clusters_applications_prometheus!('installed, existing integration', status: status_installed, cluster_id: existing_integration.cluster_id)
- app_externally_installed = create_clusters_applications_prometheus!('externally installed', status: status_externally_installed)
- app_other_status = create_clusters_applications_prometheus!('other status', status: status_installable)
-
- migrate!
-
- integrations = table(:clusters_integration_prometheus).all.index_by(&:cluster_id)
-
- expect(unaffected_existing_integration.reload).to eq unaffected_existing_integration
-
- integration_installed = integrations[app_installed.cluster_id]
- expect(integration_installed).to be_enabled_and_match_application_values(app_installed)
- expect(integration_installed.updated_at).to be >= 1.minute.ago # recently updated
- expect(integration_installed.updated_at).to eq(integration_installed.created_at) # recently created
-
- expect(existing_integration.reload).to be_enabled_and_match_application_values(app_installed_existing_integration)
- expect(existing_integration.updated_at).to be >= 1.minute.ago # recently updated
- expect(existing_integration.updated_at).not_to eq(existing_integration.created_at) # but not recently created
-
- integration_externally_installed = integrations[app_externally_installed.cluster_id]
- expect(integration_externally_installed).to be_enabled_and_match_application_values(app_externally_installed)
- expect(integration_externally_installed.updated_at).to be >= 1.minute.ago # recently updated
- expect(integration_externally_installed.updated_at).to eq(integration_externally_installed.created_at) # recently created
-
- expect(integrations[app_other_status.cluster_id]).to be_nil
- end
- end
-end
diff --git a/spec/migrations/backfill_escalation_policies_for_oncall_schedules_spec.rb b/spec/migrations/backfill_escalation_policies_for_oncall_schedules_spec.rb
deleted file mode 100644
index aa77a5c228a..00000000000
--- a/spec/migrations/backfill_escalation_policies_for_oncall_schedules_spec.rb
+++ /dev/null
@@ -1,103 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe BackfillEscalationPoliciesForOncallSchedules, feature_category: :incident_management do
- let!(:projects) { table(:projects) }
- let!(:schedules) { table(:incident_management_oncall_schedules) }
- let!(:policies) { table(:incident_management_escalation_policies) }
- let!(:rules) { table(:incident_management_escalation_rules) }
-
- # Project with no schedules
- let!(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab') }
- let!(:project_a) { projects.create!(namespace_id: namespace.id) }
-
- context 'with backfill-able schedules' do
- # Project with one schedule
- let!(:project_b) { projects.create!(namespace_id: namespace.id) }
- let!(:schedule_b1) { schedules.create!(project_id: project_b.id, iid: 1, name: 'Schedule B1') }
-
- # Project with multiple schedules
- let!(:project_c) { projects.create!(namespace_id: namespace.id) }
- let!(:schedule_c1) { schedules.create!(project_id: project_c.id, iid: 1, name: 'Schedule C1') }
- let!(:schedule_c2) { schedules.create!(project_id: project_c.id, iid: 2, name: 'Schedule C2') }
-
- # Project with a single schedule which already has a policy
- let!(:project_d) { projects.create!(namespace_id: namespace.id) }
- let!(:schedule_d1) { schedules.create!(project_id: project_d.id, iid: 1, name: 'Schedule D1') }
- let!(:policy_d1) { policies.create!(project_id: project_d.id, name: 'Policy D1') }
- let!(:rule_d1) { rules.create!(policy_id: policy_d1.id, oncall_schedule_id: schedule_d1.id, status: 2, elapsed_time_seconds: 60) }
-
- # Project with a multiple schedule, one of which already has a policy
- let!(:project_e) { projects.create!(namespace_id: namespace.id) }
- let!(:schedule_e1) { schedules.create!(project_id: project_e.id, iid: 1, name: 'Schedule E1') }
- let!(:schedule_e2) { schedules.create!(project_id: project_e.id, iid: 2, name: 'Schedule E2') }
- let!(:policy_e1) { policies.create!(project_id: project_e.id, name: 'Policy E1') }
- let!(:rule_e1) { rules.create!(policy_id: policy_e1.id, oncall_schedule_id: schedule_e2.id, status: 2, elapsed_time_seconds: 60) }
-
- # Project with a multiple schedule, with multiple policies
- let!(:project_f) { projects.create!(namespace_id: namespace.id) }
- let!(:schedule_f1) { schedules.create!(project_id: project_f.id, iid: 1, name: 'Schedule F1') }
- let!(:schedule_f2) { schedules.create!(project_id: project_f.id, iid: 2, name: 'Schedule F2') }
- let!(:policy_f1) { policies.create!(project_id: project_f.id, name: 'Policy F1') }
- let!(:rule_f1) { rules.create!(policy_id: policy_f1.id, oncall_schedule_id: schedule_f1.id, status: 2, elapsed_time_seconds: 60) }
- let!(:rule_f2) { rules.create!(policy_id: policy_f1.id, oncall_schedule_id: schedule_f2.id, status: 2, elapsed_time_seconds: 60) }
- let!(:policy_f2) { policies.create!(project_id: project_f.id, name: 'Policy F2') }
- let!(:rule_f3) { rules.create!(policy_id: policy_f2.id, oncall_schedule_id: schedule_f2.id, status: 1, elapsed_time_seconds: 10) }
-
- it 'backfills escalation policies correctly' do
- expect { migrate! }
- .to change(policies, :count).by(2)
- .and change(rules, :count).by(3)
-
- new_policy_b1, new_policy_c1 = new_polices = policies.last(2)
- new_rules = rules.last(3)
-
- expect(new_polices).to all have_attributes(name: 'On-call Escalation Policy')
- expect(new_policy_b1.description).to eq('Immediately notify Schedule B1')
- expect(new_policy_c1.description).to eq('Immediately notify Schedule C1')
- expect(policies.pluck(:project_id)).to eq(
- [
- project_d.id,
- project_e.id,
- project_f.id,
- project_f.id,
- project_b.id,
- project_c.id
- ])
-
- expect(new_rules).to all have_attributes(status: 1, elapsed_time_seconds: 0)
- expect(rules.pluck(:policy_id)).to eq(
- [
- rule_d1.policy_id,
- rule_e1.policy_id,
- rule_f1.policy_id,
- rule_f2.policy_id,
- rule_f3.policy_id,
- new_policy_b1.id,
- new_policy_c1.id,
- new_policy_c1.id
- ])
- expect(rules.pluck(:oncall_schedule_id)).to eq(
- [
- rule_d1.oncall_schedule_id,
- rule_e1.oncall_schedule_id,
- rule_f1.oncall_schedule_id,
- rule_f2.oncall_schedule_id,
- rule_f3.oncall_schedule_id,
- schedule_b1.id,
- schedule_c1.id,
- schedule_c2.id
- ])
- end
- end
-
- context 'with no schedules' do
- it 'does nothing' do
- expect { migrate! }
- .to not_change(policies, :count)
- .and not_change(rules, :count)
- end
- end
-end
diff --git a/spec/migrations/backfill_nuget_temporary_packages_to_processing_status_spec.rb b/spec/migrations/backfill_nuget_temporary_packages_to_processing_status_spec.rb
deleted file mode 100644
index ae2656eaf98..00000000000
--- a/spec/migrations/backfill_nuget_temporary_packages_to_processing_status_spec.rb
+++ /dev/null
@@ -1,34 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe BackfillNugetTemporaryPackagesToProcessingStatus, :migration, feature_category: :package_registry do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:packages) { table(:packages_packages) }
-
- before do
- namespace = namespaces.create!(id: 123, name: 'test_namespace', path: 'test_namespace')
- project = projects.create!(id: 111, name: 'sample_project', path: 'sample_project', namespace_id: namespace.id)
-
- packages.create!(name: 'NuGet.Temporary.Package', version: '0.1.1', package_type: 4, status: 0, project_id: project.id)
- packages.create!(name: 'foo', version: '0.1.1', package_type: 4, status: 0, project_id: project.id)
- packages.create!(name: 'NuGet.Temporary.Package', version: '0.1.1', package_type: 4, status: 2, project_id: project.id)
- packages.create!(name: 'NuGet.Temporary.Package', version: '0.1.1', package_type: 1, status: 2, project_id: project.id)
- packages.create!(name: 'NuGet.Temporary.Package', version: '0.1.1', package_type: 1, status: 0, project_id: project.id)
- end
-
- it 'updates the applicable packages to processing status', :aggregate_failures do
- expect(packages.where(status: 0).count).to eq(3)
- expect(packages.where(status: 2).count).to eq(2)
- expect(packages.where(name: 'NuGet.Temporary.Package', package_type: 4, status: 0).count).to eq(1)
-
- migrate!
-
- expect(packages.where(status: 0).count).to eq(2)
- expect(packages.where(status: 2).count).to eq(3)
- expect(packages.where(name: 'NuGet.Temporary.Package', package_type: 4, status: 0).count).to eq(0)
- end
-end
diff --git a/spec/migrations/change_web_hook_events_default_spec.rb b/spec/migrations/change_web_hook_events_default_spec.rb
deleted file mode 100644
index c6c3f285ff1..00000000000
--- a/spec/migrations/change_web_hook_events_default_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ChangeWebHookEventsDefault, feature_category: :integrations do
- let(:web_hooks) { table(:web_hooks) }
- let(:projects) { table(:projects) }
- let(:groups) { table(:namespaces) }
-
- let(:group) { groups.create!(name: 'gitlab', path: 'gitlab-org') }
- let(:project) { projects.create!(name: 'gitlab', path: 'gitlab', namespace_id: group.id) }
- let(:hook) { web_hooks.create!(project_id: project.id, type: 'ProjectHook') }
- let(:group_hook) { web_hooks.create!(group_id: group.id, type: 'GroupHook') }
-
- before do
- # Simulate the wrong schema
- %w(push_events issues_events merge_requests_events tag_push_events).each do |column|
- ActiveRecord::Base.connection.execute "ALTER TABLE web_hooks ALTER COLUMN #{column} DROP DEFAULT"
- end
- end
-
- it 'sets default values' do
- migrate!
-
- expect(hook.push_events).to be true
- expect(hook.issues_events).to be false
- expect(hook.merge_requests_events).to be false
- expect(hook.tag_push_events).to be false
-
- expect(group_hook.push_events).to be true
- expect(group_hook.issues_events).to be false
- expect(group_hook.merge_requests_events).to be false
- expect(group_hook.tag_push_events).to be false
- end
-end
diff --git a/spec/migrations/clean_up_pending_builds_table_spec.rb b/spec/migrations/clean_up_pending_builds_table_spec.rb
deleted file mode 100644
index e044d4a702b..00000000000
--- a/spec/migrations/clean_up_pending_builds_table_spec.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CleanUpPendingBuildsTable, :suppress_gitlab_schemas_validate_connection,
-feature_category: :continuous_integration do
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:queue) { table(:ci_pending_builds) }
- let(:builds) { table(:ci_builds) }
-
- before do
- namespaces.create!(id: 123, name: 'sample', path: 'sample')
- projects.create!(id: 123, name: 'sample', path: 'sample', namespace_id: 123)
-
- builds.create!(id: 1, project_id: 123, status: 'pending', type: 'Ci::Build')
- builds.create!(id: 2, project_id: 123, status: 'pending', type: 'GenericCommitStatus')
- builds.create!(id: 3, project_id: 123, status: 'success', type: 'Ci::Bridge')
- builds.create!(id: 4, project_id: 123, status: 'success', type: 'Ci::Build')
- builds.create!(id: 5, project_id: 123, status: 'running', type: 'Ci::Build')
- builds.create!(id: 6, project_id: 123, status: 'created', type: 'Ci::Build')
-
- queue.create!(id: 1, project_id: 123, build_id: 1)
- queue.create!(id: 2, project_id: 123, build_id: 4)
- queue.create!(id: 3, project_id: 123, build_id: 5)
- end
-
- it 'removes duplicated data from pending builds table' do
- migrate!
-
- expect(queue.all.count).to eq 1
- expect(queue.first.id).to eq 1
- expect(builds.all.count).to eq 6
- end
-
- context 'when there are multiple batches' do
- before do
- stub_const("#{described_class}::BATCH_SIZE", 1)
- end
-
- it 'iterates the data correctly' do
- migrate!
-
- expect(queue.all.count).to eq 1
- end
- end
-end
diff --git a/spec/migrations/cleanup_after_add_primary_email_to_emails_if_user_confirmed_spec.rb b/spec/migrations/cleanup_after_add_primary_email_to_emails_if_user_confirmed_spec.rb
deleted file mode 100644
index 6027199c11c..00000000000
--- a/spec/migrations/cleanup_after_add_primary_email_to_emails_if_user_confirmed_spec.rb
+++ /dev/null
@@ -1,48 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CleanupAfterAddPrimaryEmailToEmailsIfUserConfirmed, :sidekiq, feature_category: :users do
- let(:migration) { described_class.new }
- let(:users) { table(:users) }
- let(:emails) { table(:emails) }
-
- let!(:user_1) { users.create!(name: 'confirmed-user-1', email: 'confirmed-1@example.com', confirmed_at: 3.days.ago, projects_limit: 100) }
- let!(:user_2) { users.create!(name: 'confirmed-user-2', email: 'confirmed-2@example.com', confirmed_at: 1.day.ago, projects_limit: 100) }
- let!(:user_3) { users.create!(name: 'confirmed-user-3', email: 'confirmed-3@example.com', confirmed_at: 1.day.ago, projects_limit: 100) }
- let!(:user_4) { users.create!(name: 'unconfirmed-user', email: 'unconfirmed@example.com', confirmed_at: nil, projects_limit: 100) }
-
- let!(:email_1) { emails.create!(email: 'confirmed-1@example.com', user_id: user_1.id, confirmed_at: 1.day.ago) }
- let!(:email_2) { emails.create!(email: 'other_2@example.com', user_id: user_2.id, confirmed_at: 1.day.ago) }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
- end
-
- it 'consume any pending background migration job' do
- expect_next_instance_of(Gitlab::BackgroundMigration::JobCoordinator) do |coordinator|
- expect(coordinator).to receive(:steal).with('AddPrimaryEmailToEmailsIfUserConfirmed').twice
- end
-
- migration.up
- end
-
- it 'adds the primary email to emails for leftover confirmed users that do not have their primary email in the emails table', :aggregate_failures do
- original_email_1_confirmed_at = email_1.reload.confirmed_at
-
- expect { migration.up }.to change { emails.count }.by(2)
-
- expect(emails.find_by(user_id: user_2.id, email: 'confirmed-2@example.com').confirmed_at).to eq(user_2.reload.confirmed_at)
- expect(emails.find_by(user_id: user_3.id, email: 'confirmed-3@example.com').confirmed_at).to eq(user_3.reload.confirmed_at)
- expect(email_1.reload.confirmed_at).to eq(original_email_1_confirmed_at)
-
- expect(emails.exists?(user_id: user_4.id)).to be(false)
- end
-
- it 'continues in case of errors with one email' do
- allow(Email).to receive(:create) { raise 'boom!' }
-
- expect { migration.up }.not_to raise_error
- end
-end
diff --git a/spec/migrations/cleanup_move_container_registry_enabled_to_project_feature_spec.rb b/spec/migrations/cleanup_move_container_registry_enabled_to_project_feature_spec.rb
deleted file mode 100644
index 1badde62526..00000000000
--- a/spec/migrations/cleanup_move_container_registry_enabled_to_project_feature_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe CleanupMoveContainerRegistryEnabledToProjectFeature, :migration, feature_category: :navigation do
- let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab-org') }
- let(:non_null_project_features) { { pages_access_level: 20 } }
- let(:bg_class_name) { 'MoveContainerRegistryEnabledToProjectFeature' }
-
- let!(:project1) { table(:projects).create!(namespace_id: namespace.id, name: 'project 1', container_registry_enabled: true) }
- let!(:project2) { table(:projects).create!(namespace_id: namespace.id, name: 'project 2', container_registry_enabled: false) }
- let!(:project3) { table(:projects).create!(namespace_id: namespace.id, name: 'project 3', container_registry_enabled: nil) }
-
- let!(:project4) { table(:projects).create!(namespace_id: namespace.id, name: 'project 4', container_registry_enabled: true) }
- let!(:project5) { table(:projects).create!(namespace_id: namespace.id, name: 'project 5', container_registry_enabled: false) }
- let!(:project6) { table(:projects).create!(namespace_id: namespace.id, name: 'project 6', container_registry_enabled: nil) }
-
- let!(:project_feature1) { table(:project_features).create!(project_id: project1.id, container_registry_access_level: 20, **non_null_project_features) }
- let!(:project_feature2) { table(:project_features).create!(project_id: project2.id, container_registry_access_level: 0, **non_null_project_features) }
- let!(:project_feature3) { table(:project_features).create!(project_id: project3.id, container_registry_access_level: 0, **non_null_project_features) }
-
- let!(:project_feature4) { table(:project_features).create!(project_id: project4.id, container_registry_access_level: 0, **non_null_project_features) }
- let!(:project_feature5) { table(:project_features).create!(project_id: project5.id, container_registry_access_level: 20, **non_null_project_features) }
- let!(:project_feature6) { table(:project_features).create!(project_id: project6.id, container_registry_access_level: 20, **non_null_project_features) }
-
- let!(:background_migration_job1) { table(:background_migration_jobs).create!(class_name: bg_class_name, arguments: [project4.id, project5.id], status: 0) }
- let!(:background_migration_job2) { table(:background_migration_jobs).create!(class_name: bg_class_name, arguments: [project6.id, project6.id], status: 0) }
- let!(:background_migration_job3) { table(:background_migration_jobs).create!(class_name: bg_class_name, arguments: [project1.id, project3.id], status: 1) }
-
- it 'steals remaining jobs, updates any remaining rows and deletes background_migration_jobs rows' do
- expect(Gitlab::BackgroundMigration).to receive(:steal).with(bg_class_name).and_call_original
-
- migrate!
-
- expect(project_feature1.reload.container_registry_access_level).to eq(20)
- expect(project_feature2.reload.container_registry_access_level).to eq(0)
- expect(project_feature3.reload.container_registry_access_level).to eq(0)
- expect(project_feature4.reload.container_registry_access_level).to eq(20)
- expect(project_feature5.reload.container_registry_access_level).to eq(0)
- expect(project_feature6.reload.container_registry_access_level).to eq(0)
-
- expect(table(:background_migration_jobs).where(class_name: bg_class_name).count).to eq(0)
- end
-end
diff --git a/spec/migrations/cleanup_mr_attention_request_todos_spec.rb b/spec/migrations/cleanup_mr_attention_request_todos_spec.rb
index 4fa2419aa7c..cea72003ccd 100644
--- a/spec/migrations/cleanup_mr_attention_request_todos_spec.rb
+++ b/spec/migrations/cleanup_mr_attention_request_todos_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe CleanupMrAttentionRequestTodos, :migration, feature_category: :code_review do
+RSpec.describe CleanupMrAttentionRequestTodos, :migration, feature_category: :code_review_workflow do
let(:projects) { table(:projects) }
let(:namespaces) { table(:namespaces) }
let(:users) { table(:users) }
diff --git a/spec/migrations/confirm_support_bot_user_spec.rb b/spec/migrations/confirm_support_bot_user_spec.rb
deleted file mode 100644
index 863bdb13585..00000000000
--- a/spec/migrations/confirm_support_bot_user_spec.rb
+++ /dev/null
@@ -1,86 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ConfirmSupportBotUser, :migration, feature_category: :users do
- let(:users) { table(:users) }
-
- context 'when support bot user is currently unconfirmed' do
- let!(:support_bot) do
- create_user!(
- created_at: 2.days.ago,
- user_type: User::USER_TYPES['support_bot']
- )
- end
-
- it 'updates the `confirmed_at` attribute' do
- expect { migrate! }.to change { support_bot.reload.confirmed_at }
- end
-
- it 'sets `confirmed_at` to be the same as their `created_at` attribute' do
- migrate!
-
- expect(support_bot.reload.confirmed_at).to eq(support_bot.created_at)
- end
- end
-
- context 'when support bot user is already confirmed' do
- let!(:confirmed_support_bot) do
- create_user!(
- user_type: User::USER_TYPES['support_bot'],
- confirmed_at: 1.day.ago
- )
- end
-
- it 'does not change their `confirmed_at` attribute' do
- expect { migrate! }.not_to change { confirmed_support_bot.reload.confirmed_at }
- end
- end
-
- context 'when support bot user created_at is null' do
- let!(:support_bot) do
- create_user!(
- user_type: User::USER_TYPES['support_bot'],
- confirmed_at: nil,
- record_timestamps: false
- )
- end
-
- it 'updates the `confirmed_at` attribute' do
- expect { migrate! }.to change { support_bot.reload.confirmed_at }.from(nil)
- end
-
- it 'does not change the `created_at` attribute' do
- expect { migrate! }.not_to change { support_bot.reload.created_at }.from(nil)
- end
- end
-
- context 'with human users that are currently unconfirmed' do
- let!(:unconfirmed_human) do
- create_user!(
- name: 'human',
- email: 'human@example.com',
- user_type: nil
- )
- end
-
- it 'does not update their `confirmed_at` attribute' do
- expect { migrate! }.not_to change { unconfirmed_human.reload.confirmed_at }
- end
- end
-
- private
-
- def create_user!(user_type:, name: 'GitLab Support Bot', email: 'support@example.com', created_at: Time.now, confirmed_at: nil, record_timestamps: true)
- users.create!(
- name: name,
- email: email,
- username: name,
- projects_limit: 0,
- user_type: user_type,
- confirmed_at: confirmed_at,
- record_timestamps: record_timestamps
- )
- end
-end
diff --git a/spec/migrations/delete_security_findings_without_uuid_spec.rb b/spec/migrations/delete_security_findings_without_uuid_spec.rb
deleted file mode 100644
index e4c17288384..00000000000
--- a/spec/migrations/delete_security_findings_without_uuid_spec.rb
+++ /dev/null
@@ -1,36 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe DeleteSecurityFindingsWithoutUuid, :suppress_gitlab_schemas_validate_connection,
-feature_category: :vulnerability_management do
- let(:users) { table(:users) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:ci_pipelines) { table(:ci_pipelines) }
- let(:ci_builds) { table(:ci_builds) }
- let(:ci_artifacts) { table(:ci_job_artifacts) }
- let(:scanners) { table(:vulnerability_scanners) }
- let(:security_scans) { table(:security_scans) }
- let(:security_findings) { table(:security_findings) }
- let(:sast_file_type) { 5 }
- let(:sast_scan_type) { 1 }
-
- let(:user) { users.create!(email: 'test@gitlab.com', projects_limit: 5) }
- let(:namespace) { namespaces.create!(name: 'gitlab', path: 'gitlab-org') }
- let(:project) { projects.create!(namespace_id: namespace.id, name: 'foo') }
- let(:ci_pipeline) { ci_pipelines.create!(project_id: project.id, ref: 'master', sha: 'adf43c3a', status: 'success') }
- let(:ci_build) { ci_builds.create!(commit_id: ci_pipeline.id, retried: false, type: 'Ci::Build') }
- let(:ci_artifact) { ci_artifacts.create!(project_id: project.id, job_id: ci_build.id, file_type: sast_file_type, file_format: 1) }
- let(:scanner) { scanners.create!(project_id: project.id, external_id: 'bandit', name: 'Bandit') }
- let(:security_scan) { security_scans.create!(build_id: ci_build.id, scan_type: sast_scan_type) }
-
- let!(:finding_1) { security_findings.create!(scan_id: security_scan.id, scanner_id: scanner.id, severity: 0, confidence: 0, project_fingerprint: Digest::SHA1.hexdigest(SecureRandom.uuid)) }
- let!(:finding_2) { security_findings.create!(scan_id: security_scan.id, scanner_id: scanner.id, severity: 0, confidence: 0, project_fingerprint: Digest::SHA1.hexdigest(SecureRandom.uuid), uuid: SecureRandom.uuid) }
-
- it 'successfully runs and does not schedule any job' do
- expect { migrate! }.to change { described_class::SecurityFinding.count }.by(-1)
- .and change { described_class::SecurityFinding.where(id: finding_1) }
- end
-end
diff --git a/spec/migrations/insert_ci_daily_pipeline_schedule_triggers_plan_limits_spec.rb b/spec/migrations/insert_ci_daily_pipeline_schedule_triggers_plan_limits_spec.rb
deleted file mode 100644
index 9358b71132c..00000000000
--- a/spec/migrations/insert_ci_daily_pipeline_schedule_triggers_plan_limits_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe InsertCiDailyPipelineScheduleTriggersPlanLimits, feature_category: :purchase do
- let!(:plans) { table(:plans) }
- let!(:plan_limits) { table(:plan_limits) }
-
- context 'when on Gitlab.com' do
- let(:free_plan) { plans.create!(name: 'free') }
- let(:bronze_plan) { plans.create!(name: 'bronze') }
- let(:silver_plan) { plans.create!(name: 'silver') }
- let(:gold_plan) { plans.create!(name: 'gold') }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(true)
-
- plan_limits.create!(plan_id: free_plan.id)
- plan_limits.create!(plan_id: bronze_plan.id)
- plan_limits.create!(plan_id: silver_plan.id)
- plan_limits.create!(plan_id: gold_plan.id)
- end
-
- it 'correctly migrates up and down' do
- reversible_migration do |migration|
- migration.before -> {
- expect(plan_limits.pluck(:plan_id, :ci_daily_pipeline_schedule_triggers)).to contain_exactly(
- [free_plan.id, 0],
- [bronze_plan.id, 0],
- [silver_plan.id, 0],
- [gold_plan.id, 0]
- )
- }
-
- migration.after -> {
- expect(plan_limits.pluck(:plan_id, :ci_daily_pipeline_schedule_triggers)).to contain_exactly(
- [free_plan.id, 24],
- [bronze_plan.id, 288],
- [silver_plan.id, 288],
- [gold_plan.id, 288]
- )
- }
- end
- end
- end
-
- context 'when on self hosted' do
- let(:default_plan) { plans.create!(name: 'default') }
-
- before do
- allow(Gitlab).to receive(:com?).and_return(false)
-
- plan_limits.create!(plan_id: default_plan.id)
- end
-
- it 'does nothing' do
- reversible_migration do |migration|
- migration.before -> {
- expect(plan_limits.pluck(:plan_id, :ci_daily_pipeline_schedule_triggers)).to contain_exactly(
- [default_plan.id, 0]
- )
- }
-
- migration.after -> {
- expect(plan_limits.pluck(:plan_id, :ci_daily_pipeline_schedule_triggers)).to contain_exactly(
- [default_plan.id, 0]
- )
- }
- end
- end
- end
-end
diff --git a/spec/migrations/migrate_elastic_index_settings_spec.rb b/spec/migrations/migrate_elastic_index_settings_spec.rb
deleted file mode 100644
index b67c4d902c7..00000000000
--- a/spec/migrations/migrate_elastic_index_settings_spec.rb
+++ /dev/null
@@ -1,44 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe MigrateElasticIndexSettings, feature_category: :global_search do
- let(:elastic_index_settings) { table(:elastic_index_settings) }
- let(:application_settings) { table(:application_settings) }
-
- context 'with application_settings present' do
- before do
- application_settings.create!(elasticsearch_replicas: 2, elasticsearch_shards: 15)
- end
-
- it 'migrates settings' do
- migrate!
-
- settings = elastic_index_settings.all
-
- expect(settings.size).to eq 1
-
- setting = settings.first
-
- expect(setting.number_of_replicas).to eq(2)
- expect(setting.number_of_shards).to eq(15)
- end
- end
-
- context 'without application_settings present' do
- it 'migrates settings' do
- migrate!
-
- settings = elastic_index_settings.all
-
- expect(settings.size).to eq 1
-
- setting = elastic_index_settings.first
-
- expect(setting.number_of_replicas).to eq(1)
- expect(setting.number_of_shards).to eq(5)
- end
- end
-end
diff --git a/spec/migrations/move_container_registry_enabled_to_project_features3_spec.rb b/spec/migrations/move_container_registry_enabled_to_project_features3_spec.rb
deleted file mode 100644
index 25e0ef439bd..00000000000
--- a/spec/migrations/move_container_registry_enabled_to_project_features3_spec.rb
+++ /dev/null
@@ -1,59 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe MoveContainerRegistryEnabledToProjectFeatures3, :migration, feature_category: :container_registry do
- let(:namespace) { table(:namespaces).create!(name: 'gitlab', path: 'gitlab-org') }
-
- let!(:background_jobs) do
- table(:background_migration_jobs).create!(class_name: described_class::MIGRATION, arguments: [-1, -2])
- table(:background_migration_jobs).create!(class_name: described_class::MIGRATION, arguments: [-3, -4])
- end
-
- let!(:projects) do
- [
- table(:projects).create!(namespace_id: namespace.id, name: 'project 1'),
- table(:projects).create!(namespace_id: namespace.id, name: 'project 2'),
- table(:projects).create!(namespace_id: namespace.id, name: 'project 3'),
- table(:projects).create!(namespace_id: namespace.id, name: 'project 4')
- ]
- end
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 3)
- end
-
- around do |example|
- Sidekiq::Testing.fake! do
- freeze_time do
- example.call
- end
- end
- end
-
- it 'schedules jobs for ranges of projects' do
- # old entries in background_migration_jobs should be deleted.
- expect(table(:background_migration_jobs).count).to eq(2)
- expect(table(:background_migration_jobs).first.arguments).to eq([-1, -2])
- expect(table(:background_migration_jobs).second.arguments).to eq([-3, -4])
-
- migrate!
-
- # Since track_jobs is true, each job should have an entry in the background_migration_jobs
- # table.
- expect(table(:background_migration_jobs).count).to eq(2)
- expect(table(:background_migration_jobs).first.arguments).to eq([projects[0].id, projects[2].id])
- expect(table(:background_migration_jobs).second.arguments).to eq([projects[3].id, projects[3].id])
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(2.minutes, projects[0].id, projects[2].id)
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(4.minutes, projects[3].id, projects[3].id)
- end
-
- it 'schedules jobs according to the configured batch size' do
- expect { migrate! }.to change { BackgroundMigrationWorker.jobs.size }.by(2)
- end
-end
diff --git a/spec/migrations/populate_dismissal_information_for_vulnerabilities_spec.rb b/spec/migrations/populate_dismissal_information_for_vulnerabilities_spec.rb
deleted file mode 100644
index 66fd5eb5ae5..00000000000
--- a/spec/migrations/populate_dismissal_information_for_vulnerabilities_spec.rb
+++ /dev/null
@@ -1,61 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe PopulateDismissalInformationForVulnerabilities, feature_category: :vulnerability_management do
- let(:users) { table(:users) }
- let(:namespaces) { table(:namespaces) }
- let(:projects) { table(:projects) }
- let(:vulnerabilities) { table(:vulnerabilities) }
-
- let(:existing_dismissed_at) { Time.now }
- let(:states) { { detected: 1, dismissed: 2, resolved: 3, confirmed: 4 } }
- let!(:namespace) { namespaces.create!(name: "foo", path: "bar") }
- let!(:user_1) { users.create!(name: 'John Doe', email: 'john_doe+1@example.com', projects_limit: 5) }
- let!(:user_2) { users.create!(name: 'John Doe', email: 'john_doe+2@example.com', projects_limit: 5) }
- let!(:project) { projects.create!(namespace_id: namespace.id) }
- let!(:vulnerability_params) do
- {
- project_id: project.id,
- author_id: user_1.id,
- title: 'Vulnerability',
- severity: 5,
- confidence: 5,
- report_type: 5
- }
- end
-
- let!(:detected_vulnerability) { vulnerabilities.create!(**vulnerability_params, state: states[:detected]) }
- let!(:resolved_vulnerability) { vulnerabilities.create!(**vulnerability_params, state: states[:resolved]) }
- let!(:confirmed_vulnerability) { vulnerabilities.create!(**vulnerability_params, state: states[:confirmed]) }
-
- let!(:dismissed_vulnerability_1) { vulnerabilities.create!(**vulnerability_params, state: states[:dismissed], updated_by_id: user_2.id) }
- let!(:dismissed_vulnerability_2) { vulnerabilities.create!(**vulnerability_params, state: states[:dismissed], last_edited_by_id: user_2.id) }
- let!(:dismissed_vulnerability_3) { vulnerabilities.create!(**vulnerability_params, state: states[:dismissed], dismissed_at: existing_dismissed_at, author_id: user_2.id) }
- let!(:dismissed_vulnerability_4) { vulnerabilities.create!(**vulnerability_params, state: states[:dismissed], dismissed_by_id: user_1.id, author_id: user_2.id) }
- let!(:dismissed_vulnerability_5) { vulnerabilities.create!(**vulnerability_params, state: states[:dismissed], dismissed_at: existing_dismissed_at, dismissed_by_id: user_1.id, updated_by_id: user_2.id) }
-
- around do |example|
- freeze_time { example.run }
- end
-
- it 'updates the dismissal information for vulnerabilities' do
- expect { migrate! }.to change { dismissed_vulnerability_1.reload.dismissed_at }.from(nil).to(dismissed_vulnerability_1.updated_at)
- .and change { dismissed_vulnerability_1.reload.dismissed_by_id }.from(nil).to(user_2.id)
- .and change { dismissed_vulnerability_2.reload.dismissed_at }.from(nil).to(dismissed_vulnerability_2.updated_at)
- .and change { dismissed_vulnerability_2.reload.dismissed_by_id }.from(nil).to(user_2.id)
- .and change { dismissed_vulnerability_3.reload.dismissed_by_id }.from(nil).to(user_2.id)
- .and change { dismissed_vulnerability_4.reload.dismissed_at }.from(nil).to(dismissed_vulnerability_4.updated_at)
- .and not_change { dismissed_vulnerability_3.reload.dismissed_at }.from(existing_dismissed_at)
- .and not_change { dismissed_vulnerability_4.reload.dismissed_by_id }.from(user_1.id)
- .and not_change { dismissed_vulnerability_5.reload.dismissed_at }.from(existing_dismissed_at)
- .and not_change { dismissed_vulnerability_5.reload.dismissed_by_id }.from(user_1.id)
- .and not_change { detected_vulnerability.reload.dismissed_at }.from(nil)
- .and not_change { detected_vulnerability.reload.dismissed_by_id }.from(nil)
- .and not_change { resolved_vulnerability.reload.dismissed_at }.from(nil)
- .and not_change { resolved_vulnerability.reload.dismissed_by_id }.from(nil)
- .and not_change { confirmed_vulnerability.reload.dismissed_at }.from(nil)
- .and not_change { confirmed_vulnerability.reload.dismissed_by_id }.from(nil)
- end
-end
diff --git a/spec/migrations/queue_backfill_admin_mode_scope_for_personal_access_tokens_spec.rb b/spec/migrations/queue_backfill_admin_mode_scope_for_personal_access_tokens_spec.rb
new file mode 100644
index 00000000000..8209f317550
--- /dev/null
+++ b/spec/migrations/queue_backfill_admin_mode_scope_for_personal_access_tokens_spec.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe QueueBackfillAdminModeScopeForPersonalAccessTokens,
+ feature_category: :authentication_and_authorization do
+ describe '#up' do
+ it 'schedules background migration' do
+ migrate!
+
+ expect(described_class::MIGRATION).to have_scheduled_batched_migration(
+ table_name: :personal_access_tokens,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL)
+ end
+ end
+end
diff --git a/spec/migrations/remove_hipchat_service_records_spec.rb b/spec/migrations/remove_hipchat_service_records_spec.rb
deleted file mode 100644
index b89572b069e..00000000000
--- a/spec/migrations/remove_hipchat_service_records_spec.rb
+++ /dev/null
@@ -1,23 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe RemoveHipchatServiceRecords, feature_category: :integrations do
- let(:services) { table(:services) }
-
- before do
- services.create!(type: 'HipchatService')
- services.create!(type: 'SomeOtherType')
- end
-
- it 'removes services records of type HipchatService' do
- expect(services.count).to eq(2)
-
- migrate!
-
- expect(services.count).to eq(1)
- expect(services.first.type).to eq('SomeOtherType')
- expect(services.where(type: 'HipchatService')).to be_empty
- end
-end
diff --git a/spec/migrations/remove_records_without_group_from_webhooks_table_spec.rb b/spec/migrations/remove_records_without_group_from_webhooks_table_spec.rb
deleted file mode 100644
index eabf6271ded..00000000000
--- a/spec/migrations/remove_records_without_group_from_webhooks_table_spec.rb
+++ /dev/null
@@ -1,27 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-require_migration!('add_not_valid_foreign_key_to_group_hooks')
-
-RSpec.describe RemoveRecordsWithoutGroupFromWebhooksTable, schema: 20210330091751, feature_category: :integrations do
- let(:web_hooks) { table(:web_hooks) }
- let(:groups) { table(:namespaces) }
-
- before do
- group = groups.create!(name: 'gitlab', path: 'gitlab-org')
- web_hooks.create!(group_id: group.id, type: 'GroupHook')
- web_hooks.create!(group_id: nil)
-
- AddNotValidForeignKeyToGroupHooks.new.down
- web_hooks.create!(group_id: non_existing_record_id, type: 'GroupHook')
- AddNotValidForeignKeyToGroupHooks.new.up
- end
-
- it 'removes group hooks where the referenced group does not exist', :aggregate_failures do
- expect { RemoveRecordsWithoutGroupFromWebhooksTable.new.up }.to change { web_hooks.count }.by(-1)
- expect(web_hooks.where.not(group_id: groups.select(:id)).count).to eq(0)
- expect(web_hooks.where.not(group_id: nil).count).to eq(1)
- end
-end
diff --git a/spec/migrations/schedule_add_primary_email_to_emails_if_user_confirmed_spec.rb b/spec/migrations/schedule_add_primary_email_to_emails_if_user_confirmed_spec.rb
deleted file mode 100644
index 98d3e9b7c7c..00000000000
--- a/spec/migrations/schedule_add_primary_email_to_emails_if_user_confirmed_spec.rb
+++ /dev/null
@@ -1,31 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleAddPrimaryEmailToEmailsIfUserConfirmed, :sidekiq, feature_category: :users do
- let(:migration) { described_class.new }
- let(:users) { table(:users) }
-
- let!(:user_1) { users.create!(name: 'confirmed-user-1', email: 'confirmed-1@example.com', confirmed_at: 1.day.ago, projects_limit: 100) }
- let!(:user_2) { users.create!(name: 'confirmed-user-2', email: 'confirmed-2@example.com', confirmed_at: 1.day.ago, projects_limit: 100) }
- let!(:user_3) { users.create!(name: 'confirmed-user-3', email: 'confirmed-3@example.com', confirmed_at: 1.day.ago, projects_limit: 100) }
- let!(:user_4) { users.create!(name: 'confirmed-user-4', email: 'confirmed-4@example.com', confirmed_at: 1.day.ago, projects_limit: 100) }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 2)
- stub_const("#{described_class.name}::INTERVAL", 2.minutes.to_i)
- end
-
- it 'schedules addition of primary email to emails in delayed batches' do
- Sidekiq::Testing.fake! do
- freeze_time do
- migration.up
-
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, user_1.id, user_2.id)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, user_3.id, user_4.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/schedule_backfill_draft_status_on_merge_requests_corrected_regex_spec.rb b/spec/migrations/schedule_backfill_draft_status_on_merge_requests_corrected_regex_spec.rb
index 8a14bf58698..a3bec40c3f0 100644
--- a/spec/migrations/schedule_backfill_draft_status_on_merge_requests_corrected_regex_spec.rb
+++ b/spec/migrations/schedule_backfill_draft_status_on_merge_requests_corrected_regex_spec.rb
@@ -4,7 +4,8 @@ require 'spec_helper'
require_migration!
-RSpec.describe ScheduleBackfillDraftStatusOnMergeRequestsCorrectedRegex, :sidekiq, feature_category: :code_review do
+RSpec.describe ScheduleBackfillDraftStatusOnMergeRequestsCorrectedRegex,
+ :sidekiq, feature_category: :code_review_workflow do
let(:namespaces) { table(:namespaces) }
let(:projects) { table(:projects) }
let(:merge_requests) { table(:merge_requests) }
diff --git a/spec/migrations/schedule_disable_expiration_policies_linked_to_no_container_images_spec.rb b/spec/migrations/schedule_disable_expiration_policies_linked_to_no_container_images_spec.rb
deleted file mode 100644
index ebcc3fda0a3..00000000000
--- a/spec/migrations/schedule_disable_expiration_policies_linked_to_no_container_images_spec.rb
+++ /dev/null
@@ -1,47 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe ScheduleDisableExpirationPoliciesLinkedToNoContainerImages, feature_category: :container_registry do
- let!(:projects) { table(:projects) }
- let!(:container_expiration_policies) { table(:container_expiration_policies) }
- let!(:container_repositories) { table(:container_repositories) }
- let!(:namespaces) { table(:namespaces) }
- let!(:namespace) { namespaces.create!(name: 'test', path: 'test') }
-
- let!(:policy1) { create_expiration_policy(id: 1, enabled: true) }
- let!(:policy2) { create_expiration_policy(id: 2, enabled: false) }
- let!(:policy3) { create_expiration_policy(id: 3, enabled: false) }
- let!(:policy4) { create_expiration_policy(id: 4, enabled: true) }
- let!(:policy5) { create_expiration_policy(id: 5, enabled: false) }
- let!(:policy6) { create_expiration_policy(id: 6, enabled: false) }
- let!(:policy7) { create_expiration_policy(id: 7, enabled: true) }
- let!(:policy8) { create_expiration_policy(id: 8, enabled: true) }
- let!(:policy9) { create_expiration_policy(id: 9, enabled: true) }
-
- it 'schedules background migrations', :aggregate_failures do
- stub_const("#{described_class}::BATCH_SIZE", 2)
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(2.minutes, 1, 4)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(4.minutes, 7, 8)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(6.minutes, 9, 9)
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(3)
- end
- end
- end
-
- def create_expiration_policy(id:, enabled:)
- project = projects.create!(id: id, namespace_id: namespace.id, name: "gitlab-#{id}")
- container_expiration_policies.create!(
- enabled: enabled,
- project_id: project.id
- )
- end
-end
diff --git a/spec/migrations/schedule_update_timelogs_project_id_spec.rb b/spec/migrations/schedule_update_timelogs_project_id_spec.rb
deleted file mode 100644
index 5ce3f7dd36c..00000000000
--- a/spec/migrations/schedule_update_timelogs_project_id_spec.rb
+++ /dev/null
@@ -1,33 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleUpdateTimelogsProjectId, feature_category: :team_planning do
- let!(:namespace) { table(:namespaces).create!(name: 'namespace', path: 'namespace') }
- let!(:project) { table(:projects).create!(namespace_id: namespace.id) }
- let!(:issue) { table(:issues).create!(project_id: project.id) }
- let!(:merge_request) { table(:merge_requests).create!(target_project_id: project.id, source_branch: 'master', target_branch: 'feature') }
- let!(:timelog1) { table(:timelogs).create!(issue_id: issue.id, time_spent: 60) }
- let!(:timelog2) { table(:timelogs).create!(merge_request_id: merge_request.id, time_spent: 600) }
- let!(:timelog3) { table(:timelogs).create!(merge_request_id: merge_request.id, time_spent: 60) }
- let!(:timelog4) { table(:timelogs).create!(issue_id: issue.id, time_spent: 600) }
-
- it 'correctly schedules background migrations' do
- stub_const("#{described_class}::BATCH_SIZE", 2)
-
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(2.minutes, timelog1.id, timelog2.id)
-
- expect(described_class::MIGRATION)
- .to be_scheduled_delayed_migration(4.minutes, timelog3.id, timelog4.id)
-
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/schedule_update_users_where_two_factor_auth_required_from_group_spec.rb b/spec/migrations/schedule_update_users_where_two_factor_auth_required_from_group_spec.rb
deleted file mode 100644
index c9f22c02a0b..00000000000
--- a/spec/migrations/schedule_update_users_where_two_factor_auth_required_from_group_spec.rb
+++ /dev/null
@@ -1,29 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-require_migration!
-
-RSpec.describe ScheduleUpdateUsersWhereTwoFactorAuthRequiredFromGroup, feature_category: :require_two_factor_authentication_from_group do
- let(:users) { table(:users) }
- let!(:user_1) { users.create!(require_two_factor_authentication_from_group: false, name: "user1", email: "user1@example.com", projects_limit: 1) }
- let!(:user_2) { users.create!(require_two_factor_authentication_from_group: true, name: "user2", email: "user2@example.com", projects_limit: 1) }
- let!(:user_3) { users.create!(require_two_factor_authentication_from_group: false, name: "user3", email: "user3@example.com", projects_limit: 1) }
-
- before do
- stub_const("#{described_class.name}::BATCH_SIZE", 1)
- end
-
- it 'schedules jobs for users that do not require two factor authentication' do
- Sidekiq::Testing.fake! do
- freeze_time do
- migrate!
-
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(
- 2.minutes, user_1.id, user_1.id)
- expect(described_class::MIGRATION).to be_scheduled_delayed_migration(
- 4.minutes, user_3.id, user_3.id)
- expect(BackgroundMigrationWorker.jobs.size).to eq(2)
- end
- end
- end
-end
diff --git a/spec/migrations/second_recount_epic_cache_counts_spec.rb b/spec/migrations/second_recount_epic_cache_counts_spec.rb
new file mode 100644
index 00000000000..ab4357264be
--- /dev/null
+++ b/spec/migrations/second_recount_epic_cache_counts_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+require_migration!
+
+RSpec.describe SecondRecountEpicCacheCounts, :migration, feature_category: :portfolio_management do
+ let(:migration) { described_class::MIGRATION }
+
+ describe '#up' do
+ it 'schedules a batched background migration' do
+ migrate!
+
+ expect(migration).to have_scheduled_batched_migration(
+ table_name: :epics,
+ column_name: :id,
+ interval: described_class::DELAY_INTERVAL,
+ batch_size: described_class::BATCH_SIZE,
+ max_batch_size: described_class::MAX_BATCH_SIZE,
+ sub_batch_size: described_class::SUB_BATCH_SIZE
+ )
+ end
+ end
+
+ describe '#down' do
+ it 'deletes all batched migration records' do
+ migrate!
+ schema_migrate_down!
+
+ expect(migration).not_to have_scheduled_batched_migration
+ end
+ end
+end
diff --git a/spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb b/spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb
index fdbd8093fa5..ffd25152a45 100644
--- a/spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb
+++ b/spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
require_migration!
-RSpec.describe SliceMergeRequestDiffCommitMigrations, :migration, feature_category: :code_review do
+RSpec.describe SliceMergeRequestDiffCommitMigrations, :migration, feature_category: :code_review_workflow do
let(:migration) { described_class.new }
describe '#up' do
diff --git a/spec/migrations/update_invalid_web_hooks_spec.rb b/spec/migrations/update_invalid_web_hooks_spec.rb
deleted file mode 100644
index 9e69d3637b8..00000000000
--- a/spec/migrations/update_invalid_web_hooks_spec.rb
+++ /dev/null
@@ -1,30 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-require_migration!
-
-RSpec.describe UpdateInvalidWebHooks, feature_category: :integrations do
- let(:web_hooks) { table(:web_hooks) }
- let(:groups) { table(:namespaces) }
- let(:projects) { table(:projects) }
-
- before do
- group = groups.create!(name: 'gitlab', path: 'gitlab-org')
- project = projects.create!(namespace_id: group.id)
-
- web_hooks.create!(group_id: group.id, type: 'GroupHook')
- web_hooks.create!(project_id: project.id, type: 'ProjectHook')
- web_hooks.create!(group_id: group.id, project_id: project.id, type: 'ProjectHook')
- end
-
- it 'clears group_id when ProjectHook type and project_id are present', :aggregate_failures do
- expect(web_hooks.where.not(group_id: nil).where.not(project_id: nil).count).to eq(1)
-
- migrate!
-
- expect(web_hooks.where.not(group_id: nil).where.not(project_id: nil).count).to eq(0)
- expect(web_hooks.where(type: 'GroupHook').count).to eq(1)
- expect(web_hooks.where(type: 'ProjectHook').count).to eq(2)
- end
-end
diff --git a/spec/models/abuse_report_spec.rb b/spec/models/abuse_report_spec.rb
index 3871b18fdd5..b07fafabbb5 100644
--- a/spec/models/abuse_report_spec.rb
+++ b/spec/models/abuse_report_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe AbuseReport do
+RSpec.describe AbuseReport, feature_category: :insider_threat do
let_it_be(:report, reload: true) { create(:abuse_report) }
let_it_be(:user, reload: true) { create(:admin) }
@@ -20,10 +20,29 @@ RSpec.describe AbuseReport do
end
describe 'validations' do
+ let(:http) { 'http://gitlab.com' }
+ let(:https) { 'https://gitlab.com' }
+ let(:ftp) { 'ftp://example.com' }
+ let(:javascript) { 'javascript:alert(window.opener.document.location)' }
+
it { is_expected.to validate_presence_of(:reporter) }
it { is_expected.to validate_presence_of(:user) }
it { is_expected.to validate_presence_of(:message) }
- it { is_expected.to validate_uniqueness_of(:user_id).with_message('has already been reported') }
+ it { is_expected.to validate_presence_of(:category) }
+
+ it do
+ is_expected.to validate_uniqueness_of(:user_id)
+ .scoped_to([:reporter_id, :category])
+ .with_message('You have already reported this user')
+ end
+
+ it { is_expected.to validate_length_of(:reported_from_url).is_at_most(512).allow_blank }
+ it { is_expected.to allow_value(http).for(:reported_from_url) }
+ it { is_expected.to allow_value(https).for(:reported_from_url) }
+ it { is_expected.not_to allow_value(ftp).for(:reported_from_url) }
+ it { is_expected.not_to allow_value(javascript).for(:reported_from_url) }
+ it { is_expected.to allow_value('http://localhost:9000').for(:reported_from_url) }
+ it { is_expected.to allow_value('https://gitlab.com').for(:reported_from_url) }
end
describe '#remove_user' do
@@ -54,4 +73,21 @@ RSpec.describe AbuseReport do
report.notify
end
end
+
+ describe 'enums' do
+ let(:categories) do
+ {
+ spam: 1,
+ offensive: 2,
+ phishing: 3,
+ crypto: 4,
+ credentials: 5,
+ copyright: 6,
+ malware: 7,
+ other: 8
+ }
+ end
+
+ it { is_expected.to define_enum_for(:category).with_values(**categories) }
+ end
end
diff --git a/spec/models/achievements/achievement_spec.rb b/spec/models/achievements/achievement_spec.rb
index 10c04d184af..9a5f4eee229 100644
--- a/spec/models/achievements/achievement_spec.rb
+++ b/spec/models/achievements/achievement_spec.rb
@@ -5,6 +5,9 @@ require 'spec_helper'
RSpec.describe Achievements::Achievement, type: :model, feature_category: :users do
describe 'associations' do
it { is_expected.to belong_to(:namespace).required }
+
+ it { is_expected.to have_many(:user_achievements).inverse_of(:achievement) }
+ it { is_expected.to have_many(:users).through(:user_achievements).inverse_of(:achievements) }
end
describe 'modules' do
diff --git a/spec/models/achievements/user_achievement_spec.rb b/spec/models/achievements/user_achievement_spec.rb
new file mode 100644
index 00000000000..a91cba2b5e2
--- /dev/null
+++ b/spec/models/achievements/user_achievement_spec.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Achievements::UserAchievement, type: :model, feature_category: :users do
+ describe 'associations' do
+ it { is_expected.to belong_to(:achievement).inverse_of(:user_achievements).required }
+ it { is_expected.to belong_to(:user).inverse_of(:user_achievements).required }
+
+ it { is_expected.to belong_to(:awarded_by_user).class_name('User').inverse_of(:awarded_user_achievements).optional }
+ it { is_expected.to belong_to(:revoked_by_user).class_name('User').inverse_of(:revoked_user_achievements).optional }
+ end
+end
diff --git a/spec/models/analytics/cycle_analytics/aggregation_spec.rb b/spec/models/analytics/cycle_analytics/aggregation_spec.rb
index 2fb40852791..a51c21dc87e 100644
--- a/spec/models/analytics/cycle_analytics/aggregation_spec.rb
+++ b/spec/models/analytics/cycle_analytics/aggregation_spec.rb
@@ -1,13 +1,13 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Analytics::CycleAnalytics::Aggregation, type: :model do
+RSpec.describe Analytics::CycleAnalytics::Aggregation, type: :model, feature_category: :value_stream_management do
describe 'associations' do
- it { is_expected.to belong_to(:group).required }
+ it { is_expected.to belong_to(:namespace).required }
end
describe 'validations' do
- it { is_expected.not_to validate_presence_of(:group) }
+ it { is_expected.not_to validate_presence_of(:namespace) }
it { is_expected.not_to validate_presence_of(:enabled) }
%i[incremental_runtimes_in_seconds incremental_processed_records full_runtimes_in_seconds full_processed_records].each do |column|
@@ -18,6 +18,10 @@ RSpec.describe Analytics::CycleAnalytics::Aggregation, type: :model do
expect(record.errors).to have_key(column)
end
end
+
+ it_behaves_like 'value stream analytics namespace models' do
+ let(:factory_name) { :cycle_analytics_aggregation }
+ end
end
describe 'attribute updater methods' do
@@ -126,19 +130,19 @@ RSpec.describe Analytics::CycleAnalytics::Aggregation, type: :model do
end
end
- describe '#safe_create_for_group' do
+ describe '#safe_create_for_namespace' do
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
it 'creates the aggregation record' do
- record = described_class.safe_create_for_group(group)
+ record = described_class.safe_create_for_namespace(group)
expect(record).to be_persisted
end
context 'when non top-level group is given' do
it 'creates the aggregation record for the top-level group' do
- record = described_class.safe_create_for_group(subgroup)
+ record = described_class.safe_create_for_namespace(subgroup)
expect(record).to be_persisted
end
@@ -146,11 +150,11 @@ RSpec.describe Analytics::CycleAnalytics::Aggregation, type: :model do
context 'when the record is already present' do
it 'does nothing' do
- described_class.safe_create_for_group(group)
+ described_class.safe_create_for_namespace(group)
expect do
- described_class.safe_create_for_group(group)
- described_class.safe_create_for_group(subgroup)
+ described_class.safe_create_for_namespace(group)
+ described_class.safe_create_for_namespace(subgroup)
end.not_to change { described_class.count }
end
end
diff --git a/spec/models/analytics/cycle_analytics/project_stage_spec.rb b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
index 697b7aee022..3c7fde17355 100644
--- a/spec/models/analytics/cycle_analytics/project_stage_spec.rb
+++ b/spec/models/analytics/cycle_analytics/project_stage_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
RSpec.describe Analytics::CycleAnalytics::ProjectStage do
describe 'associations' do
- it { is_expected.to belong_to(:project) }
+ it { is_expected.to belong_to(:project).required }
end
it 'default stages must be valid' do
diff --git a/spec/models/appearance_spec.rb b/spec/models/appearance_spec.rb
index 289408231a9..54dc280d7ac 100644
--- a/spec/models/appearance_spec.rb
+++ b/spec/models/appearance_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe Appearance do
subject(:appearance) { described_class.new }
it { expect(appearance.title).to eq('') }
- it { expect(appearance.short_title).to eq('') }
+ it { expect(appearance.pwa_short_name).to eq('') }
it { expect(appearance.description).to eq('') }
it { expect(appearance.new_project_guidelines).to eq('') }
it { expect(appearance.profile_image_guidelines).to eq('') }
@@ -77,7 +77,7 @@ RSpec.describe Appearance do
end
end
- %i(logo header_logo favicon).each do |logo_type|
+ %i(logo header_logo pwa_icon favicon).each do |logo_type|
it_behaves_like 'logo paths', logo_type
end
diff --git a/spec/models/application_setting_spec.rb b/spec/models/application_setting_spec.rb
index 1454c82c531..5b99c68ec80 100644
--- a/spec/models/application_setting_spec.rb
+++ b/spec/models/application_setting_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ApplicationSetting do
+RSpec.describe ApplicationSetting, feature_category: :not_owned, type: :model do
using RSpec::Parameterized::TableSyntax
subject(:setting) { described_class.create_from_defaults }
@@ -128,6 +128,10 @@ RSpec.describe ApplicationSetting do
it { is_expected.to validate_presence_of(:max_terraform_state_size_bytes) }
it { is_expected.to validate_numericality_of(:max_terraform_state_size_bytes).only_integer.is_greater_than_or_equal_to(0) }
+ it { is_expected.to allow_value(true).for(:user_defaults_to_private_profile) }
+ it { is_expected.to allow_value(false).for(:user_defaults_to_private_profile) }
+ it { is_expected.not_to allow_value(nil).for(:user_defaults_to_private_profile) }
+
it 'ensures max_pages_size is an integer greater than 0 (or equal to 0 to indicate unlimited/maximum)' do
is_expected.to validate_numericality_of(:max_pages_size).only_integer.is_greater_than_or_equal_to(0)
.is_less_than(::Gitlab::Pages::MAX_SIZE / 1.megabyte)
@@ -220,6 +224,10 @@ RSpec.describe ApplicationSetting do
it { is_expected.to allow_value(false).for(:bulk_import_enabled) }
it { is_expected.not_to allow_value(nil).for(:bulk_import_enabled) }
+ it { is_expected.to allow_value(true).for(:allow_runner_registration_token) }
+ it { is_expected.to allow_value(false).for(:allow_runner_registration_token) }
+ it { is_expected.not_to allow_value(nil).for(:allow_runner_registration_token) }
+
context 'when deactivate_dormant_users is enabled' do
before do
stub_application_setting(deactivate_dormant_users: true)
@@ -717,35 +725,7 @@ RSpec.describe ApplicationSetting do
end
context 'housekeeping settings' do
- it { is_expected.not_to allow_value(0).for(:housekeeping_incremental_repack_period) }
-
- it 'wants the full repack period to be at least the incremental repack period' do
- subject.housekeeping_incremental_repack_period = 2
- subject.housekeeping_full_repack_period = 1
-
- expect(subject).not_to be_valid
- end
-
- it 'wants the gc period to be at least the full repack period' do
- subject.housekeeping_full_repack_period = 100
- subject.housekeeping_gc_period = 90
-
- expect(subject).not_to be_valid
- end
-
- it 'allows the same period for incremental repack and full repack, effectively skipping incremental repack' do
- subject.housekeeping_incremental_repack_period = 2
- subject.housekeeping_full_repack_period = 2
-
- expect(subject).to be_valid
- end
-
- it 'allows the same period for full repack and gc, effectively skipping full repack' do
- subject.housekeeping_full_repack_period = 100
- subject.housekeeping_gc_period = 100
-
- expect(subject).to be_valid
- end
+ it { is_expected.not_to allow_value(0).for(:housekeeping_optimize_repository_period) }
end
context 'gitaly timeouts' do
diff --git a/spec/models/bulk_imports/entity_spec.rb b/spec/models/bulk_imports/entity_spec.rb
index f4f2b174a7b..b1c65c6b9ee 100644
--- a/spec/models/bulk_imports/entity_spec.rb
+++ b/spec/models/bulk_imports/entity_spec.rb
@@ -325,4 +325,24 @@ RSpec.describe BulkImports::Entity, type: :model do
expect(project_entity.update_service).to eq(::Projects::UpdateService)
end
end
+
+ describe '#full_path' do
+    it 'returns group full path for group entity' do
+ group_entity = build(:bulk_import_entity, :group_entity, group: build(:group))
+
+ expect(group_entity.full_path).to eq(group_entity.group.full_path)
+ end
+
+ it 'returns project full path for project entity' do
+ project_entity = build(:bulk_import_entity, :project_entity, project: build(:project))
+
+ expect(project_entity.full_path).to eq(project_entity.project.full_path)
+ end
+
+ it 'returns nil when not associated with group or project' do
+ entity = build(:bulk_import_entity, group: nil, project: nil)
+
+ expect(entity.full_path).to eq(nil)
+ end
+ end
end
diff --git a/spec/models/chat_name_spec.rb b/spec/models/chat_name_spec.rb
index 02c38479d1a..0838c232872 100644
--- a/spec/models/chat_name_spec.rb
+++ b/spec/models/chat_name_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ChatName do
+RSpec.describe ChatName, feature_category: :integrations do
let_it_be(:chat_name) { create(:chat_name) }
subject { chat_name }
@@ -11,17 +11,15 @@ RSpec.describe ChatName do
it { is_expected.to belong_to(:user) }
it { is_expected.to validate_presence_of(:user) }
- it { is_expected.to validate_presence_of(:integration) }
it { is_expected.to validate_presence_of(:team_id) }
it { is_expected.to validate_presence_of(:chat_id) }
- it { is_expected.to validate_uniqueness_of(:user_id).scoped_to(:integration_id) }
- it { is_expected.to validate_uniqueness_of(:chat_id).scoped_to(:integration_id, :team_id) }
+ it { is_expected.to validate_uniqueness_of(:chat_id).scoped_to(:team_id) }
- it 'is removed when the project is deleted' do
- expect { subject.reload.integration.project.delete }.to change { ChatName.count }.by(-1)
+ it 'is not removed when the project is deleted' do
+ expect { subject.reload.integration.project.delete }.not_to change { ChatName.count }
- expect(ChatName.where(id: subject.id)).not_to exist
+ expect(ChatName.where(id: subject.id)).to exist
end
describe '#update_last_used_at', :clean_gitlab_redis_shared_state do
diff --git a/spec/models/ci/bridge_spec.rb b/spec/models/ci/bridge_spec.rb
index 169b00b9c74..70e977e37ba 100644
--- a/spec/models/ci/bridge_spec.rb
+++ b/spec/models/ci/bridge_spec.rb
@@ -21,8 +21,8 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
{ trigger: { project: 'my/project', branch: 'master' } }
end
- it 'has many sourced pipelines' do
- expect(bridge).to have_many(:sourced_pipelines)
+ it 'has one sourced pipeline' do
+ expect(bridge).to have_one(:sourced_pipeline)
end
it_behaves_like 'has ID tokens', :ci_bridge
@@ -34,24 +34,6 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
expect(bridge).to have_one(:downstream_pipeline)
end
- describe '#sourced_pipelines' do
- subject { bridge.sourced_pipelines }
-
- it 'raises error' do
- expect { subject }.to raise_error RuntimeError, 'Ci::Bridge does not have sourced_pipelines association'
- end
-
- context 'when ci_bridge_remove_sourced_pipelines is disabled' do
- before do
- stub_feature_flags(ci_bridge_remove_sourced_pipelines: false)
- end
-
- it 'returns the sourced_pipelines association' do
- expect(bridge.sourced_pipelines).to eq([])
- end
- end
- end
-
describe '#retryable?' do
let(:bridge) { create(:ci_bridge, :success) }
@@ -393,25 +375,6 @@ RSpec.describe Ci::Bridge, feature_category: :continuous_integration do
{ key: 'VAR7', value: 'value7 $VAR1', raw: true }
)
end
-
- context 'when the FF ci_raw_variables_in_yaml_config is disabled' do
- before do
- stub_feature_flags(ci_raw_variables_in_yaml_config: false)
- end
-
- it 'ignores the raw attribute' do
- expect(downstream_variables).to contain_exactly(
- { key: 'BRIDGE', value: 'cross' },
- { key: 'VAR1', value: 'value1' },
- { key: 'VAR2', value: 'value2 value1' },
- { key: 'VAR3', value: 'value3 value1' },
- { key: 'VAR4', value: 'value4 value1' },
- { key: 'VAR5', value: 'value5 value1' },
- { key: 'VAR6', value: 'value6 value1' },
- { key: 'VAR7', value: 'value7 value1' }
- )
- end
- end
end
end
diff --git a/spec/models/ci/build_spec.rb b/spec/models/ci/build_spec.rb
index c978e33bf54..dd1fbd7d0d5 100644
--- a/spec/models/ci/build_spec.rb
+++ b/spec/models/ci/build_spec.rb
@@ -1136,6 +1136,19 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
it do
is_expected.to all(a_hash_including(key: a_string_matching(/-protected$/)))
end
+
+ context 'and the cache has the `unprotect` option' do
+ let(:options) do
+ { cache: [
+ { key: "key", paths: ["public"], policy: "pull-push", unprotect: true },
+ { key: "key2", paths: ["public"], policy: "pull-push", unprotect: true }
+ ] }
+ end
+
+ it do
+ is_expected.to all(a_hash_including(key: a_string_matching(/-non_protected$/)))
+ end
+ end
end
context 'when pipeline is not on a protected ref' do
@@ -3533,6 +3546,52 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
end
end
+ context 'for the apple_app_store integration' do
+ let_it_be(:apple_app_store_integration) { create(:apple_app_store_integration) }
+
+ let(:apple_app_store_variables) do
+ [
+ { key: 'APP_STORE_CONNECT_API_KEY_ISSUER_ID', value: apple_app_store_integration.app_store_issuer_id, masked: true, public: false },
+ { key: 'APP_STORE_CONNECT_API_KEY_KEY', value: Base64.encode64(apple_app_store_integration.app_store_private_key), masked: true, public: false },
+ { key: 'APP_STORE_CONNECT_API_KEY_KEY_ID', value: apple_app_store_integration.app_store_key_id, masked: true, public: false }
+ ]
+ end
+
+      context 'when the apple_app_store integration exists' do
+ context 'when a build is protected' do
+ before do
+ allow(build.pipeline).to receive(:protected_ref?).and_return(true)
+ build.project.update!(apple_app_store_integration: apple_app_store_integration)
+ end
+
+ it 'includes apple_app_store variables' do
+ is_expected.to include(*apple_app_store_variables)
+ end
+ end
+
+ context 'when a build is not protected' do
+ before do
+ allow(build.pipeline).to receive(:protected_ref?).and_return(false)
+ build.project.update!(apple_app_store_integration: apple_app_store_integration)
+ end
+
+ it 'does not include the apple_app_store variables' do
+ expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_ISSUER_ID' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_KEY' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_KEY_ID' }).to be_nil
+ end
+ end
+ end
+
+ context 'when the apple_app_store integration does not exist' do
+ it 'does not include apple_app_store variables' do
+ expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_ISSUER_ID' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_KEY' }).to be_nil
+ expect(subject.find { |v| v[:key] == 'APP_STORE_CONNECT_API_KEY_KEY_ID' }).to be_nil
+ end
+ end
+ end
+
context 'when build has dependency which has dotenv variable' do
let!(:prepare) { create(:ci_build, pipeline: pipeline, stage_idx: 0) }
let!(:build) { create(:ci_build, pipeline: pipeline, stage_idx: 1, options: { dependencies: [prepare.name] }) }
@@ -5664,17 +5723,22 @@ RSpec.describe Ci::Build, feature_category: :continuous_integration do
expect(prefix).to eq(ci_testing_partition_id)
end
+ end
- context 'when ci_build_partition_id_token_prefix is disabled' do
- before do
- stub_feature_flags(ci_build_partition_id_token_prefix: false)
- end
+ describe '#remove_token!' do
+ it 'removes the token' do
+ expect(build.token).to be_present
- it 'does not include partition_id as a token prefix' do
- prefix = ci_build.token.split('_').first.to_i(16)
+ build.remove_token!
- expect(prefix).not_to eq(ci_testing_partition_id)
- end
+ expect(build.token).to be_nil
+ expect(build.changes).to be_empty
+ end
+
+ it 'does not remove the token when FF is disabled' do
+ stub_feature_flags(remove_job_token_on_completion: false)
+
+ expect { build.remove_token! }.not_to change(build, :token)
end
end
end
diff --git a/spec/models/ci/job_artifact_spec.rb b/spec/models/ci/job_artifact_spec.rb
index 18aaab1d1f3..a1fd51f60ea 100644
--- a/spec/models/ci/job_artifact_spec.rb
+++ b/spec/models/ci/job_artifact_spec.rb
@@ -134,6 +134,38 @@ RSpec.describe Ci::JobArtifact do
end
end
+  describe '#public_access?' do
+ subject { artifact.public_access? }
+
+ context 'when job artifact created by default' do
+ let!(:artifact) { create(:ci_job_artifact) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when job artifact created as public' do
+ let!(:artifact) { create(:ci_job_artifact, :public) }
+
+ it { is_expected.to be_truthy }
+ end
+
+ context 'when job artifact created as private' do
+ let!(:artifact) { build(:ci_job_artifact, :private) }
+
+ it { is_expected.to be_falsey }
+
+ context 'and the non_public_artifacts feature flag is disabled' do
+ let!(:artifact) { build(:ci_job_artifact, :private) }
+
+ before do
+ stub_feature_flags(non_public_artifacts: false)
+ end
+
+ it { is_expected.to be_truthy }
+ end
+ end
+ end
+
describe '.file_types_for_report' do
it 'returns the report file types for the report type' do
expect(described_class.file_types_for_report(:test)).to match_array(%w[junit])
@@ -690,8 +722,8 @@ RSpec.describe Ci::JobArtifact do
end
it 'updates project statistics' do
- expect(ProjectStatistics).to receive(:increment_statistic).once
- .with(project, :build_artifacts_size, -job_artifact.file.size)
+ expect(ProjectStatistics).to receive(:bulk_increment_statistic).once
+ .with(project, :build_artifacts_size, [have_attributes(amount: -job_artifact.file.size)])
pipeline.destroy!
end
diff --git a/spec/models/ci/namespace_mirror_spec.rb b/spec/models/ci/namespace_mirror_spec.rb
index 29447cbc89d..63e6e9e6b26 100644
--- a/spec/models/ci/namespace_mirror_spec.rb
+++ b/spec/models/ci/namespace_mirror_spec.rb
@@ -96,7 +96,7 @@ RSpec.describe Ci::NamespaceMirror do
describe '.by_namespace_id' do
subject(:result) { described_class.by_namespace_id(group2.id) }
- it 'returns namesapce mirrors of namespace id' do
+ it 'returns namespace mirrors of namespace id' do
expect(result).to contain_exactly(group2.ci_namespace_mirror)
end
end
diff --git a/spec/models/ci/pipeline_spec.rb b/spec/models/ci/pipeline_spec.rb
index b72693d9994..5888f9d109c 100644
--- a/spec/models/ci/pipeline_spec.rb
+++ b/spec/models/ci/pipeline_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
+RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep, feature_category: :continuous_integration do
include ProjectForksHelper
include StubRequests
include Ci::SourcePipelineHelpers
@@ -1322,6 +1322,21 @@ RSpec.describe Ci::Pipeline, :mailer, factory_default: :keep do
end
end
+ context 'when tag is not found' do
+ let(:pipeline) do
+ create(:ci_pipeline, project: project, ref: 'not_found_tag', tag: true)
+ end
+
+ it 'does not expose tag variables' do
+ expect(subject.to_hash.keys)
+ .not_to include(
+ 'CI_COMMIT_TAG',
+ 'CI_COMMIT_TAG_MESSAGE',
+ 'CI_BUILD_TAG'
+ )
+ end
+ end
+
context 'without a commit' do
let(:pipeline) { build(:ci_empty_pipeline, :created, sha: nil) }
diff --git a/spec/models/ci/runner_machine_spec.rb b/spec/models/ci/runner_machine_spec.rb
new file mode 100644
index 00000000000..e39f987110f
--- /dev/null
+++ b/spec/models/ci/runner_machine_spec.rb
@@ -0,0 +1,51 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Ci::RunnerMachine, feature_category: :runner_fleet, type: :model do
+ it_behaves_like 'having unique enum values'
+
+ it { is_expected.to belong_to(:runner) }
+
+ describe 'validation' do
+ it { is_expected.to validate_presence_of(:runner) }
+ it { is_expected.to validate_presence_of(:machine_xid) }
+ it { is_expected.to validate_length_of(:machine_xid).is_at_most(64) }
+ it { is_expected.to validate_length_of(:version).is_at_most(2048) }
+ it { is_expected.to validate_length_of(:revision).is_at_most(255) }
+ it { is_expected.to validate_length_of(:platform).is_at_most(255) }
+ it { is_expected.to validate_length_of(:architecture).is_at_most(255) }
+ it { is_expected.to validate_length_of(:ip_address).is_at_most(1024) }
+
+ context 'when runner has config' do
+ it 'is valid' do
+ runner_machine = build(:ci_runner_machine, config: { gpus: "all" })
+
+ expect(runner_machine).to be_valid
+ end
+ end
+
+ context 'when runner has an invalid config' do
+ it 'is invalid' do
+ runner_machine = build(:ci_runner_machine, config: { test: 1 })
+
+ expect(runner_machine).not_to be_valid
+ end
+ end
+ end
+
+ describe '.stale', :freeze_time do
+ subject { described_class.stale.ids }
+
+ let!(:runner_machine1) { create(:ci_runner_machine, created_at: 8.days.ago, contacted_at: 7.days.ago) }
+ let!(:runner_machine2) { create(:ci_runner_machine, created_at: 7.days.ago, contacted_at: nil) }
+ let!(:runner_machine3) { create(:ci_runner_machine, created_at: 5.days.ago, contacted_at: nil) }
+ let!(:runner_machine4) do
+ create(:ci_runner_machine, created_at: (7.days - 1.second).ago, contacted_at: (7.days - 1.second).ago)
+ end
+
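+    # Per the examples below, a machine counts as stale when it was created at least 7 days ago
+    # and has either never contacted GitLab or last did so at least 7 days ago.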
+ it 'returns stale runner machines' do
+ is_expected.to match_array([runner_machine1.id, runner_machine2.id])
+ end
+ end
+end
diff --git a/spec/models/ci/runner_spec.rb b/spec/models/ci/runner_spec.rb
index 803b766c822..b7c7b67b98f 100644
--- a/spec/models/ci/runner_spec.rb
+++ b/spec/models/ci/runner_spec.rb
@@ -11,6 +11,13 @@ RSpec.describe Ci::Runner, feature_category: :runner do
let(:factory_name) { :ci_runner }
end
+ context 'loose foreign key on ci_runners.creator_id' do
+ it_behaves_like 'cleanup by a loose foreign key' do
+ let!(:parent) { create(:user) }
+ let!(:model) { create(:ci_runner, creator: parent) }
+ end
+ end
+
describe 'groups association' do
# Due to other associations such as projects this whole spec is allowed to
# generate cross-database queries. So we have this temporary spec to
@@ -530,7 +537,7 @@ RSpec.describe Ci::Runner, feature_category: :runner do
end
end
- describe '.stale' do
+ describe '.stale', :freeze_time do
subject { described_class.stale }
let!(:runner1) { create(:ci_runner, :instance, created_at: 4.months.ago, contacted_at: 3.months.ago + 10.seconds) }
@@ -1090,6 +1097,23 @@ RSpec.describe Ci::Runner, feature_category: :runner do
expect(runner.runner_version).to be_nil
end
+
+ context 'with only ip_address specified', :freeze_time do
+ subject(:heartbeat) do
+ runner.heartbeat(ip_address: '1.1.1.1')
+ end
+
+ it 'updates only ip_address' do
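+        # Only the supplied ip_address and the contacted_at timestamp are expected to be written to the Redis cache.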
+ attrs = Gitlab::Json.dump(ip_address: '1.1.1.1', contacted_at: Time.current)
+
+ Gitlab::Redis::Cache.with do |redis|
+ redis_key = runner.send(:cache_attribute_key)
+ expect(redis).to receive(:set).with(redis_key, attrs, any_args)
+ end
+
+ heartbeat
+ end
+ end
end
context 'when database was not updated recently' do
diff --git a/spec/models/ci/runner_version_spec.rb b/spec/models/ci/runner_version_spec.rb
index 552b271fe85..dfaa2201859 100644
--- a/spec/models/ci/runner_version_spec.rb
+++ b/spec/models/ci/runner_version_spec.rb
@@ -35,12 +35,6 @@ RSpec.describe Ci::RunnerVersion, feature_category: :runner_fleet do
end
describe 'validation' do
- context 'when runner version is too long' do
- let(:runner_version) { build(:ci_runner_version, version: 'a' * 2049) }
-
- it 'is not valid' do
- expect(runner_version).to be_invalid
- end
- end
+ it { is_expected.to validate_length_of(:version).is_at_most(2048) }
end
end
diff --git a/spec/models/clusters/providers/aws_spec.rb b/spec/models/clusters/providers/aws_spec.rb
index 2afed663edf..cb2960e1557 100644
--- a/spec/models/clusters/providers/aws_spec.rb
+++ b/spec/models/clusters/providers/aws_spec.rb
@@ -75,39 +75,6 @@ RSpec.describe Clusters::Providers::Aws do
end
end
- describe '#api_client' do
- let(:provider) { create(:cluster_provider_aws) }
- let(:credentials) { double }
- let(:client) { double }
-
- subject { provider.api_client }
-
- before do
- allow(provider).to receive(:credentials).and_return(credentials)
-
- expect(Aws::CloudFormation::Client).to receive(:new)
- .with(credentials: credentials, region: provider.region)
- .and_return(client)
- end
-
- it { is_expected.to eq client }
- end
-
- describe '#credentials' do
- let(:provider) { create(:cluster_provider_aws) }
- let(:credentials) { double }
-
- subject { provider.credentials }
-
- before do
- expect(Aws::Credentials).to receive(:new)
- .with(provider.access_key_id, provider.secret_access_key, provider.session_token)
- .and_return(credentials)
- end
-
- it { is_expected.to eq credentials }
- end
-
describe '#created_by_user' do
let(:provider) { create(:cluster_provider_aws) }
diff --git a/spec/models/clusters/providers/gcp_spec.rb b/spec/models/clusters/providers/gcp_spec.rb
index a1f00069937..afd5699091a 100644
--- a/spec/models/clusters/providers/gcp_spec.rb
+++ b/spec/models/clusters/providers/gcp_spec.rb
@@ -111,31 +111,6 @@ RSpec.describe Clusters::Providers::Gcp do
end
end
- describe '#api_client' do
- subject { gcp.api_client }
-
- context 'when status is creating' do
- let(:gcp) { build(:cluster_provider_gcp, :creating) }
-
- it 'returns Cloud Platform API clinet' do
- expect(subject).to be_an_instance_of(GoogleApi::CloudPlatform::Client)
- expect(subject.access_token).to eq(gcp.access_token)
- end
- end
-
- context 'when status is created' do
- let(:gcp) { build(:cluster_provider_gcp, :created) }
-
- it { is_expected.to be_nil }
- end
-
- context 'when status is errored' do
- let(:gcp) { build(:cluster_provider_gcp, :errored) }
-
- it { is_expected.to be_nil }
- end
- end
-
describe '#nullify_credentials' do
let(:provider) { create(:cluster_provider_gcp, :creating) }
diff --git a/spec/models/commit_collection_spec.rb b/spec/models/commit_collection_spec.rb
index 93c696cae54..6dd34c3e21f 100644
--- a/spec/models/commit_collection_spec.rb
+++ b/spec/models/commit_collection_spec.rb
@@ -15,26 +15,34 @@ RSpec.describe CommitCollection do
end
describe '.committers' do
+ subject(:collection) { described_class.new(project, [commit]) }
+
it 'returns a relation of users when users are found' do
user = create(:user, email: commit.committer_email.upcase)
- collection = described_class.new(project, [commit])
expect(collection.committers).to contain_exactly(user)
end
it 'returns empty array when committers cannot be found' do
- collection = described_class.new(project, [commit])
-
expect(collection.committers).to be_empty
end
it 'excludes authors of merge commits' do
commit = project.commit("60ecb67744cb56576c30214ff52294f8ce2def98")
create(:user, email: commit.committer_email.upcase)
- collection = described_class.new(project, [commit])
expect(collection.committers).to be_empty
end
+
+ context 'when committer email is nil' do
+ before do
+ allow(commit).to receive(:committer_email).and_return(nil)
+ end
+
+ it 'returns empty array when committers cannot be found' do
+ expect(collection.committers).to be_empty
+ end
+ end
end
describe '#without_merge_commits' do
diff --git a/spec/models/commit_signatures/ssh_signature_spec.rb b/spec/models/commit_signatures/ssh_signature_spec.rb
index 629d9c5ec53..235fd099c93 100644
--- a/spec/models/commit_signatures/ssh_signature_spec.rb
+++ b/spec/models/commit_signatures/ssh_signature_spec.rb
@@ -2,24 +2,30 @@
require 'spec_helper'
-RSpec.describe CommitSignatures::SshSignature do
+RSpec.describe CommitSignatures::SshSignature, feature_category: :source_code_management do
# This commit is seeded from https://gitlab.com/gitlab-org/gitlab-test
# For instructions on how to add more seed data, see the project README
let_it_be(:commit_sha) { '7b5160f9bb23a3d58a0accdbe89da13b96b1ece9' }
let_it_be(:project) { create(:project, :repository, path: 'sample-project') }
let_it_be(:commit) { create(:commit, project: project, sha: commit_sha) }
let_it_be(:ssh_key) { create(:ed25519_key_256) }
+ let_it_be(:user) { ssh_key.user }
+ let_it_be(:key_fingerprint) { ssh_key.fingerprint_sha256 }
+
+ let(:signature) do
+ create(:ssh_signature, commit_sha: commit_sha, key: ssh_key, key_fingerprint_sha256: key_fingerprint, user: user)
+ end
let(:attributes) do
{
commit_sha: commit_sha,
project: project,
- key: ssh_key
+ key: ssh_key,
+ key_fingerprint_sha256: key_fingerprint,
+ user: user
}
end
- let(:signature) { create(:ssh_signature, commit_sha: commit_sha, key: ssh_key) }
-
it_behaves_like 'having unique enum values'
it_behaves_like 'commit signature'
it_behaves_like 'signature with type checking', :ssh
@@ -39,9 +45,31 @@ RSpec.describe CommitSignatures::SshSignature do
end
end
+ describe '#key_fingerprint_sha256' do
+ it 'returns the fingerprint_sha256 associated with the SSH key' do
+ expect(signature.key_fingerprint_sha256).to eq(key_fingerprint)
+ end
+
+ context 'when the SSH key is no longer associated with the signature' do
+ it 'returns the fingerprint_sha256 stored in signature' do
+ signature.update!(key_id: nil)
+
+ expect(signature.key_fingerprint_sha256).to eq(key_fingerprint)
+ end
+ end
+ end
+
describe '#signed_by_user' do
it 'returns the user associated with the SSH key' do
expect(signature.signed_by_user).to eq(ssh_key.user)
end
+
+ context 'when the SSH key is no longer associated with the signature' do
+ it 'returns the user stored in signature' do
+ signature.update!(key_id: nil)
+
+ expect(signature.signed_by_user).to eq(user)
+ end
+ end
end
end
diff --git a/spec/models/commit_spec.rb b/spec/models/commit_spec.rb
index 4b5aabe745b..36d0e37454d 100644
--- a/spec/models/commit_spec.rb
+++ b/spec/models/commit_spec.rb
@@ -847,20 +847,6 @@ eos
expect(unsigned_commit.has_signature?).to be_falsey
expect(commit.has_signature?).to be_falsey
end
-
- context 'when feature flag "ssh_commit_signatures" is disabled' do
- before do
- stub_feature_flags(ssh_commit_signatures: false)
- end
-
- it 'reports no signature' do
- expect(ssh_signed_commit).not_to have_signature
- end
-
- it 'does not return signature data' do
- expect(ssh_signed_commit.signature).to be_nil
- end
- end
end
describe '#has_been_reverted?' do
diff --git a/spec/models/concerns/counter_attribute_spec.rb b/spec/models/concerns/counter_attribute_spec.rb
index 1dd9b78d642..c8224c64ba2 100644
--- a/spec/models/concerns/counter_attribute_spec.rb
+++ b/spec/models/concerns/counter_attribute_spec.rb
@@ -37,6 +37,50 @@ RSpec.describe CounterAttribute, :counter_attribute, :clean_gitlab_redis_shared_
end
end
+ describe '#initiate_refresh!' do
+ context 'when counter attribute is enabled' do
+ let(:attribute) { :build_artifacts_size }
+
+ it 'initiates refresh on the BufferedCounter' do
+ expect_next_instance_of(Gitlab::Counters::BufferedCounter, model, attribute) do |counter|
+ expect(counter).to receive(:initiate_refresh!)
+ end
+
+ model.initiate_refresh!(attribute)
+ end
+ end
+
+ context 'when counter attribute is not enabled' do
+ let(:attribute) { :snippets_size }
+
+ it 'raises error' do
+ expect { model.initiate_refresh!(attribute) }.to raise_error(ArgumentError)
+ end
+ end
+ end
+
+ describe '#finalize_refresh' do
+ let(:attribute) { :build_artifacts_size }
+
+ context 'when counter attribute is enabled' do
+      it 'finalizes refresh on the BufferedCounter' do
+ expect_next_instance_of(Gitlab::Counters::BufferedCounter, model, attribute) do |counter|
+ expect(counter).to receive(:finalize_refresh)
+ end
+
+ model.finalize_refresh(attribute)
+ end
+ end
+
+ context 'when counter attribute is not enabled' do
+ let(:attribute) { :snippets_size }
+
+ it 'raises error' do
+ expect { model.finalize_refresh(attribute) }.to raise_error(ArgumentError)
+ end
+ end
+ end
+
describe '#counter' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/models/concerns/has_user_type_spec.rb b/spec/models/concerns/has_user_type_spec.rb
index 462b28f99be..bd128112113 100644
--- a/spec/models/concerns/has_user_type_spec.rb
+++ b/spec/models/concerns/has_user_type_spec.rb
@@ -5,7 +5,8 @@ require 'spec_helper'
RSpec.describe User do
specify 'types consistency checks', :aggregate_failures do
expect(described_class::USER_TYPES.keys)
- .to match_array(%w[human ghost alert_bot project_bot support_bot service_user security_bot visual_review_bot migration_bot automation_bot admin_bot])
+ .to match_array(%w[human ghost alert_bot project_bot support_bot service_user security_bot visual_review_bot
+ migration_bot automation_bot admin_bot suggested_reviewers_bot])
expect(described_class::USER_TYPES).to include(*described_class::BOT_USER_TYPES)
expect(described_class::USER_TYPES).to include(*described_class::NON_INTERNAL_USER_TYPES)
expect(described_class::USER_TYPES).to include(*described_class::INTERNAL_USER_TYPES)
diff --git a/spec/models/concerns/noteable_spec.rb b/spec/models/concerns/noteable_spec.rb
index 82aca13c929..383ed68816e 100644
--- a/spec/models/concerns/noteable_spec.rb
+++ b/spec/models/concerns/noteable_spec.rb
@@ -63,6 +63,82 @@ RSpec.describe Noteable do
end
end
+ # rubocop:disable RSpec/MultipleMemoizedHelpers
+ describe '#commenters' do
+ shared_examples 'commenters' do
+ it 'does not automatically include the noteable author' do
+ expect(commenters).not_to include(noteable.author)
+ end
+
+ context 'with no user' do
+ it 'contains a distinct list of non-internal note authors' do
+ expect(commenters).to contain_exactly(commenter, another_commenter)
+ end
+ end
+
+ context 'with non project member' do
+ let(:current_user) { create(:user) }
+
+ it 'contains a distinct list of non-internal note authors' do
+ expect(commenters).to contain_exactly(commenter, another_commenter)
+ end
+
+ it 'does not include a commenter from another noteable' do
+ expect(commenters).not_to include(other_noteable_commenter)
+ end
+ end
+ end
+
+ let_it_be(:commenter) { create(:user) }
+ let_it_be(:another_commenter) { create(:user) }
+ let_it_be(:internal_commenter) { create(:user) }
+ let_it_be(:other_noteable_commenter) { create(:user) }
+
+ let(:current_user) {}
+ let(:commenters) { noteable.commenters(user: current_user) }
+
+ let!(:comments) { create_list(:note, 2, author: commenter, noteable: noteable, project: noteable.project) }
+ let!(:more_comments) { create_list(:note, 2, author: another_commenter, noteable: noteable, project: noteable.project) }
+
+ context 'when noteable is an issue' do
+ let(:noteable) { create(:issue) }
+
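+      # Internal notes are visible only to users with at least the Reporter role, so their
+      # authors appear in #commenters only for such users.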
+ let!(:internal_comments) { create_list(:note, 2, author: internal_commenter, noteable: noteable, project: noteable.project, internal: true) }
+ let!(:other_noteable_comments) { create_list(:note, 2, author: other_noteable_commenter, noteable: create(:issue, project: noteable.project), project: noteable.project) }
+
+ it_behaves_like 'commenters'
+
+ context 'with reporter' do
+ let(:current_user) { create(:user) }
+
+ before do
+ noteable.project.add_reporter(current_user)
+ end
+
+      it 'contains a distinct list of note authors, including internal note authors' do
+ expect(commenters).to contain_exactly(commenter, another_commenter, internal_commenter)
+ end
+
+ context 'with noteable author' do
+ let(:current_user) { noteable.author }
+
+        it 'contains a distinct list of note authors, including internal note authors' do
+ expect(commenters).to contain_exactly(commenter, another_commenter, internal_commenter)
+ end
+ end
+ end
+ end
+
+ context 'when noteable is a merge request' do
+ let(:noteable) { create(:merge_request) }
+
+ let!(:other_noteable_comments) { create_list(:note, 2, author: other_noteable_commenter, noteable: create(:merge_request, source_project: noteable.project, source_branch: 'feat123'), project: noteable.project) }
+
+ it_behaves_like 'commenters'
+ end
+ end
+ # rubocop:enable RSpec/MultipleMemoizedHelpers
+
describe '#discussion_ids_relation' do
it 'returns ordered discussion_ids' do
discussion_ids = subject.discussion_ids_relation.pluck(:discussion_id)
diff --git a/spec/models/concerns/safely_change_column_default_spec.rb b/spec/models/concerns/safely_change_column_default_spec.rb
new file mode 100644
index 00000000000..36782170eaf
--- /dev/null
+++ b/spec/models/concerns/safely_change_column_default_spec.rb
@@ -0,0 +1,75 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe SafelyChangeColumnDefault, feature_category: :database do
+ include Gitlab::Database::DynamicModelHelpers
+ before do
+ ApplicationRecord.connection.execute(<<~SQL)
+ CREATE TABLE _test_gitlab_main_data(
+ id bigserial primary key not null,
+ value bigint default 1
+ );
+ SQL
+ end
+
+ let!(:model) do
+ define_batchable_model('_test_gitlab_main_data', connection: ApplicationRecord.connection).tap do |model|
+ model.include(described_class)
+ model.columns_changing_default(:value)
+ model.columns # Force the schema cache to populate
+ end
+ end
+
+ def alter_default(new_default)
+ ApplicationRecord.connection.execute(<<~SQL)
+ ALTER TABLE _test_gitlab_main_data ALTER COLUMN value SET DEFAULT #{new_default}
+ SQL
+ end
+
+ def recorded_insert_queries(&block)
+ recorder = ActiveRecord::QueryRecorder.new
+ recorder.record(&block)
+
+ recorder.log.select { |q| q.include?('INSERT INTO') }
+ end
+
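+  # Parses the INSERT statement with PgQuery to check whether the `value` column was explicitly written.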
+ def query_includes_value_column?(query)
+ parsed = PgQuery.parse(query)
+ parsed.tree.stmts.first.stmt.insert_stmt.cols.any? { |node| node.res_target.name == 'value' }
+ end
+
+ it 'forces the column to be written on a change' do
+ queries = recorded_insert_queries do
+ model.create!(value: 1)
+ end
+
+ expect(queries.length).to eq(1)
+
+ expect(query_includes_value_column?(queries.first)).to be_truthy
+ end
+
+ it 'does not write the column without a change' do
+ queries = recorded_insert_queries do
+ model.create!
+ end
+
+ expect(queries.length).to eq(1)
+ expect(query_includes_value_column?(queries.first)).to be_falsey
+ end
+
+ it 'does not send the old column value if the default has changed' do
+ alter_default(2)
+ model.create!
+
+ expect(model.pluck(:value)).to contain_exactly(2)
+ end
+
+  it 'writes a value equal to the old default instead of letting the new database default apply' do
+ alter_default(2)
+
+ model.create!(value: 1)
+
+ expect(model.pluck(:value)).to contain_exactly(1)
+ end
+end
diff --git a/spec/models/concerns/sensitive_serializable_hash_spec.rb b/spec/models/concerns/sensitive_serializable_hash_spec.rb
index 591a4383a03..0bfd2d6a7de 100644
--- a/spec/models/concerns/sensitive_serializable_hash_spec.rb
+++ b/spec/models/concerns/sensitive_serializable_hash_spec.rb
@@ -95,7 +95,7 @@ RSpec.describe SensitiveSerializableHash do
expect(model.attributes).to include(attribute) # double-check the attribute does exist
expect(model.serializable_hash).not_to include(attribute)
- expect(model.to_json).not_to include(attribute)
+ expect(model.to_json).not_to match(/\b#{attribute}\b/)
expect(model.as_json).not_to include(attribute)
end
end
diff --git a/spec/models/deployment_spec.rb b/spec/models/deployment_spec.rb
index fb3883820fd..f0fdc62e6c7 100644
--- a/spec/models/deployment_spec.rb
+++ b/spec/models/deployment_spec.rb
@@ -170,18 +170,7 @@ RSpec.describe Deployment do
end
end
- it 'executes Deployments::DropOlderDeploymentsWorker asynchronously' do
- stub_feature_flags(prevent_outdated_deployment_jobs: false)
-
- expect(Deployments::DropOlderDeploymentsWorker)
- .to receive(:perform_async).once.with(deployment.id)
-
- deployment.run!
- end
-
- it 'does not execute Deployments::DropOlderDeploymentsWorker when FF enabled' do
- stub_feature_flags(prevent_outdated_deployment_jobs: true)
-
+ it 'does not execute Deployments::DropOlderDeploymentsWorker' do
expect(Deployments::DropOlderDeploymentsWorker)
.not_to receive(:perform_async).with(deployment.id)
@@ -413,6 +402,16 @@ RSpec.describe Deployment do
it { is_expected.to be_falsey }
end
+
+ context 'when environment is undefined' do
+ let(:deployment) { build(:deployment, :success, project: project, environment: environment) }
+
+ before do
+ deployment.environment = nil
+ end
+
+ it { is_expected.to be_falsey }
+ end
end
describe '#success?' do
diff --git a/spec/models/environment_spec.rb b/spec/models/environment_spec.rb
index 2670127442e..0d53ebdefe9 100644
--- a/spec/models/environment_spec.rb
+++ b/spec/models/environment_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Environment, :use_clean_rails_memory_store_caching do
+RSpec.describe Environment, :use_clean_rails_memory_store_caching, feature_category: :continuous_delivery do
include ReactiveCachingHelpers
using RSpec::Parameterized::TableSyntax
include RepoHelpers
@@ -2029,4 +2029,40 @@ RSpec.describe Environment, :use_clean_rails_memory_store_caching do
subject
end
end
+
+ describe '#deployed_and_updated_before' do
+ subject do
+ described_class.deployed_and_updated_before(project_id, before)
+ end
+
+ let(:project_id) { project.id }
+ let(:before) { 1.week.ago.to_date.to_s }
+ let(:environment) { create(:environment, project: project, updated_at: 2.weeks.ago) }
+ let!(:stale_deployment) { create(:deployment, environment: environment, updated_at: 2.weeks.ago) }
+
+ it 'excludes environments with recent deployments' do
+ create(:deployment, environment: environment, updated_at: Date.current)
+
+ is_expected.to match_array([])
+ end
+
+ it 'includes environments with no deployments' do
+ environment1 = create(:environment, project: project, updated_at: 2.weeks.ago)
+
+ is_expected.to match_array([environment, environment1])
+ end
+
+ it 'excludes environments that have been recently updated with no deployments' do
+ create(:environment, project: project)
+
+ is_expected.to match_array([environment])
+ end
+
+ it 'excludes environments that have been recently updated with stale deployments' do
+ environment1 = create(:environment, project: project)
+ create(:deployment, environment: environment1, updated_at: 2.weeks.ago)
+
+ is_expected.to match_array([environment])
+ end
+ end
end
diff --git a/spec/models/event_collection_spec.rb b/spec/models/event_collection_spec.rb
index 40b7930f02b..13983dcfde3 100644
--- a/spec/models/event_collection_spec.rb
+++ b/spec/models/event_collection_spec.rb
@@ -89,6 +89,25 @@ RSpec.describe EventCollection do
expect(events).to contain_exactly(closed_issue_event)
end
+ context 'when there are multiple issue events' do
+ let!(:work_item_event) do
+ create(
+ :event,
+ :created,
+ project: project,
+ target: create(:work_item, :task, project: project),
+ target_type: 'WorkItem'
+ )
+ end
+
+ it 'includes work item events too' do
+ filter = EventFilter.new(EventFilter::ISSUE)
+ events = described_class.new(projects, filter: filter).to_a
+
+ expect(events).to contain_exactly(closed_issue_event, work_item_event)
+ end
+ end
+
it 'allows filtering of events using an EventFilter, returning several items' do
filter = EventFilter.new(EventFilter::MERGED)
events = described_class.new(projects, filter: filter).to_a
diff --git a/spec/models/event_spec.rb b/spec/models/event_spec.rb
index 667f3ddff72..f170eeb5841 100644
--- a/spec/models/event_spec.rb
+++ b/spec/models/event_spec.rb
@@ -102,7 +102,20 @@ RSpec.describe Event, feature_category: :users do
end
describe 'scopes' do
- describe 'created_at' do
+ describe '.for_issue' do
+ let(:issue_event) { create(:event, :for_issue, project: project) }
+ let(:work_item_event) { create(:event, :for_work_item, project: project) }
+
+ before do
+ create(:event, :for_design, project: project)
+ end
+
+ it 'returns events for Issue and WorkItem target_type' do
+ expect(described_class.for_issue).to contain_exactly(issue_event, work_item_event)
+ end
+ end
+
+ describe '.created_at' do
it 'can find the right event' do
time = 1.day.ago
event = create(:event, created_at: time, project: project)
diff --git a/spec/models/factories_spec.rb b/spec/models/factories_spec.rb
index 4915c0bd870..d6e746986d6 100644
--- a/spec/models/factories_spec.rb
+++ b/spec/models/factories_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
# `:saas` is used to test `gitlab_subscription` factory.
# It's not available on FOSS but also this very factory is not.
-RSpec.describe 'factories', :saas do
+RSpec.describe 'factories', :saas, :with_license, feature_category: :tooling do
include Database::DatabaseHelpers
# Used in `skipped` and indicates whether to skip any traits including the
@@ -188,7 +188,13 @@ RSpec.describe 'factories', :saas do
before do
factories_based_on_view.each do |factory|
view = build(factory).class.table_name
- swapout_view_for_table(view)
+ view_gitlab_schema = Gitlab::Database::GitlabSchema.table_schema(view)
+ Gitlab::Database.database_base_models.each_value.select do |base_model|
+ connection = base_model.connection
+ next unless Gitlab::Database.gitlab_schemas_for_connection(connection).include?(view_gitlab_schema)
+
+ swapout_view_for_table(view, connection: connection)
+ end
end
end
diff --git a/spec/models/group_spec.rb b/spec/models/group_spec.rb
index dfba0470d35..4605c086763 100644
--- a/spec/models/group_spec.rb
+++ b/spec/models/group_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Group do
+RSpec.describe Group, feature_category: :subgroups do
include ReloadHelpers
include StubGitlabCalls
@@ -11,9 +11,11 @@ RSpec.describe Group do
describe 'associations' do
it { is_expected.to have_many :projects }
it { is_expected.to have_many(:group_members).dependent(:destroy) }
+ it { is_expected.to have_many(:namespace_members) }
it { is_expected.to have_many(:users).through(:group_members) }
it { is_expected.to have_many(:owners).through(:group_members) }
it { is_expected.to have_many(:requesters).dependent(:destroy) }
+ it { is_expected.to have_many(:namespace_requesters) }
it { is_expected.to have_many(:members_and_requesters) }
it { is_expected.to have_many(:project_group_links).dependent(:destroy) }
it { is_expected.to have_many(:shared_projects).through(:project_group_links) }
@@ -45,7 +47,7 @@ RSpec.describe Group do
it { is_expected.to have_one(:group_feature) }
it { is_expected.to have_one(:harbor_integration) }
- describe '#members & #requesters' do
+ describe '#namespace_members' do
let(:requester) { create(:user) }
let(:developer) { create(:user) }
@@ -54,6 +56,98 @@ RSpec.describe Group do
group.add_developer(developer)
end
+ it 'includes the correct users' do
+ expect(group.namespace_members).to include Member.find_by(user: developer)
+ expect(group.namespace_members).not_to include Member.find_by(user: requester)
+ end
+
+    it 'is equivalent to #group_members' do
+ expect(group.namespace_members).to eq group.group_members
+ end
+
+ it_behaves_like 'query without source filters' do
+ subject { group.namespace_members }
+ end
+ end
+
+ describe '#namespace_requesters' do
+ let(:requester) { create(:user) }
+ let(:developer) { create(:user) }
+
+ before do
+ group.request_access(requester)
+ group.add_developer(developer)
+ end
+
+ it 'includes the correct users' do
+ expect(group.namespace_requesters).to include Member.find_by(user: requester)
+ expect(group.namespace_requesters).not_to include Member.find_by(user: developer)
+ end
+
+ it 'is equivalent to #requesters' do
+ expect(group.namespace_requesters).to eq group.requesters
+ end
+
+ it_behaves_like 'query without source filters' do
+ subject { group.namespace_requesters }
+ end
+ end
+
+ shared_examples 'polymorphic membership relationship' do
+ it do
+ expect(membership.attributes).to include(
+ 'source_type' => 'Namespace',
+ 'source_id' => group.id
+ )
+ end
+ end
+
+ shared_examples 'member_namespace membership relationship' do
+ it do
+ expect(membership.attributes).to include(
+ 'member_namespace_id' => group.id
+ )
+ end
+ end
+
+ describe '#namespace_members setters' do
+ let(:user) { create(:user) }
+ let(:membership) { group.namespace_members.create!(user: user, access_level: Gitlab::Access::DEVELOPER) }
+
+ it { expect(membership).to be_instance_of(GroupMember) }
+ it { expect(membership.user).to eq user }
+ it { expect(membership.group).to eq group }
+ it { expect(membership.requested_at).to be_nil }
+
+ it_behaves_like 'polymorphic membership relationship'
+ it_behaves_like 'member_namespace membership relationship'
+ end
+
+ describe '#namespace_requesters setters' do
+ let(:requested_at) { Time.current }
+ let(:user) { create(:user) }
+ let(:membership) do
+ group.namespace_requesters.create!(user: user, requested_at: requested_at, access_level: Gitlab::Access::DEVELOPER)
+ end
+
+ it { expect(membership).to be_instance_of(GroupMember) }
+ it { expect(membership.user).to eq user }
+ it { expect(membership.group).to eq group }
+ it { expect(membership.requested_at).to eq requested_at }
+
+ it_behaves_like 'polymorphic membership relationship'
+ it_behaves_like 'member_namespace membership relationship'
+ end
+
+ describe '#members & #requesters' do
+ let_it_be(:requester) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+
+ before do
+ group.request_access(requester)
+ group.add_developer(developer)
+ end
+
it_behaves_like 'members and requesters associations' do
let(:namespace) { group }
end
@@ -2648,7 +2742,81 @@ RSpec.describe Group do
end
end
- context 'disabled_with_override' do
+ context 'disabled_and_overridable' do
+ subject { group.update_shared_runners_setting!(Namespace::SR_DISABLED_AND_OVERRIDABLE) }
+
+ context 'top level group' do
+ let_it_be(:group) { create(:group, :shared_runners_disabled) }
+ let_it_be(:sub_group) { create(:group, :shared_runners_disabled, parent: group) }
+ let_it_be(:project) { create(:project, shared_runners_enabled: false, group: sub_group) }
+
+ it 'enables allow descendants to override only for itself' do
+ expect { subject_and_reload(group, sub_group, project) }
+ .to change { group.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
+ .and not_change { group.shared_runners_enabled }
+ .and not_change { sub_group.allow_descendants_override_disabled_shared_runners }
+ .and not_change { sub_group.shared_runners_enabled }
+ .and not_change { project.shared_runners_enabled }
+ end
+ end
+
+    context 'group whose ancestors have shared Runners disabled but allow override' do
+ let_it_be(:parent) { create(:group, :shared_runners_disabled, :allow_descendants_override_disabled_shared_runners) }
+ let_it_be(:group) { create(:group, :shared_runners_disabled, parent: parent) }
+ let_it_be(:project) { create(:project, shared_runners_enabled: false, group: group) }
+
+ it 'enables allow descendants to override' do
+ expect { subject_and_reload(parent, group, project) }
+ .to not_change { parent.allow_descendants_override_disabled_shared_runners }
+ .and not_change { parent.shared_runners_enabled }
+ .and change { group.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
+ .and not_change { group.shared_runners_enabled }
+ .and not_change { project.shared_runners_enabled }
+ end
+ end
+
+ context 'when parent does not allow' do
+ let_it_be(:parent, reload: true) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false) }
+ let_it_be(:group, reload: true) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
+
+ it 'raises exception' do
+ expect { subject }
+ .to raise_error(ActiveRecord::RecordInvalid, 'Validation failed: Allow descendants override disabled shared runners cannot be enabled because parent group does not allow it')
+ end
+
+ it 'does not allow descendants to override' do
+ expect do
+ begin
+ subject
+ rescue StandardError
+ nil
+ end
+
+ parent.reload
+ group.reload
+ end.to not_change { parent.allow_descendants_override_disabled_shared_runners }
+ .and not_change { parent.shared_runners_enabled }
+ .and not_change { group.allow_descendants_override_disabled_shared_runners }
+ .and not_change { group.shared_runners_enabled }
+ end
+ end
+
+ context 'top level group that has shared Runners enabled' do
+ let_it_be(:group) { create(:group, shared_runners_enabled: true) }
+ let_it_be(:sub_group) { create(:group, shared_runners_enabled: true, parent: group) }
+ let_it_be(:project) { create(:project, shared_runners_enabled: true, group: sub_group) }
+
+ it 'enables allow descendants to override & disables shared runners everywhere' do
+ expect { subject_and_reload(group, sub_group, project) }
+ .to change { group.shared_runners_enabled }.from(true).to(false)
+ .and change { group.allow_descendants_override_disabled_shared_runners }.from(false).to(true)
+ .and change { sub_group.shared_runners_enabled }.from(true).to(false)
+ .and change { project.shared_runners_enabled }.from(true).to(false)
+ end
+ end
+ end
+
+ context 'disabled_with_override (deprecated)' do
subject { group.update_shared_runners_setting!(Namespace::SR_DISABLED_WITH_OVERRIDE) }
context 'top level group' do
@@ -3486,4 +3654,26 @@ RSpec.describe Group do
it { is_expected.to be_nil }
end
end
+
+ describe '#usage_quotas_enabled?', feature_category: :subscription_cost_management, unless: Gitlab.ee? do
+ using RSpec::Parameterized::TableSyntax
+
+ where(:feature_enabled, :root_group, :result) do
+ false | true | false
+ false | false | false
+ true | false | false
+ true | true | true
+ end
+
+ with_them do
+ before do
+ stub_feature_flags(usage_quotas_for_all_editions: feature_enabled)
+ allow(group).to receive(:root?).and_return(root_group)
+ end
+
+ it 'returns the expected result' do
+ expect(group.usage_quotas_enabled?).to eq result
+ end
+ end
+ end
end
diff --git a/spec/models/integration_spec.rb b/spec/models/integration_spec.rb
index 78b30221a24..9b3250e3c08 100644
--- a/spec/models/integration_spec.rb
+++ b/spec/models/integration_spec.rb
@@ -29,6 +29,7 @@ RSpec.describe Integration do
it { is_expected.to be_tag_push_events }
it { is_expected.to be_wiki_page_events }
it { is_expected.not_to be_active }
+ it { is_expected.not_to be_incident_events }
it { expect(subject.category).to eq(:common) }
end
@@ -153,6 +154,7 @@ RSpec.describe Integration do
include_examples 'hook scope', 'confidential_note'
include_examples 'hook scope', 'alert'
include_examples 'hook scope', 'archive_trace'
+ include_examples 'hook scope', 'incident'
end
describe '#operating?' do
diff --git a/spec/models/integrations/apple_app_store_spec.rb b/spec/models/integrations/apple_app_store_spec.rb
new file mode 100644
index 00000000000..1a57f556895
--- /dev/null
+++ b/spec/models/integrations/apple_app_store_spec.rb
@@ -0,0 +1,105 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Integrations::AppleAppStore, feature_category: :mobile_devops do
+ describe 'Validations' do
+ context 'when active' do
+ before do
+ subject.active = true
+ end
+
+ it { is_expected.to validate_presence_of :app_store_issuer_id }
+ it { is_expected.to validate_presence_of :app_store_key_id }
+ it { is_expected.to validate_presence_of :app_store_private_key }
+ it { is_expected.to allow_value('aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee').for(:app_store_issuer_id) }
+ it { is_expected.not_to allow_value('abcde').for(:app_store_issuer_id) }
+ it { is_expected.to allow_value(File.read('spec/fixtures/ssl_key.pem')).for(:app_store_private_key) }
+ it { is_expected.not_to allow_value("foo").for(:app_store_private_key) }
+ it { is_expected.to allow_value('ABCD1EF12G').for(:app_store_key_id) }
+ it { is_expected.not_to allow_value('ABC').for(:app_store_key_id) }
+ it { is_expected.not_to allow_value('abc1').for(:app_store_key_id) }
+ it { is_expected.not_to allow_value('-A0-').for(:app_store_key_id) }
+ end
+ end
+
+ context 'when integration is enabled' do
+ let(:apple_app_store_integration) { build(:apple_app_store_integration) }
+
+ describe '#fields' do
+ it 'returns custom fields' do
+ expect(apple_app_store_integration.fields.pluck(:name)).to eq(%w[app_store_issuer_id app_store_key_id
+ app_store_private_key])
+ end
+ end
+
+ describe '#test' do
+ it 'returns true for a successful request' do
+ allow(AppStoreConnect::Client).to receive_message_chain(:new, :apps).and_return({})
+ expect(apple_app_store_integration.test[:success]).to be true
+ end
+
+ it 'returns false for an invalid request' do
+        allow(AppStoreConnect::Client).to receive_message_chain(:new, :apps)
+          .and_return({ errors: [title: "error title"] })
+ expect(apple_app_store_integration.test[:success]).to be false
+ end
+ end
+
+ describe '#help' do
+ it 'renders prompt information' do
+ expect(apple_app_store_integration.help).not_to be_empty
+ end
+ end
+
+ describe '.to_param' do
+ it 'returns the name of the integration' do
+ expect(described_class.to_param).to eq('apple_app_store')
+ end
+ end
+
+ describe '#ci_variables' do
+ let(:apple_app_store_integration) { build_stubbed(:apple_app_store_integration) }
+
+ it 'returns vars when the integration is activated' do
+ ci_vars = [
+ {
+ key: 'APP_STORE_CONNECT_API_KEY_ISSUER_ID',
+ value: apple_app_store_integration.app_store_issuer_id,
+ masked: true,
+ public: false
+ },
+ {
+ key: 'APP_STORE_CONNECT_API_KEY_KEY',
+ value: Base64.encode64(apple_app_store_integration.app_store_private_key),
+ masked: true,
+ public: false
+ },
+ {
+ key: 'APP_STORE_CONNECT_API_KEY_KEY_ID',
+ value: apple_app_store_integration.app_store_key_id,
+ masked: true,
+ public: false
+ }
+ ]
+
+ expect(apple_app_store_integration.ci_variables).to match_array(ci_vars)
+ end
+
+ it 'returns an empty array when the integration is disabled' do
+ apple_app_store_integration = build_stubbed(:apple_app_store_integration, active: false)
+ expect(apple_app_store_integration.ci_variables).to match_array([])
+ end
+ end
+ end
+
+ context 'when integration is disabled' do
+ let(:apple_app_store_integration) { build_stubbed(:apple_app_store_integration, active: false) }
+
+ describe '#ci_variables' do
+ it 'returns an empty array' do
+ expect(apple_app_store_integration.ci_variables).to match_array([])
+ end
+ end
+ end
+end
diff --git a/spec/models/integrations/base_chat_notification_spec.rb b/spec/models/integrations/base_chat_notification_spec.rb
index 67fc09fd8b5..1527ffd7278 100644
--- a/spec/models/integrations/base_chat_notification_spec.rb
+++ b/spec/models/integrations/base_chat_notification_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Integrations::BaseChatNotification do
+RSpec.describe Integrations::BaseChatNotification, feature_category: :integrations do
describe 'default values' do
it { expect(subject.category).to eq(:chat) }
end
@@ -134,6 +134,12 @@ RSpec.describe Integrations::BaseChatNotification do
it_behaves_like 'notifies the chat integration'
end
+
+ context 'Incident events' do
+ let(:data) { issue.to_hook_data(user).merge!({ object_kind: 'incident' }) }
+
+ it_behaves_like 'notifies the chat integration'
+ end
end
context 'when labels_to_be_notified_behavior is not defined' do
diff --git a/spec/models/integrations/chat_message/issue_message_spec.rb b/spec/models/integrations/chat_message/issue_message_spec.rb
index ff9f30efdca..cd40e4c361e 100644
--- a/spec/models/integrations/chat_message/issue_message_spec.rb
+++ b/spec/models/integrations/chat_message/issue_message_spec.rb
@@ -7,6 +7,7 @@ RSpec.describe Integrations::ChatMessage::IssueMessage do
let(:args) do
{
+ object_kind: 'issue',
user: {
name: 'Test User',
username: 'test.user',
diff --git a/spec/models/integrations/chat_message/pipeline_message_spec.rb b/spec/models/integrations/chat_message/pipeline_message_spec.rb
index 413cb097327..4d371ca0899 100644
--- a/spec/models/integrations/chat_message/pipeline_message_spec.rb
+++ b/spec/models/integrations/chat_message/pipeline_message_spec.rb
@@ -80,18 +80,6 @@ RSpec.describe Integrations::ChatMessage::PipelineMessage do
expect(name_field[:value]).to eq('Build pipeline')
end
- context 'when pipeline_name feature flag is disabled' do
- before do
- stub_feature_flags(pipeline_name: false)
- end
-
- it 'does not return pipeline name' do
- name_field = subject.attachments.first[:fields].find { |a| a[:title] == 'Pipeline name' }
-
- expect(name_field).to be nil
- end
- end
-
context "when the pipeline failed" do
before do
args[:object_attributes][:status] = 'failed'
diff --git a/spec/models/integrations/every_integration_spec.rb b/spec/models/integrations/every_integration_spec.rb
index 33e89b3dabc..8666ef512fc 100644
--- a/spec/models/integrations/every_integration_spec.rb
+++ b/spec/models/integrations/every_integration_spec.rb
@@ -11,9 +11,9 @@ RSpec.describe 'Every integration' do
let(:integration) { integration_class.new }
context 'secret fields', :aggregate_failures do
- it "uses type: 'password' for all secret fields" do
+ it "uses type: 'password' for all secret fields, except when bypassed" do
integration.fields.each do |field|
- next unless Integrations::Field::SECRET_NAME.match?(field[:name])
+ next unless Integrations::Field::SECRET_NAME.match?(field[:name]) && field[:is_secret]
expect(field[:type]).to eq('password'),
"Field '#{field[:name]}' should use type 'password'"
diff --git a/spec/models/integrations/field_spec.rb b/spec/models/integrations/field_spec.rb
index 642fb1fbf7f..c30f9ef0d7b 100644
--- a/spec/models/integrations/field_spec.rb
+++ b/spec/models/integrations/field_spec.rb
@@ -83,6 +83,8 @@ RSpec.describe ::Integrations::Field do
be false
when :type
eq 'text'
+ when :is_secret
+ eq true
else
be_nil
end
diff --git a/spec/models/issue_spec.rb b/spec/models/issue_spec.rb
index 7c147067714..fdb397932e0 100644
--- a/spec/models/issue_spec.rb
+++ b/spec/models/issue_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe Issue, feature_category: :project_management do
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
- let_it_be(:reusable_project) { create(:project) }
+ let_it_be_with_reload(:reusable_project) { create(:project) }
describe "Associations" do
it { is_expected.to belong_to(:milestone) }
diff --git a/spec/models/member_spec.rb b/spec/models/member_spec.rb
index 0ebccf1cb65..4b28f619d94 100644
--- a/spec/models/member_spec.rb
+++ b/spec/models/member_spec.rb
@@ -303,7 +303,7 @@ RSpec.describe Member do
@requested_member = project.requesters.find_by(user_id: requested_user.id)
accepted_request_user = create(:user).tap { |u| project.request_access(u) }
- @accepted_request_member = project.requesters.find_by(user_id: accepted_request_user.id).tap { |m| m.accept_request }
+ @accepted_request_member = project.requesters.find_by(user_id: accepted_request_user.id).tap { |m| m.accept_request(@owner_user) }
@member_with_minimal_access = create(:group_member, :minimal_access, source: group)
end
@@ -777,18 +777,25 @@ RSpec.describe Member do
describe '#accept_request' do
let(:member) { create(:project_member, requested_at: Time.current.utc) }
- it { expect(member.accept_request).to be_truthy }
+ it { expect(member.accept_request(@owner_user)).to be_truthy }
+ it { expect(member.accept_request(nil)).to be_truthy }
it 'clears requested_at' do
- member.accept_request
+ member.accept_request(@owner_user)
expect(member.requested_at).to be_nil
end
+ it 'saves the approving user' do
+ member.accept_request(@owner_user)
+
+ expect(member.created_by).to eq(@owner_user)
+ end
+
it 'calls #after_accept_request' do
expect(member).to receive(:after_accept_request)
- member.accept_request
+ member.accept_request(@owner_user)
end
end
@@ -799,33 +806,27 @@ RSpec.describe Member do
end
describe '#request?' do
- context 'when request for project' do
- subject { create(:project_member, requested_at: Time.current.utc) }
+ shared_examples 'calls notification service and todo service' do
+ subject { create(source_type, requested_at: Time.current.utc) }
- it 'calls notification service but not todo service' do
+ specify do
expect_next_instance_of(NotificationService) do |instance|
expect(instance).to receive(:new_access_request)
end
- expect(TodoService).not_to receive(:new)
+ expect_next_instance_of(TodoService) do |instance|
+ expect(instance).to receive(:create_member_access_request_todos)
+ end
is_expected.to be_request
end
end
- context 'when request for group' do
- subject { create(:group_member, requested_at: Time.current.utc) }
-
- it 'calls notification and todo service' do
- expect_next_instance_of(NotificationService) do |instance|
- expect(instance).to receive(:new_access_request)
- end
-
- expect_next_instance_of(TodoService) do |instance|
- expect(instance).to receive(:create_member_access_request)
+ context 'when requests for project and group are raised' do
+ %i[project_member group_member].each do |source_type|
+ it_behaves_like 'calls notification service and todo service' do
+ let_it_be(:source_type) { source_type }
end
-
- is_expected.to be_request
end
end
end
diff --git a/spec/models/members/member_role_spec.rb b/spec/models/members/member_role_spec.rb
index f9d6757bb90..b118a3c0968 100644
--- a/spec/models/members/member_role_spec.rb
+++ b/spec/models/members/member_role_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MemberRole do
+RSpec.describe MemberRole, feature_category: :authentication_and_authorization do
describe 'associations' do
it { is_expected.to belong_to(:namespace) }
it { is_expected.to have_many(:members) }
@@ -14,6 +14,27 @@ RSpec.describe MemberRole do
it { is_expected.to validate_presence_of(:namespace) }
it { is_expected.to validate_presence_of(:base_access_level) }
+ context 'for attributes_locked_after_member_associated' do
+ context 'when assigned to member' do
+ it 'cannot be changed' do
+ member_role.save!
+ member_role.members << create(:project_member)
+
+ expect(member_role).not_to be_valid
+ expect(member_role.errors.messages[:base]).to include(
+ s_("MemberRole|cannot be changed because it is already assigned to a user. "\
+ "Please create a new Member Role instead")
+ )
+ end
+ end
+
+ context 'when not assigned to member' do
+ it 'can be changed' do
+ expect(member_role).to be_valid
+ end
+ end
+ end
+
context 'when for namespace' do
let_it_be(:root_group) { create(:group) }
diff --git a/spec/models/merge_request/approval_removal_settings_spec.rb b/spec/models/merge_request/approval_removal_settings_spec.rb
index 5f879207a72..7e375c7ff39 100644
--- a/spec/models/merge_request/approval_removal_settings_spec.rb
+++ b/spec/models/merge_request/approval_removal_settings_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MergeRequest::ApprovalRemovalSettings do
+RSpec.describe MergeRequest::ApprovalRemovalSettings, :with_license do
describe 'validations' do
let(:reset_approvals_on_push) {}
let(:selective_code_owner_removals) {}
diff --git a/spec/models/merge_request_diff_spec.rb b/spec/models/merge_request_diff_spec.rb
index a17b90930f0..1ecc4356672 100644
--- a/spec/models/merge_request_diff_spec.rb
+++ b/spec/models/merge_request_diff_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MergeRequestDiff do
+RSpec.describe MergeRequestDiff, feature_category: :code_review_workflow do
using RSpec::Parameterized::TableSyntax
include RepoHelpers
@@ -412,7 +412,19 @@ RSpec.describe MergeRequestDiff do
describe '#diffs_in_batch' do
let(:diff_options) { {} }
+ shared_examples_for 'measuring diffs metrics' do
+ specify do
+ allow(Gitlab::Metrics).to receive(:measure).and_call_original
+ expect(Gitlab::Metrics).to receive(:measure).with(:diffs_reorder).and_call_original
+ expect(Gitlab::Metrics).to receive(:measure).with(:diffs_collection).and_call_original
+
+ diff_with_commits.diffs_in_batch(0, 10, diff_options: diff_options)
+ end
+ end
+
shared_examples_for 'fetching full diffs' do
+ it_behaves_like 'measuring diffs metrics'
+
it 'returns diffs from repository comparison' do
expect_next_instance_of(Compare) do |comparison|
expect(comparison).to receive(:diffs)
@@ -435,6 +447,13 @@ RSpec.describe MergeRequestDiff do
expect(diffs.pagination_data).to eq(total_pages: nil)
end
+
+ it 'measures diffs_comparison' do
+ allow(Gitlab::Metrics).to receive(:measure).and_call_original
+ expect(Gitlab::Metrics).to receive(:measure).with(:diffs_comparison).and_call_original
+
+ diff_with_commits.diffs_in_batch(1, 10, diff_options: diff_options)
+ end
end
context 'when no persisted files available' do
@@ -454,6 +473,8 @@ RSpec.describe MergeRequestDiff do
end
context 'when persisted files available' do
+ it_behaves_like 'measuring diffs metrics'
+
it 'returns paginated diffs' do
diffs = diff_with_commits.diffs_in_batch(0, 10, diff_options: diff_options)
diff --git a/spec/models/merge_request_spec.rb b/spec/models/merge_request_spec.rb
index 05586cbfc64..a059d5cae9b 100644
--- a/spec/models/merge_request_spec.rb
+++ b/spec/models/merge_request_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MergeRequest, factory_default: :keep do
+RSpec.describe MergeRequest, factory_default: :keep, feature_category: :code_review_workflow do
include RepoHelpers
include ProjectForksHelper
include ReactiveCachingHelpers
@@ -165,6 +165,25 @@ RSpec.describe MergeRequest, factory_default: :keep do
expect(described_class.drafts).to eq([merge_request4])
end
end
+
+ describe '.without_hidden', feature_category: :insider_threat do
+ let_it_be(:banned_user) { create(:user, :banned) }
+ let_it_be(:hidden_merge_request) { create(:merge_request, :unique_branches, author: banned_user) }
+
+ it 'only returns public issuables' do
+ expect(described_class.without_hidden).not_to include(hidden_merge_request)
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(hide_merge_requests_from_banned_users: false)
+ end
+
+ it 'returns public and hidden issuables' do
+ expect(described_class.without_hidden).to include(hidden_merge_request)
+ end
+ end
+ end
end
describe '#squash?' do
@@ -4546,6 +4565,34 @@ RSpec.describe MergeRequest, factory_default: :keep do
end
end
+ describe 'transition to merged' do
+ context 'when reset_merge_error_on_transition feature flag is on' do
+ before do
+ stub_feature_flags(reset_merge_error_on_transition: true)
+ end
+
+ it 'resets the merge error' do
+ subject.update!(merge_error: 'temp')
+
+ expect { subject.mark_as_merged }.to change { subject.merge_error.present? }
+ .from(true)
+ .to(false)
+ end
+ end
+
+ context 'when reset_merge_error_on_transition feature flag is off' do
+ before do
+ stub_feature_flags(reset_merge_error_on_transition: false)
+ end
+
+ it 'does not reset the merge error' do
+ subject.update!(merge_error: 'temp')
+
+ expect { subject.mark_as_merged }.not_to change { subject.merge_error.present? }
+ end
+ end
+ end
+
describe 'transition to cannot_be_merged' do
let(:notification_service) { double(:notification_service) }
let(:todo_service) { double(:todo_service) }
@@ -5456,4 +5503,27 @@ RSpec.describe MergeRequest, factory_default: :keep do
it { is_expected.to be_empty }
end
+
+ describe '#hidden?', feature_category: :insider_threat do
+ let_it_be(:author) { create(:user) }
+ let(:merge_request) { build_stubbed(:merge_request, author: author) }
+
+ subject { merge_request.hidden? }
+
+ it { is_expected.to eq(false) }
+
+ context 'when the author is banned' do
+ let_it_be(:author) { create(:user, :banned) }
+
+ it { is_expected.to eq(true) }
+
+ context 'when the feature flag is disabled' do
+ before do
+ stub_feature_flags(hide_merge_requests_from_banned_users: false)
+ end
+
+ it { is_expected.to eq(false) }
+ end
+ end
+ end
end
diff --git a/spec/models/ml/candidate_spec.rb b/spec/models/ml/candidate_spec.rb
index 9ce411191f0..fa8952dc0f4 100644
--- a/spec/models/ml/candidate_spec.rb
+++ b/spec/models/ml/candidate_spec.rb
@@ -4,6 +4,14 @@ require 'spec_helper'
RSpec.describe Ml::Candidate, factory_default: :keep do
let_it_be(:candidate) { create(:ml_candidates, :with_metrics_and_params) }
+ let_it_be(:candidate2) { create(:ml_candidates, experiment: candidate.experiment) }
+
+ let_it_be(:candidate_artifact) do
+ FactoryBot.create(:generic_package,
+ name: candidate.package_name,
+ version: candidate.package_version,
+ project: candidate.project)
+ end
let(:project) { candidate.experiment.project }
@@ -22,13 +30,13 @@ RSpec.describe Ml::Candidate, factory_default: :keep do
describe '.artifact_root' do
subject { candidate.artifact_root }
- it { is_expected.to eq("/ml_candidate_#{candidate.iid}/-/") }
+ it { is_expected.to eq("/ml_candidate_#{candidate.id}/-/") }
end
describe '.package_name' do
subject { candidate.package_name }
- it { is_expected.to eq("ml_candidate_#{candidate.iid}") }
+ it { is_expected.to eq("ml_candidate_#{candidate.id}") }
end
describe '.package_version' do
@@ -38,27 +46,45 @@ RSpec.describe Ml::Candidate, factory_default: :keep do
end
describe '.artifact' do
- subject { candidate.artifact }
+ let(:tested_candidate) { candidate }
- context 'when has logged artifacts' do
- let(:package) do
- create(:generic_package, name: candidate.package_name, version: candidate.package_version, project: project)
- end
+ subject { tested_candidate.artifact }
- it 'returns the package' do
- package
+ before do
+ candidate_artifact
+ end
- is_expected.to eq(package)
+ context 'when has logged artifacts' do
+ it 'returns the package' do
+ expect(subject.name).to eq(tested_candidate.package_name)
end
end
context 'when does not have logged artifacts' do
- let(:tested_candidate) { create(:ml_candidates, :with_metrics_and_params) }
+ let(:tested_candidate) { candidate2 }
it { is_expected.to be_nil }
end
end
+ describe '.artifact_lazy' do
+ context 'when candidates have the same iid' do
+ before do
+ BatchLoader::Executor.clear_current
+ end
+
+ it 'loads the correct artifacts', :aggregate_failures do
+ candidate.artifact_lazy
+ candidate2.artifact_lazy
+
+ expect(Packages::Package).to receive(:joins).once.and_call_original # Only one database call
+
+ expect(candidate.artifact.name).to eq(candidate.package_name)
+ expect(candidate2.artifact).to be_nil
+ end
+ end
+ end
+
describe '#by_project_id_and_iid' do
let(:project_id) { candidate.experiment.project_id }
let(:iid) { candidate.iid }
@@ -95,12 +121,13 @@ RSpec.describe Ml::Candidate, factory_default: :keep do
end
end
- describe "#including_metrics_and_params" do
- subject { described_class.including_metrics_and_params.find_by(id: candidate.id) }
+ describe "#including_relationships" do
+ subject { described_class.including_relationships.find_by(id: candidate.id) }
it 'loads latest metrics and params', :aggregate_failures do
expect(subject.association_cached?(:latest_metrics)).to be(true)
expect(subject.association_cached?(:params)).to be(true)
+ expect(subject.association_cached?(:user)).to be(true)
end
end
end
diff --git a/spec/models/namespace_setting_spec.rb b/spec/models/namespace_setting_spec.rb
index e06a6a30f9a..0bf6fdf4fa0 100644
--- a/spec/models/namespace_setting_spec.rb
+++ b/spec/models/namespace_setting_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe NamespaceSetting, type: :model do
+RSpec.describe NamespaceSetting, feature_category: :subgroups, type: :model do
it_behaves_like 'sanitizable', :namespace_settings, %i[default_branch_name]
# Relationships
@@ -235,6 +235,80 @@ RSpec.describe NamespaceSetting, type: :model do
end
end
+ describe '#allow_runner_registration_token?' do
+ subject(:group_setting) { group.allow_runner_registration_token? }
+
+ context 'when a top-level group' do
+ let_it_be(:settings) { create(:namespace_settings) }
+ let_it_be(:group) { create(:group, namespace_settings: settings) }
+
+ before do
+ group.update!(allow_runner_registration_token: allow_runner_registration_token)
+ end
+
+ context 'when :allow_runner_registration_token is false' do
+ let(:allow_runner_registration_token) { false }
+
+ it 'returns false', :aggregate_failures do
+ is_expected.to be_falsey
+
+ expect(settings.allow_runner_registration_token).to be_falsey
+ end
+
+ it 'does not query the db' do
+ expect { group_setting }.not_to exceed_query_limit(0)
+ end
+ end
+
+ context 'when :allow_runner_registration_token is true' do
+ let(:allow_runner_registration_token) { true }
+
+ it 'returns true', :aggregate_failures do
+ is_expected.to be_truthy
+
+ expect(settings.allow_runner_registration_token).to be_truthy
+ end
+
+ context 'when disallowed by application setting' do
+ before do
+ stub_application_setting(allow_runner_registration_token: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+
+ context 'when a group has parent groups' do
+ let_it_be_with_refind(:parent) { create(:group) }
+ let_it_be_with_refind(:group) { create(:group, parent: parent) }
+
+ before do
+ parent.update!(allow_runner_registration_token: allow_runner_registration_token)
+ end
+
+ context 'when a parent group has runner registration disabled' do
+ let(:allow_runner_registration_token) { false }
+
+ it { is_expected.to be_falsey }
+ end
+
+ context 'when all parent groups have runner registration enabled' do
+ let(:allow_runner_registration_token) { true }
+
+ it { is_expected.to be_truthy }
+
+ context 'when disallowed by application setting' do
+ before do
+ stub_application_setting(allow_runner_registration_token: false)
+ end
+
+ it { is_expected.to be_falsey }
+ end
+ end
+ end
+ end
+
describe '#delayed_project_removal' do
it_behaves_like 'a cascading namespace setting boolean attribute', settings_attribute_name: :delayed_project_removal
end
diff --git a/spec/models/namespace_spec.rb b/spec/models/namespace_spec.rb
index 80721e11049..d063f4713c7 100644
--- a/spec/models/namespace_spec.rb
+++ b/spec/models/namespace_spec.rb
@@ -35,6 +35,7 @@ RSpec.describe Namespace do
it { is_expected.to have_one :cluster_enabled_grant }
it { is_expected.to have_many(:work_items) }
it { is_expected.to have_many :achievements }
+ it { is_expected.to have_many(:namespace_commit_emails).class_name('Users::NamespaceCommitEmail') }
it do
is_expected.to have_one(:ci_cd_settings).class_name('NamespaceCiCdSetting').inverse_of(:namespace).autosave(true)
@@ -363,6 +364,10 @@ RSpec.describe Namespace do
it { is_expected.to delegate_method(:name).to(:owner).with_prefix.allow_nil }
it { is_expected.to delegate_method(:avatar_url).to(:owner).allow_nil }
it { is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy).to(:namespace_settings).allow_nil }
+ it { is_expected.to delegate_method(:runner_registration_enabled).to(:namespace_settings) }
+ it { is_expected.to delegate_method(:runner_registration_enabled?).to(:namespace_settings) }
+ it { is_expected.to delegate_method(:allow_runner_registration_token).to(:namespace_settings) }
+ it { is_expected.to delegate_method(:allow_runner_registration_token?).to(:namespace_settings) }
it { is_expected.to delegate_method(:maven_package_requests_forwarding).to(:package_settings) }
it { is_expected.to delegate_method(:pypi_package_requests_forwarding).to(:package_settings) }
it { is_expected.to delegate_method(:npm_package_requests_forwarding).to(:package_settings) }
@@ -371,6 +376,16 @@ RSpec.describe Namespace do
is_expected.to delegate_method(:prevent_sharing_groups_outside_hierarchy=).to(:namespace_settings)
.with_arguments(:args).allow_nil
end
+
+ it do
+ is_expected.to delegate_method(:runner_registration_enabled=).to(:namespace_settings)
+ .with_arguments(:args)
+ end
+
+ it do
+ is_expected.to delegate_method(:allow_runner_registration_token=).to(:namespace_settings)
+ .with_arguments(:args)
+ end
end
describe "Respond to" do
@@ -2114,7 +2129,7 @@ RSpec.describe Namespace do
where(:shared_runners_enabled, :allow_descendants_override_disabled_shared_runners, :shared_runners_setting) do
true | true | Namespace::SR_ENABLED
true | false | Namespace::SR_ENABLED
- false | true | Namespace::SR_DISABLED_WITH_OVERRIDE
+ false | true | Namespace::SR_DISABLED_AND_OVERRIDABLE
false | false | Namespace::SR_DISABLED_AND_UNOVERRIDABLE
end
@@ -2133,12 +2148,15 @@ RSpec.describe Namespace do
where(:shared_runners_enabled, :allow_descendants_override_disabled_shared_runners, :other_setting, :result) do
true | true | Namespace::SR_ENABLED | false
true | true | Namespace::SR_DISABLED_WITH_OVERRIDE | true
+ true | true | Namespace::SR_DISABLED_AND_OVERRIDABLE | true
true | true | Namespace::SR_DISABLED_AND_UNOVERRIDABLE | true
false | true | Namespace::SR_ENABLED | false
false | true | Namespace::SR_DISABLED_WITH_OVERRIDE | false
+ false | true | Namespace::SR_DISABLED_AND_OVERRIDABLE | false
false | true | Namespace::SR_DISABLED_AND_UNOVERRIDABLE | true
false | false | Namespace::SR_ENABLED | false
false | false | Namespace::SR_DISABLED_WITH_OVERRIDE | false
+ false | false | Namespace::SR_DISABLED_AND_OVERRIDABLE | false
false | false | Namespace::SR_DISABLED_AND_UNOVERRIDABLE | false
end
diff --git a/spec/models/note_spec.rb b/spec/models/note_spec.rb
index 328d3ba7dda..4b574540500 100644
--- a/spec/models/note_spec.rb
+++ b/spec/models/note_spec.rb
@@ -1622,6 +1622,50 @@ RSpec.describe Note do
expect(described_class.with_suggestions).not_to include(note_without_suggestion)
end
end
+
+ describe '.inc_relations_for_view' do
+ subject { note.noteable.notes.inc_relations_for_view(noteable) }
+
+ context 'when noteable can not have diffs' do
+ let_it_be(:note) { create(:note_on_issue) }
+ let(:noteable) { note.noteable }
+
+ it 'does not include additional associations' do
+ expect { subject.reload }.to match_query_count(0).for_model(NoteDiffFile).and(
+ match_query_count(0).for_model(DiffNotePosition))
+ end
+
+ context 'when noteable is not set' do
+ let(:noteable) { nil }
+
+ it 'includes additional diff associations' do
+ expect { subject.reload }.to match_query_count(1).for_model(NoteDiffFile).and(
+ match_query_count(1).for_model(DiffNotePosition))
+ end
+ end
+
+ context 'when skip_notes_diff_include flag is disabled' do
+ before do
+ stub_feature_flags(skip_notes_diff_include: false)
+ end
+
+ it 'includes additional diff associations' do
+ expect { subject.reload }.to match_query_count(1).for_model(NoteDiffFile).and(
+ match_query_count(1).for_model(DiffNotePosition))
+ end
+ end
+ end
+
+ context 'when noteable can have diffs' do
+ let_it_be(:note) { create(:note_on_commit) }
+ let(:noteable) { note.noteable }
+
+ it 'includes additional diff associations' do
+ expect { subject.reload }.to match_query_count(1).for_model(NoteDiffFile).and(
+ match_query_count(1).for_model(DiffNotePosition))
+ end
+ end
+ end
end
describe 'banzai_render_context' do
diff --git a/spec/models/oauth_access_token_spec.rb b/spec/models/oauth_access_token_spec.rb
index 92e1ae8ac60..fc53d926dd6 100644
--- a/spec/models/oauth_access_token_spec.rb
+++ b/spec/models/oauth_access_token_spec.rb
@@ -53,4 +53,22 @@ RSpec.describe OauthAccessToken do
expect(described_class.matching_token_for(app_one, token.resource_owner, token.scopes)).to be_nil
end
end
+
+ describe '#expires_in' do
+ context 'when token has expires_in value set' do
+ it 'uses the expires_in value' do
+ token = OauthAccessToken.new(expires_in: 1.minute)
+
+ expect(token.expires_in).to eq 1.minute
+ end
+ end
+
+ context 'when token has nil expires_in' do
+ it 'uses default value' do
+ token = OauthAccessToken.new(expires_in: nil)
+
+ expect(token.expires_in).to eq 2.hours
+ end
+ end
+ end
end
diff --git a/spec/models/packages/package_file_spec.rb b/spec/models/packages/package_file_spec.rb
index a244ed34e54..9b341034aaa 100644
--- a/spec/models/packages/package_file_spec.rb
+++ b/spec/models/packages/package_file_spec.rb
@@ -8,6 +8,7 @@ RSpec.describe Packages::PackageFile, type: :model do
let_it_be(:package_file1) { create(:package_file, :xml, file_name: 'FooBar') }
let_it_be(:package_file2) { create(:package_file, :xml, file_name: 'ThisIsATest') }
let_it_be(:package_file3) { create(:package_file, :xml, file_name: 'formatted.zip') }
+ let_it_be(:package_file4) { create(:package_file, :nuget) }
let_it_be(:debian_package) { create(:debian_package, project: project) }
it_behaves_like 'having unique enum values'
@@ -98,6 +99,12 @@ RSpec.describe Packages::PackageFile, type: :model do
it { is_expected.to contain_exactly(package_file3) }
end
+
+ describe '.with_nuget_format' do
+ subject { described_class.with_nuget_format }
+
+ it { is_expected.to contain_exactly(package_file4) }
+ end
end
context 'updating project statistics' do
diff --git a/spec/models/packages/package_spec.rb b/spec/models/packages/package_spec.rb
index d6f71f2401c..a8bcda1242f 100644
--- a/spec/models/packages/package_spec.rb
+++ b/spec/models/packages/package_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Packages::Package, type: :model do
+RSpec.describe Packages::Package, type: :model, feature_category: :package_registry do
include SortingHelper
using RSpec::Parameterized::TableSyntax
@@ -14,6 +14,7 @@ RSpec.describe Packages::Package, type: :model do
it { is_expected.to have_many(:dependency_links).inverse_of(:package) }
it { is_expected.to have_many(:tags).inverse_of(:package) }
it { is_expected.to have_many(:build_infos).inverse_of(:package) }
+ it { is_expected.to have_many(:installable_nuget_package_files).inverse_of(:package) }
it { is_expected.to have_one(:conan_metadatum).inverse_of(:package) }
it { is_expected.to have_one(:maven_metadatum).inverse_of(:package) }
it { is_expected.to have_one(:debian_publication).inverse_of(:package).class_name('Packages::Debian::Publication') }
@@ -713,7 +714,7 @@ RSpec.describe Packages::Package, type: :model do
subject(:destroy!) { package.destroy! }
it 'updates the project statistics' do
- expect(project_statistics).to receive(:increment_counter).with(:packages_size, -package_file.size)
+ expect(project_statistics).to receive(:increment_counter).with(:packages_size, have_attributes(amount: -package_file.size))
destroy!
end
diff --git a/spec/models/pages/lookup_path_spec.rb b/spec/models/pages/lookup_path_spec.rb
index 6f684eceaec..ef79ba28d5d 100644
--- a/spec/models/pages/lookup_path_spec.rb
+++ b/spec/models/pages/lookup_path_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Pages::LookupPath do
+RSpec.describe Pages::LookupPath, feature_category: :pages do
let(:project) { create(:project, :pages_private, pages_https_only: true) }
subject(:lookup_path) { described_class.new(project) }
@@ -126,14 +126,18 @@ RSpec.describe Pages::LookupPath do
describe '#prefix' do
it 'returns "/" for pages group root projects' do
- project = instance_double(Project, pages_group_root?: true)
+ project = instance_double(Project, pages_namespace_url: "namespace.test", pages_url: "namespace.test")
lookup_path = described_class.new(project, trim_prefix: 'mygroup')
expect(lookup_path.prefix).to eq('/')
end
it 'returns the project full path with the provided prefix removed' do
- project = instance_double(Project, pages_group_root?: false, full_path: 'mygroup/myproject')
+ project = instance_double(
+ Project,
+ pages_namespace_url: "namespace.test",
+ pages_url: "namespace.other",
+ full_path: 'mygroup/myproject')
lookup_path = described_class.new(project, trim_prefix: 'mygroup')
expect(lookup_path.prefix).to eq('/myproject/')
diff --git a/spec/models/pages_domain_spec.rb b/spec/models/pages_domain_spec.rb
index e5f2e849a0a..f054fde78e7 100644
--- a/spec/models/pages_domain_spec.rb
+++ b/spec/models/pages_domain_spec.rb
@@ -567,7 +567,7 @@ RSpec.describe PagesDomain do
it 'returns the virtual domain when there are pages deployed for the project' do
expect(virtual_domain).to be_an_instance_of(Pages::VirtualDomain)
expect(virtual_domain.lookup_paths).not_to be_empty
- expect(virtual_domain.cache_key).to match(/pages_domain_for_project_#{project.id}_/)
+ expect(virtual_domain.cache_key).to match(/pages_domain_for_domain_#{pages_domain.id}_/)
end
context 'when :cache_pages_domain_api is disabled' do
diff --git a/spec/models/personal_access_token_spec.rb b/spec/models/personal_access_token_spec.rb
index 9d4c53f8d55..f65b5ff824b 100644
--- a/spec/models/personal_access_token_spec.rb
+++ b/spec/models/personal_access_token_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe PersonalAccessToken do
+RSpec.describe PersonalAccessToken, feature_category: :authentication_and_authorization do
subject { described_class }
describe '.build' do
@@ -210,6 +210,12 @@ RSpec.describe PersonalAccessToken do
expect(personal_access_token).to be_valid
end
+ it "allows creating a token with `admin_mode` scope" do
+ personal_access_token.scopes = [:api, :admin_mode]
+
+ expect(personal_access_token).to be_valid
+ end
+
context 'when registry is disabled' do
before do
stub_container_registry_config(enabled: false)
@@ -340,4 +346,27 @@ RSpec.describe PersonalAccessToken do
end
end
end
+
+ # During the implementation of Admin Mode for API, tokens of
+ # administrators should automatically get the `admin_mode` scope as well
+ # See https://gitlab.com/gitlab-org/gitlab/-/issues/42692
+ describe '`admin_mode` scope' do
+ subject { create(:personal_access_token, user: user, scopes: ['api']) }
+
+ context 'with administrator user' do
+ let_it_be(:user) { create(:user, :admin) }
+
+ it 'adds `admin_mode` scope before creation' do
+ expect(subject.scopes).to contain_exactly('api', 'admin_mode')
+ end
+ end
+
+ context 'with normal user' do
+ let_it_be(:user) { create(:user) }
+
+ it 'does not add `admin_mode` scope before creation' do
+ expect(subject.scopes).to contain_exactly('api')
+ end
+ end
+ end
end
diff --git a/spec/models/plan_limits_spec.rb b/spec/models/plan_limits_spec.rb
index d4e550657c8..3705cab7ef5 100644
--- a/spec/models/plan_limits_spec.rb
+++ b/spec/models/plan_limits_spec.rb
@@ -219,14 +219,21 @@ RSpec.describe PlanLimits do
ci_daily_pipeline_schedule_triggers
repository_size
security_policy_scan_execution_schedules
+ enforcement_limit
+ notification_limit
] + disabled_max_artifact_size_columns
end
+ let(:datetime_columns) do
+ %w[dashboard_limit_enabled_at]
+ end
+
it "has positive values for enabled limits" do
attributes = plan_limits.attributes
attributes = attributes.except(described_class.primary_key)
attributes = attributes.except(described_class.reflections.values.map(&:foreign_key))
attributes = attributes.except(*columns_with_zero)
+ attributes = attributes.except(*datetime_columns)
expect(attributes).to all(include(be_positive))
end
diff --git a/spec/models/project_import_state_spec.rb b/spec/models/project_import_state_spec.rb
index ba1a29a8b27..e5232026c39 100644
--- a/spec/models/project_import_state_spec.rb
+++ b/spec/models/project_import_state_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProjectImportState, type: :model do
+RSpec.describe ProjectImportState, type: :model, feature_category: :importers do
let_it_be(:correlation_id) { 'cid' }
let_it_be(:import_state, refind: true) { create(:import_state, correlation_id_value: correlation_id) }
@@ -17,22 +17,19 @@ RSpec.describe ProjectImportState, type: :model do
end
describe 'Project import job' do
- let_it_be(:import_state) { create(:import_state, import_url: generate(:url)) }
- let_it_be(:project) { import_state.project }
+ let_it_be(:project) { create(:project) }
- before do
- allow_any_instance_of(Gitlab::GitalyClient::RepositoryService).to receive(:import_repository)
- .with(project.import_url, http_authorization_header: '', mirror: false, resolved_address: '').and_return(true)
+ let(:import_state) { create(:import_state, import_url: generate(:url), project: project) }
+ let(:jid) { '551d3ceac5f67a116719ce41' }
+ before do
# Works around https://github.com/rspec/rspec-mocks/issues/910
allow(Project).to receive(:find).with(project.id).and_return(project)
- expect(project).to receive(:after_import).and_call_original
+ allow(project).to receive(:add_import_job).and_return(jid)
end
it 'imports a project', :sidekiq_might_not_need_inline do
- expect(RepositoryImportWorker).to receive(:perform_async).and_call_original
-
- expect { import_state.schedule }.to change { import_state.status }.from('none').to('finished')
+ expect { import_state.schedule }.to change { import_state.status }.from('none').to('scheduled')
end
it 'records job and correlation IDs', :sidekiq_might_not_need_inline do
@@ -40,7 +37,8 @@ RSpec.describe ProjectImportState, type: :model do
import_state.schedule
- expect(import_state.jid).to be_an_instance_of(String)
+ expect(project).to have_received(:add_import_job)
+ expect(import_state.jid).to eq(jid)
expect(import_state.correlation_id).to eq(correlation_id)
end
end
diff --git a/spec/models/project_spec.rb b/spec/models/project_spec.rb
index f33001b9c5b..4ed85844a53 100644
--- a/spec/models/project_spec.rb
+++ b/spec/models/project_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Project, factory_default: :keep do
+RSpec.describe Project, factory_default: :keep, feature_category: :projects do
include ProjectForksHelper
include ExternalAuthorizationServiceHelpers
include ReloadHelpers
@@ -28,8 +28,10 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:incident_management_issuable_escalation_statuses).through(:issues).inverse_of(:project).class_name('IncidentManagement::IssuableEscalationStatus') }
it { is_expected.to have_many(:milestones) }
it { is_expected.to have_many(:project_members).dependent(:delete_all) }
+ it { is_expected.to have_many(:namespace_members) }
it { is_expected.to have_many(:users).through(:project_members) }
it { is_expected.to have_many(:requesters).dependent(:delete_all) }
+ it { is_expected.to have_many(:namespace_requesters) }
it { is_expected.to have_many(:notes).dependent(:destroy) }
it { is_expected.to have_many(:snippets).class_name('ProjectSnippet') }
it { is_expected.to have_many(:deploy_keys_projects) }
@@ -47,6 +49,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_one(:webex_teams_integration) }
it { is_expected.to have_one(:packagist_integration) }
it { is_expected.to have_one(:pushover_integration) }
+ it { is_expected.to have_one(:apple_app_store_integration) }
it { is_expected.to have_one(:asana_integration) }
it { is_expected.to have_many(:boards) }
it { is_expected.to have_one(:campfire_integration) }
@@ -163,6 +166,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to have_many(:wiki_page_hooks_integrations).class_name('Integration') }
it { is_expected.to have_many(:deployment_hooks_integrations).class_name('Integration') }
it { is_expected.to have_many(:alert_hooks_integrations).class_name('Integration') }
+ it { is_expected.to have_many(:incident_hooks_integrations).class_name('Integration') }
# GitLab Pages
it { is_expected.to have_many(:pages_domains) }
@@ -346,6 +350,108 @@ RSpec.describe Project, factory_default: :keep do
end
end
+ shared_examples 'query without source filters' do
+ it do
+ expect(subject.where_values_hash.keys).not_to include('source_id', 'source_type')
+ end
+ end
+
+ describe '#namespace_members' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:requester) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+
+ before_all do
+ project.request_access(requester)
+ project.add_developer(developer)
+ end
+
+ it 'includes the correct users' do
+ expect(project.namespace_members).to include Member.find_by(user: developer)
+ expect(project.namespace_members).not_to include Member.find_by(user: requester)
+ end
+
+ it 'is equivalent to #project_members' do
+ expect(project.namespace_members).to match_array(project.project_members)
+ end
+
+ it_behaves_like 'query without source filters' do
+ subject { project.namespace_members }
+ end
+ end
+
+ describe '#namespace_requesters' do
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:requester) { create(:user) }
+ let_it_be(:developer) { create(:user) }
+
+ before_all do
+ project.request_access(requester)
+ project.add_developer(developer)
+ end
+
+ it 'includes the correct users' do
+ expect(project.namespace_requesters).to include Member.find_by(user: requester)
+ expect(project.namespace_requesters).not_to include Member.find_by(user: developer)
+ end
+
+ it 'is equivalent to #requesters' do
+ expect(project.namespace_requesters).to eq project.requesters
+ end
+
+ it_behaves_like 'query without source filters' do
+ subject { project.namespace_requesters }
+ end
+ end
+
+ shared_examples 'polymorphic membership relationship' do
+ it do
+ expect(membership.attributes).to include(
+ 'source_type' => 'Project',
+ 'source_id' => project.id
+ )
+ end
+ end
+
+ shared_examples 'member_namespace membership relationship' do
+ it do
+ expect(membership.attributes).to include(
+ 'member_namespace_id' => project.project_namespace_id
+ )
+ end
+ end
+
+ describe '#namespace_members setters' do
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:membership) { project.namespace_members.create!(user: user, access_level: Gitlab::Access::DEVELOPER) }
+
+ it { expect(membership).to be_instance_of(ProjectMember) }
+ it { expect(membership.user).to eq user }
+ it { expect(membership.project).to eq project }
+ it { expect(membership.requested_at).to be_nil }
+
+ it_behaves_like 'polymorphic membership relationship'
+ it_behaves_like 'member_namespace membership relationship'
+ end
+
+ describe '#namespace_requesters setters' do
+ let_it_be(:requested_at) { Time.current }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:membership) do
+ project.namespace_requesters.create!(user: user, requested_at: requested_at, access_level: Gitlab::Access::DEVELOPER)
+ end
+
+ it { expect(membership).to be_instance_of(ProjectMember) }
+ it { expect(membership.user).to eq user }
+ it { expect(membership.project).to eq project }
+ it { expect(membership.requested_at).to eq requested_at }
+
+ it_behaves_like 'polymorphic membership relationship'
+ it_behaves_like 'member_namespace membership relationship'
+ end
+
describe '#members & #requesters' do
let_it_be(:project) { create(:project, :public) }
let_it_be(:requester) { create(:user) }
@@ -2490,16 +2596,28 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#pages_url' do
+ describe '#pages_url', feature_category: :pages do
let(:group) { create(:group, name: group_name) }
- let(:project) { create(:project, namespace: group, name: project_name) }
+
+ let(:project_path) { project_name.downcase }
+ let(:project) do
+ create(
+ :project,
+ namespace: group,
+ name: project_name,
+ path: project_path)
+ end
+
let(:domain) { 'Example.com' }
+ let(:port) { nil }
subject { project.pages_url }
before do
allow(Settings.pages).to receive(:host).and_return(domain)
- allow(Gitlab.config.pages).to receive(:url).and_return('http://example.com')
+ allow(Gitlab.config.pages)
+ .to receive(:url)
+ .and_return(['http://example.com', port].compact.join(':'))
end
context 'group page' do
@@ -2509,9 +2627,7 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to eq("http://group.example.com") }
context 'mixed case path' do
- before do
- project.update!(path: 'Group.example.com')
- end
+ let(:project_path) { 'Group.example.com' }
it { is_expected.to eq("http://group.example.com") }
end
@@ -2524,22 +2640,88 @@ RSpec.describe Project, factory_default: :keep do
it { is_expected.to eq("http://group.example.com/project") }
context 'mixed case path' do
+ let(:project_path) { 'Project' }
+
+ it { is_expected.to eq("http://group.example.com/Project") }
+ end
+ end
+
+ context 'when there is an explicit port' do
+ let(:port) { 3000 }
+
+ context 'when not in dev mode' do
before do
- project.update!(path: 'Project')
+ stub_rails_env('production')
end
- it { is_expected.to eq("http://group.example.com/Project") }
+ context 'group page' do
+ let(:group_name) { 'Group' }
+ let(:project_name) { 'group.example.com' }
+
+ it { is_expected.to eq('http://group.example.com:3000/group.example.com') }
+
+ context 'mixed case path' do
+ let(:project_path) { 'Group.example.com' }
+
+ it { is_expected.to eq('http://group.example.com:3000/Group.example.com') }
+ end
+ end
+
+ context 'project page' do
+ let(:group_name) { 'Group' }
+ let(:project_name) { 'Project' }
+
+ it { is_expected.to eq("http://group.example.com:3000/project") }
+
+ context 'mixed case path' do
+ let(:project_path) { 'Project' }
+
+ it { is_expected.to eq("http://group.example.com:3000/Project") }
+ end
+ end
+ end
+
+ context 'when in dev mode' do
+ before do
+ stub_rails_env('development')
+ end
+
+ context 'group page' do
+ let(:group_name) { 'Group' }
+ let(:project_name) { 'group.example.com' }
+
+ it { is_expected.to eq('http://group.example.com:3000') }
+
+ context 'mixed case path' do
+ let(:project_path) { 'Group.example.com' }
+
+ it { is_expected.to eq('http://group.example.com:3000') }
+ end
+ end
+
+ context 'project page' do
+ let(:group_name) { 'Group' }
+ let(:project_name) { 'Project' }
+
+ it { is_expected.to eq("http://group.example.com:3000/project") }
+
+ context 'mixed case path' do
+ let(:project_path) { 'Project' }
+
+ it { is_expected.to eq("http://group.example.com:3000/Project") }
+ end
+ end
end
end
end
- describe '#pages_group_url' do
+ describe '#pages_namespace_url', feature_category: :pages do
let(:group) { create(:group, name: group_name) }
let(:project) { create(:project, namespace: group, name: project_name) }
let(:domain) { 'Example.com' }
let(:port) { 1234 }
- subject { project.pages_group_url }
+ subject { project.pages_namespace_url }
before do
allow(Settings.pages).to receive(:host).and_return(domain)
@@ -5808,22 +5990,6 @@ RSpec.describe Project, factory_default: :keep do
expect(recorder.count).to be_zero
end
-
- context 'with cache_project_integrations disabled' do
- before do
- stub_feature_flags(cache_project_integrations: false)
- end
-
- it 'triggers extra queries when called multiple times' do
- integration.project.execute_integrations({}, :push_hooks)
-
- recorder = ActiveRecord::QueryRecorder.new do
- integration.project.execute_integrations({}, :push_hooks)
- end
-
- expect(recorder.count).not_to be_zero
- end
- end
end
describe '#has_active_hooks?' do
@@ -6653,8 +6819,8 @@ RSpec.describe Project, factory_default: :keep do
where(:shared_runners_setting, :project_shared_runners_enabled, :valid_record) do
:shared_runners_enabled | true | true
:shared_runners_enabled | false | true
- :disabled_with_override | true | true
- :disabled_with_override | false | true
+ :disabled_and_overridable | true | true
+ :disabled_and_overridable | false | true
:disabled_and_unoverridable | true | false
:disabled_and_unoverridable | false | true
end
@@ -6902,21 +7068,6 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#pages_group_root?' do
- it 'returns returns true if pages_url is same as pages_group_url' do
- project = build(:project)
- expect(project).to receive(:pages_url).and_return(project.pages_group_url)
-
- expect(project.pages_group_root?).to eq(true)
- end
-
- it 'returns returns false if pages_url is different than pages_group_url' do
- project = build(:project)
-
- expect(project.pages_group_root?).to eq(false)
- end
- end
-
describe '#closest_setting' do
shared_examples_for 'fetching closest setting' do
let!(:namespace) { create(:namespace) }
@@ -7038,8 +7189,8 @@ RSpec.describe Project, factory_default: :keep do
create_list(:chat_name, 5, integration: integration)
end
- it 'removes chat names on removal' do
- expect { subject.destroy! }.to change { ChatName.count }.by(-5)
+ it 'does not remove chat names on removal' do
+ expect { subject.destroy! }.not_to change { ChatName.count }
end
end
@@ -7612,32 +7763,6 @@ RSpec.describe Project, factory_default: :keep do
end
end
- describe '#increment_statistic_value' do
- let(:project) { build_stubbed(:project) }
-
- subject(:increment) do
- project.increment_statistic_value(:build_artifacts_size, -10)
- end
-
- it 'increments the value' do
- expect(ProjectStatistics)
- .to receive(:increment_statistic)
- .with(project, :build_artifacts_size, -10)
-
- increment
- end
-
- context 'when the project is scheduled for removal' do
- let(:project) { build_stubbed(:project, pending_delete: true) }
-
- it 'does not increment the value' do
- expect(ProjectStatistics).not_to receive(:increment_statistic)
-
- increment
- end
- end
- end
-
describe 'topics' do
let_it_be(:project) { create(:project, name: 'topic-project', topic_list: 'topic1, topic2, topic3') }
diff --git a/spec/models/project_statistics_spec.rb b/spec/models/project_statistics_spec.rb
index a6e2bcf1525..ef53de6ad82 100644
--- a/spec/models/project_statistics_spec.rb
+++ b/spec/models/project_statistics_spec.rb
@@ -455,35 +455,50 @@ RSpec.describe ProjectStatistics do
end
describe '.increment_statistic' do
- shared_examples 'a statistic that increases storage_size' do
+ shared_examples 'a statistic that increases storage_size synchronously' do
+ let(:increment) { Gitlab::Counters::Increment.new(amount: 13) }
+
it 'increases the statistic by that amount' do
- expect { described_class.increment_statistic(project, stat, 13) }
+ expect { described_class.increment_statistic(project, stat, increment) }
.to change { statistics.reload.send(stat) || 0 }
- .by(13)
+ .by(increment.amount)
end
it 'increases also storage size by that amount' do
- expect { described_class.increment_statistic(project, stat, 20) }
+ expect { described_class.increment_statistic(project, stat, increment) }
.to change { statistics.reload.storage_size }
- .by(20)
+ .by(increment.amount)
end
it 'schedules a namespace aggregation worker' do
expect(Namespaces::ScheduleAggregationWorker).to receive(:perform_async)
.with(statistics.project.namespace.id)
- described_class.increment_statistic(project, stat, 20)
+ described_class.increment_statistic(project, stat, increment)
+ end
+
+ context 'when the project is pending delete' do
+ before do
+ project.update_attribute(:pending_delete, true)
+ end
+
+ it 'does not change the statistics' do
+ expect { described_class.increment_statistic(project, stat, increment) }
+ .not_to change { statistics.reload.send(stat) }
+ end
end
end
shared_examples 'a statistic that increases storage_size asynchronously' do
+ let(:increment) { Gitlab::Counters::Increment.new(amount: 13) }
+
it 'stores the increment temporarily in Redis', :clean_gitlab_redis_shared_state do
- described_class.increment_statistic(project, stat, 13)
+ described_class.increment_statistic(project, stat, increment)
Gitlab::Redis::SharedState.with do |redis|
key = statistics.counter(stat).key
- increment = redis.get(key)
- expect(increment.to_i).to eq(13)
+ value = redis.get(key)
+ expect(value.to_i).to eq(increment.amount)
end
end
@@ -493,9 +508,20 @@ RSpec.describe ProjectStatistics do
.with(Gitlab::Counters::BufferedCounter::WORKER_DELAY, described_class.name, statistics.id, stat)
.and_call_original
- expect { described_class.increment_statistic(project, stat, 20) }
- .to change { statistics.reload.send(stat) }.by(20)
- .and change { statistics.reload.send(:storage_size) }.by(20)
+ expect { described_class.increment_statistic(project, stat, increment) }
+ .to change { statistics.reload.send(stat) }.by(increment.amount)
+ .and change { statistics.reload.send(:storage_size) }.by(increment.amount)
+ end
+
+ context 'when the project is pending delete' do
+ before do
+ project.update_attribute(:pending_delete, true)
+ end
+
+ it 'does not change the statistics' do
+ expect { described_class.increment_statistic(project, stat, increment) }
+ .not_to change { [statistics.reload.send(stat), statistics.reload.send(:storage_size)] }
+ end
end
end
@@ -508,7 +534,7 @@ RSpec.describe ProjectStatistics do
context 'when adjusting :pipeline_artifacts_size' do
let(:stat) { :pipeline_artifacts_size }
- it_behaves_like 'a statistic that increases storage_size'
+ it_behaves_like 'a statistic that increases storage_size synchronously'
end
context 'when adjusting :packages_size' do
@@ -518,9 +544,11 @@ RSpec.describe ProjectStatistics do
end
context 'when the amount is 0' do
+ let(:increment) { Gitlab::Counters::Increment.new(amount: 0) }
+
it 'does not execute a query' do
project
- expect { described_class.increment_statistic(project, :build_artifacts_size, 0) }
+ expect { described_class.increment_statistic(project, :build_artifacts_size, increment) }
.not_to exceed_query_limit(0)
end
end
@@ -532,4 +560,116 @@ RSpec.describe ProjectStatistics do
end
end
end
+
+ describe '.bulk_increment_statistic' do
+ let(:increments) { [10, 3].map { |amount| Gitlab::Counters::Increment.new(amount: amount) } }
+ let(:total_amount) { increments.sum(&:amount) }
+
+ shared_examples 'a statistic that increases storage_size synchronously' do
+ it 'increases the statistic by that amount' do
+ expect { described_class.bulk_increment_statistic(project, stat, increments) }
+ .to change { statistics.reload.send(stat) || 0 }
+ .by(total_amount)
+ end
+
+ it 'increases also storage size by that amount' do
+ expect { described_class.bulk_increment_statistic(project, stat, increments) }
+ .to change { statistics.reload.storage_size }
+ .by(total_amount)
+ end
+
+ it 'schedules a namespace aggregation worker' do
+ expect(Namespaces::ScheduleAggregationWorker).to receive(:perform_async)
+ .with(statistics.project.namespace.id)
+
+ described_class.bulk_increment_statistic(project, stat, increments)
+ end
+
+ context 'when the project is pending delete' do
+ before do
+ project.update_attribute(:pending_delete, true)
+ end
+
+ it 'does not change the statistics' do
+ expect { described_class.bulk_increment_statistic(project, stat, increments) }
+ .not_to change { statistics.reload.send(stat) }
+ end
+ end
+ end
+
+ shared_examples 'a statistic that increases storage_size asynchronously' do
+ it 'stores the increment temporarily in Redis', :clean_gitlab_redis_shared_state do
+ described_class.bulk_increment_statistic(project, stat, increments)
+
+ Gitlab::Redis::SharedState.with do |redis|
+ key = statistics.counter(stat).key
+ increment = redis.get(key)
+ expect(increment.to_i).to eq(total_amount)
+ end
+ end
+
+ it 'schedules a worker to update the statistic and storage_size async', :sidekiq_inline do
+ expect(FlushCounterIncrementsWorker)
+ .to receive(:perform_in)
+ .with(Gitlab::Counters::BufferedCounter::WORKER_DELAY, described_class.name, statistics.id, stat)
+ .and_call_original
+
+ expect { described_class.bulk_increment_statistic(project, stat, increments) }
+ .to change { statistics.reload.send(stat) }.by(total_amount)
+ .and change { statistics.reload.send(:storage_size) }.by(total_amount)
+ end
+
+ context 'when the project is pending delete' do
+ before do
+ project.update_attribute(:pending_delete, true)
+ end
+
+ it 'does not change the statistics' do
+ expect { described_class.bulk_increment_statistic(project, stat, increments) }
+ .not_to change { [statistics.reload.send(stat), statistics.reload.send(:storage_size)] }
+ end
+ end
+ end
+
+ context 'when adjusting :build_artifacts_size' do
+ let(:stat) { :build_artifacts_size }
+
+ it_behaves_like 'a statistic that increases storage_size asynchronously'
+
+ context 'when :project_statistics_bulk_increment flag is disabled' do
+ before do
+ stub_feature_flags(project_statistics_bulk_increment: false)
+ end
+
+ it 'calls increment_statistic once with the sum of the increments' do
+ total_amount = increments.sum(&:amount)
+ expect(statistics)
+ .to receive(:increment_statistic).with(stat, have_attributes(amount: total_amount)).and_call_original
+
+ described_class.bulk_increment_statistic(project, stat, increments)
+ end
+
+ it_behaves_like 'a statistic that increases storage_size asynchronously'
+ end
+ end
+
+ context 'when adjusting :pipeline_artifacts_size' do
+ let(:stat) { :pipeline_artifacts_size }
+
+ it_behaves_like 'a statistic that increases storage_size synchronously'
+ end
+
+ context 'when adjusting :packages_size' do
+ let(:stat) { :packages_size }
+
+ it_behaves_like 'a statistic that increases storage_size asynchronously'
+ end
+
+ context 'when using an invalid column' do
+ it 'raises an error' do
+ expect { described_class.bulk_increment_statistic(project, :id, increments) }
+ .to raise_error(ArgumentError, "Cannot increment attribute: id")
+ end
+ end
+ end
end
diff --git a/spec/models/projects/branch_rule_spec.rb b/spec/models/projects/branch_rule_spec.rb
new file mode 100644
index 00000000000..6910fbbb6db
--- /dev/null
+++ b/spec/models/projects/branch_rule_spec.rb
@@ -0,0 +1,32 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::BranchRule, feature_category: :source_code_management do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:protected_branch) { create(:protected_branch, project: project, name: 'feature*') }
+
+ subject { described_class.new(protected_branch.project, protected_branch) }
+
+ it 'delegates methods to protected branch' do
+ expect(subject).to delegate_method(:name).to(:protected_branch)
+ expect(subject).to delegate_method(:group).to(:protected_branch)
+ expect(subject).to delegate_method(:default_branch?).to(:protected_branch)
+ expect(subject).to delegate_method(:created_at).to(:protected_branch)
+ expect(subject).to delegate_method(:updated_at).to(:protected_branch)
+ end
+
+ it 'is protected' do
+ expect(subject.protected?).to eq(true)
+ end
+
+ it 'branch protection returns protected branch' do
+ expect(subject.branch_protection).to eq(protected_branch)
+ end
+
+ describe '#matching_branches_count' do
+ it 'returns the number of branches that match the protected branch name' do
+ expect(subject.matching_branches_count).to eq(2)
+ end
+ end
+end
diff --git a/spec/models/projects/build_artifacts_size_refresh_spec.rb b/spec/models/projects/build_artifacts_size_refresh_spec.rb
index caff06262d9..7255c8ac89b 100644
--- a/spec/models/projects/build_artifacts_size_refresh_spec.rb
+++ b/spec/models/projects/build_artifacts_size_refresh_spec.rb
@@ -14,10 +14,11 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
end
describe 'scopes' do
- let_it_be(:refresh_1) { create(:project_build_artifacts_size_refresh, :running, updated_at: (described_class::STALE_WINDOW + 1.second).ago) }
+ let_it_be(:refresh_1) { create(:project_build_artifacts_size_refresh, :stale) }
let_it_be(:refresh_2) { create(:project_build_artifacts_size_refresh, :running, updated_at: 1.hour.ago) }
let_it_be(:refresh_3) { create(:project_build_artifacts_size_refresh, :pending) }
let_it_be(:refresh_4) { create(:project_build_artifacts_size_refresh, :created) }
+ let_it_be(:refresh_5) { create(:project_build_artifacts_size_refresh, :finalizing) }
describe 'stale' do
it 'returns records in running state and has not been updated for more than 2 hours' do
@@ -26,15 +27,23 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
end
describe 'remaining' do
- it 'returns stale, created, and pending records' do
+ it 'returns stale, created and pending records' do
expect(described_class.remaining).to match_array([refresh_1, refresh_3, refresh_4])
end
+
+ it 'does not include finalizing' do
+ expect(described_class.remaining).not_to include(refresh_5)
+ end
end
describe 'processing_queue' do
it 'prioritizes pending -> stale -> created' do
expect(described_class.processing_queue).to eq([refresh_3, refresh_1, refresh_4])
end
+
+ it 'does not include finalizing' do
+ expect(described_class.processing_queue).not_to include(refresh_5)
+ end
end
end
@@ -58,10 +67,7 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
let_it_be_with_reload(:refresh) do
create(
:project_build_artifacts_size_refresh,
- :created,
- updated_at: 2.days.ago,
- refresh_started_at: nil,
- last_job_artifact_id: nil
+ :created
)
end
@@ -70,8 +76,8 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
let(:statistics) { refresh.project.statistics }
before do
- stats = create(:project_statistics, project: refresh.project, build_artifacts_size: 120)
- stats.increment_counter(:build_artifacts_size, 30)
+ statistics.update!(build_artifacts_size: 120)
+ statistics.increment_counter(:build_artifacts_size, Gitlab::Counters::Increment.new(amount: 30))
end
it 'transitions the state to running' do
@@ -91,11 +97,11 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
end
it 'resets the build artifacts size stats' do
- expect { refresh.process! }.to change { statistics.build_artifacts_size }.to(0)
+ expect { refresh.process! }.to change { statistics.reload.build_artifacts_size }.from(120).to(0)
end
- it 'resets the counter attribute to zero' do
- expect { refresh.process! }.to change { statistics.counter(:build_artifacts_size).get }.to(0)
+ it 'resets the buffered counter value to zero' do
+ expect { refresh.process! }.to change { Gitlab::Counters::BufferedCounter.new(statistics, :build_artifacts_size).get }.to(0)
end
end
@@ -170,6 +176,22 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
expect { refresh.requeue!(last_job_artifact_id) }.to change { refresh.reload.last_job_artifact_id.to_i }.to(last_job_artifact_id)
end
end
+
+ describe '#schedule_finalize!' do
+ let!(:refresh) { create(:project_build_artifacts_size_refresh, :running) }
+
+ it 'transitions refresh state from running to finalizing' do
+ expect { refresh.schedule_finalize! }.to change { refresh.reload.state }.to(described_class::STATES[:finalizing])
+ end
+
+ it 'schedules Projects::FinalizeProjectStatisticsRefreshWorker' do
+ expect(Projects::FinalizeProjectStatisticsRefreshWorker)
+ .to receive(:perform_in)
+ .with(described_class::FINALIZE_DELAY, refresh.class.to_s, refresh.id)
+
+ refresh.schedule_finalize!
+ end
+ end
end
describe '.process_next_refresh!' do
@@ -210,6 +232,26 @@ RSpec.describe Projects::BuildArtifactsSizeRefresh, type: :model do
end
end
+ describe '#finalize!' do
+ let!(:refresh) { create(:project_build_artifacts_size_refresh, :finalizing) }
+
+ let(:statistics) { refresh.project.statistics }
+
+ before do
+ allow(statistics).to receive(:finalize_refresh)
+ end
+
+ it 'stores the refresh amount into the buffered counter' do
+ expect(statistics).to receive(:finalize_refresh).with(described_class::COUNTER_ATTRIBUTE_NAME)
+
+ refresh.finalize!
+ end
+
+ it 'destroys the refresh record' do
+ expect { refresh.finalize! }.to change { described_class.count }.by(-1)
+ end
+ end
+
describe '#next_batch' do
let!(:project) { create(:project) }
let!(:artifact_1) { create(:ci_job_artifact, project: project, created_at: 14.days.ago) }
diff --git a/spec/models/release_spec.rb b/spec/models/release_spec.rb
index 180a76ff593..5ed4eb7d233 100644
--- a/spec/models/release_spec.rb
+++ b/spec/models/release_spec.rb
@@ -86,6 +86,10 @@ RSpec.describe Release do
context 'when updating existing release without author' do
let(:release) { create(:release, :legacy) }
+ before do
+ stub_feature_flags(validate_release_with_author: false)
+ end
+
it 'updates successfully' do
release.description += 'Update'
diff --git a/spec/models/repository_spec.rb b/spec/models/repository_spec.rb
index 969a279dd52..a3d2f9a09fb 100644
--- a/spec/models/repository_spec.rb
+++ b/spec/models/repository_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Repository do
+RSpec.describe Repository, feature_category: :source_code_management do
include RepoHelpers
before do
@@ -534,21 +534,48 @@ RSpec.describe Repository do
end
describe '#find_commits_by_message' do
- it 'returns commits with messages containing a given string' do
- commit_ids = repository.find_commits_by_message('submodule').map(&:id)
+ subject(:find_commits_by_message) { repository.find_commits_by_message(query) }
- expect(commit_ids).to include(
- '5937ac0a7beb003549fc5fd26fc247adbce4a52e',
- '6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9',
- 'cfe32cf61b73a0d5e9f13e774abde7ff789b1660'
- )
+ let(:commit_ids) { find_commits_by_message.map(&:id) }
+ let(:query) { 'submodule' }
+ let(:expected_commit_ids) do
+ %w[
+ 5937ac0a7beb003549fc5fd26fc247adbce4a52e
+ 6f6d7e7ed97bb5f0054f2b1df789b39ca89b6ff9
+ cfe32cf61b73a0d5e9f13e774abde7ff789b1660
+ ]
+ end
+
+ it 'returns commits with messages containing a given string' do
+ expect(commit_ids).to include(*expected_commit_ids)
expect(commit_ids).not_to include('913c66a37b4a45b9769037c55c2d238bd0942d2e')
end
- it 'is case insensitive' do
- commit_ids = repository.find_commits_by_message('SUBMODULE').map(&:id)
+ context 'when query is in UPCASE' do
+ let(:query) { 'SUBMODULE' }
+
+ it 'is case insensitive' do
+ expect(commit_ids).to include(*expected_commit_ids)
+ end
+ end
+
+ context 'when message has surrounding spaces' do
+ let(:query) { ' submodule ' }
+
+ it 'removes spaces and returns commits by message' do
+ expect(commit_ids).to include(*expected_commit_ids)
+ expect(commit_ids).not_to include('913c66a37b4a45b9769037c55c2d238bd0942d2e')
+ end
+
+ context 'when feature flag "commit_search_trailing_spaces" is disabled' do
+ before do
+ stub_feature_flags(commit_search_trailing_spaces: false)
+ end
- expect(commit_ids).to include('5937ac0a7beb003549fc5fd26fc247adbce4a52e')
+ it 'returns an empty list' do
+ expect(commit_ids).to be_empty
+ end
+ end
end
describe 'when storage is broken', :broken_storage do
@@ -2691,12 +2718,26 @@ RSpec.describe Repository do
end
it 'caches the response' do
- expect(repository.head_tree).to receive(:readme_path).and_call_original.once
+ expect(repository).to receive(:search_files_by_regexp).and_call_original.once
2.times do
expect(repository.readme_path).to eq("README.md")
end
end
+
+ context 'when "readme_from_gitaly" FF is disabled' do
+ before do
+ stub_feature_flags(readme_from_gitaly: false)
+ end
+
+ it 'caches the response' do
+ expect(repository.head_tree).to receive(:readme_path).and_call_original.once
+
+ 2.times do
+ expect(repository.readme_path).to eq("README.md")
+ end
+ end
+ end
end
end
end
diff --git a/spec/models/resource_event_spec.rb b/spec/models/resource_event_spec.rb
new file mode 100644
index 00000000000..f40c192ab2b
--- /dev/null
+++ b/spec/models/resource_event_spec.rb
@@ -0,0 +1,19 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ResourceEvent, feature_category: :team_planning, type: :model do
+ let(:dummy_resource_label_event_class) do
+ Class.new(ResourceEvent) do
+ self.table_name = 'resource_label_events'
+ end
+ end
+
+ it 'raises an error when `issuable` is not implemented' do
+ expect { dummy_resource_label_event_class.new.issuable }.to raise_error(NoMethodError)
+ end
+
+ it 'raises an error when `synthetic_note_class` is not implemented' do
+ expect { dummy_resource_label_event_class.new.synthetic_note_class }.to raise_error(NoMethodError)
+ end
+end
diff --git a/spec/models/resource_label_event_spec.rb b/spec/models/resource_label_event_spec.rb
index 5087a8e8524..87f3b9fb2bb 100644
--- a/spec/models/resource_label_event_spec.rb
+++ b/spec/models/resource_label_event_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ResourceLabelEvent, type: :model do
+RSpec.describe ResourceLabelEvent, feature_category: :team_planning, type: :model do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:issue) { create(:issue, project: project) }
let_it_be(:merge_request) { create(:merge_request, source_project: project) }
@@ -15,6 +15,7 @@ RSpec.describe ResourceLabelEvent, type: :model do
it_behaves_like 'a resource event'
it_behaves_like 'a resource event for issues'
it_behaves_like 'a resource event for merge requests'
+ it_behaves_like 'a note for work item resource event'
describe 'associations' do
it { is_expected.to belong_to(:label) }
@@ -154,4 +155,19 @@ RSpec.describe ResourceLabelEvent, type: :model do
expect(event_1.discussion_id).not_to eq(event_2.discussion_id)
end
end
+
+ context 'with multiple label events' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:work_item) { create(:work_item, :task, project: project, author: user) }
+ let_it_be(:events) { create_pair(:resource_label_event, issue: work_item) }
+
+ it 'builds synthetic note' do
+ first_event = events.first
+ synthetic_note = first_event.work_item_synthetic_system_note(events: events)
+
+ expect(synthetic_note.class.name).to eq(first_event.synthetic_note_class.name)
+ expect(synthetic_note.events).to match_array(events)
+ end
+ end
end
diff --git a/spec/models/resource_milestone_event_spec.rb b/spec/models/resource_milestone_event_spec.rb
index c1761e5b2e8..11b704ceadf 100644
--- a/spec/models/resource_milestone_event_spec.rb
+++ b/spec/models/resource_milestone_event_spec.rb
@@ -2,10 +2,11 @@
require 'spec_helper'
-RSpec.describe ResourceMilestoneEvent, type: :model do
+RSpec.describe ResourceMilestoneEvent, feature_category: :team_planning, type: :model do
it_behaves_like 'a resource event'
it_behaves_like 'a resource event for issues'
it_behaves_like 'a resource event for merge requests'
+ it_behaves_like 'a note for work item resource event'
it_behaves_like 'having unique enum values'
it_behaves_like 'timebox resource event validations'
diff --git a/spec/models/resource_state_event_spec.rb b/spec/models/resource_state_event_spec.rb
index f84634bd220..04e4359a3ff 100644
--- a/spec/models/resource_state_event_spec.rb
+++ b/spec/models/resource_state_event_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ResourceStateEvent, type: :model do
+RSpec.describe ResourceStateEvent, feature_category: :team_planning, type: :model do
subject { build(:resource_state_event, issue: issue) }
let(:issue) { create(:issue) }
@@ -11,6 +11,7 @@ RSpec.describe ResourceStateEvent, type: :model do
it_behaves_like 'a resource event'
it_behaves_like 'a resource event for issues'
it_behaves_like 'a resource event for merge requests'
+ it_behaves_like 'a note for work item resource event'
describe 'validations' do
describe 'Issuable validation' do
diff --git a/spec/models/timelog_spec.rb b/spec/models/timelog_spec.rb
index f96d02e6a82..515057a862b 100644
--- a/spec/models/timelog_spec.rb
+++ b/spec/models/timelog_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Timelog do
+RSpec.describe Timelog, feature_category: :team_planning do
subject { create(:timelog) }
let_it_be(:issue) { create(:issue) }
@@ -149,4 +149,30 @@ RSpec.describe Timelog do
end
end
end
+
+ describe 'sorting' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:timelog_a) { create(:issue_timelog, time_spent: 7200, spent_at: 1.hour.ago, user: user) }
+ let_it_be(:timelog_b) { create(:issue_timelog, time_spent: 5400, spent_at: 2.hours.ago, user: user) }
+ let_it_be(:timelog_c) { create(:issue_timelog, time_spent: 1800, spent_at: 30.minutes.ago, user: user) }
+ let_it_be(:timelog_d) { create(:issue_timelog, time_spent: 3600, spent_at: 1.day.ago, user: user) }
+
+ describe '.sort_by_field' do
+ it 'sorts timelogs by time spent in ascending order' do
+ expect(user.timelogs.sort_by_field('time_spent', :asc)).to eq([timelog_c, timelog_d, timelog_b, timelog_a])
+ end
+
+ it 'sorts timelogs by time spent in descending order' do
+ expect(user.timelogs.sort_by_field('time_spent', :desc)).to eq([timelog_a, timelog_b, timelog_d, timelog_c])
+ end
+
+ it 'sorts timelogs by spent at in ascending order' do
+ expect(user.timelogs.sort_by_field('spent_at', :asc)).to eq([timelog_d, timelog_b, timelog_a, timelog_c])
+ end
+
+ it 'sorts timelogs by spent at in descending order' do
+ expect(user.timelogs.sort_by_field('spent_at', :desc)).to eq([timelog_c, timelog_a, timelog_b, timelog_d])
+ end
+ end
+ end
end
diff --git a/spec/models/todo_spec.rb b/spec/models/todo_spec.rb
index 221f09dd87f..8669db4af16 100644
--- a/spec/models/todo_spec.rb
+++ b/spec/models/todo_spec.rb
@@ -175,6 +175,15 @@ RSpec.describe Todo do
end
describe '#target_reference' do
+ shared_examples 'returns full_path' do
+ specify do
+ subject.target = target
+ subject.action = Todo::MEMBER_ACCESS_REQUESTED
+
+ expect(subject.target_reference).to eq target.full_path
+ end
+ end
+
it 'returns commit full reference with short id' do
project = create(:project, :repository)
commit = project.commit
@@ -193,13 +202,10 @@ RSpec.describe Todo do
end
context 'when target is member access requested' do
- it 'returns group full path' do
- group = create(:group)
-
- subject.target = group
- subject.action = Todo::MEMBER_ACCESS_REQUESTED
-
- expect(subject.target_reference).to eq group.full_path
+ %i[project group].each do |target_type|
+ it_behaves_like 'returns full_path' do
+ let(:target) { create(target_type, :public) }
+ end
end
end
end
@@ -525,4 +531,46 @@ RSpec.describe Todo do
expect(described_class.for_internal_notes).to contain_exactly(todo)
end
end
+
+ describe '#access_request_url' do
+ shared_examples 'returns member access requests tab url/path' do
+ it 'returns group access requests tab url/path if target is group' do
+ group = create(:group)
+ subject.target = group
+
+ expect(subject.access_request_url(only_path: only_path)).to eq(Gitlab::Routing.url_helpers.group_group_members_url(group, tab: 'access_requests', only_path: only_path))
+ end
+
+ it 'returns project access requests tab url/path if target is project' do
+ project = create(:project)
+ subject.target = project
+
+ expect(subject.access_request_url(only_path: only_path)).to eq(Gitlab::Routing.url_helpers.project_project_members_url(project, tab: 'access_requests', only_path: only_path))
+ end
+
+ it 'returns empty string if target is neither group nor project' do
+ subject.target = issue
+
+ expect(subject.access_request_url(only_path: only_path)).to eq("")
+ end
+ end
+
+ context 'when only_path param is false' do
+ it_behaves_like 'returns member access requests tab url/path' do
+ let_it_be(:only_path) { false }
+ end
+ end
+
+ context 'when only_path param is nil' do
+ it_behaves_like 'returns member access requests tab url/path' do
+ let_it_be(:only_path) { nil }
+ end
+ end
+
+ context 'when only_path param is true' do
+ it_behaves_like 'returns member access requests tab url/path' do
+ let_it_be(:only_path) { true }
+ end
+ end
+ end
end
diff --git a/spec/models/user_detail_spec.rb b/spec/models/user_detail_spec.rb
index ed55aca49b7..1893b6530a5 100644
--- a/spec/models/user_detail_spec.rb
+++ b/spec/models/user_detail_spec.rb
@@ -68,26 +68,34 @@ RSpec.describe UserDetail do
end
end
- describe '.user_fields_changed?' do
- let(:user) { create(:user) }
-
- context 'when user detail fields unchanged' do
- it 'returns false' do
- expect(described_class.user_fields_changed?(user)).to be false
- end
-
- %i[linkedin location organization skype twitter website_url].each do |attr|
- context "when #{attr} is changed" do
- before do
- user[attr] = 'new value'
- end
-
- it 'returns true' do
- expect(described_class.user_fields_changed?(user)).to be true
- end
- end
+ describe '#save' do
+ let(:user_detail) do
+ create(:user_detail,
+ bio: 'bio',
+ linkedin: 'linkedin',
+ twitter: 'twitter',
+ skype: 'skype',
+ location: 'location',
+ organization: 'organization',
+ website_url: 'https://example.com')
+ end
+
+ shared_examples 'prevents `nil` value' do |attr|
+ it 'converts `nil` to the empty string' do
+ user_detail[attr] = nil
+ expect { user_detail.save! }
+ .to change { user_detail[attr] }.to('')
+ .and not_change { user_detail.attributes.except(attr.to_s) }
end
end
+
+ it_behaves_like 'prevents `nil` value', :bio
+ it_behaves_like 'prevents `nil` value', :linkedin
+ it_behaves_like 'prevents `nil` value', :twitter
+ it_behaves_like 'prevents `nil` value', :skype
+ it_behaves_like 'prevents `nil` value', :location
+ it_behaves_like 'prevents `nil` value', :organization
+ it_behaves_like 'prevents `nil` value', :website_url
end
describe '#sanitize_attrs' do
@@ -137,45 +145,4 @@ RSpec.describe UserDetail do
details.save!
end
end
-
- describe '#assign_changed_fields_from_user' do
- let(:user_detail) { build(:user_detail) }
-
- shared_examples 'syncs field with `user_details`' do |field|
- it 'does not sync the field to `user_details` if unchanged' do
- expect { user_detail.assign_changed_fields_from_user }
- .to not_change { user_detail.public_send(field) }
- end
-
- it 'syncs the field to `user_details` if changed' do
- user_detail.user[field] = "new_value"
- expect { user_detail.assign_changed_fields_from_user }
- .to change { user_detail.public_send(field) }
- .to("new_value")
- end
-
- it 'truncates the field if too long' do
- user_detail.user[field] = 'a' * (UserDetail::DEFAULT_FIELD_LENGTH + 1)
- expect { user_detail.assign_changed_fields_from_user }
- .to change { user_detail.public_send(field) }
- .to('a' * UserDetail::DEFAULT_FIELD_LENGTH)
- end
-
- it 'properly syncs nil field to `user_details' do
- user_detail.user[field] = 'Test'
- user_detail.user.save!(validate: false)
- user_detail.user[field] = nil
- expect { user_detail.assign_changed_fields_from_user }
- .to change { user_detail.public_send(field) }
- .to('')
- end
- end
-
- it_behaves_like 'syncs field with `user_details`', :linkedin
- it_behaves_like 'syncs field with `user_details`', :location
- it_behaves_like 'syncs field with `user_details`', :organization
- it_behaves_like 'syncs field with `user_details`', :skype
- it_behaves_like 'syncs field with `user_details`', :twitter
- it_behaves_like 'syncs field with `user_details`', :website_url
- end
end
diff --git a/spec/models/user_highest_role_spec.rb b/spec/models/user_highest_role_spec.rb
index 3ae672cf7f7..7ef04466b6f 100644
--- a/spec/models/user_highest_role_spec.rb
+++ b/spec/models/user_highest_role_spec.rb
@@ -8,7 +8,7 @@ RSpec.describe UserHighestRole do
end
describe 'validations' do
- it { is_expected.to validate_inclusion_of(:highest_access_level).in_array([nil, *Gitlab::Access.all_values]) }
+ it { is_expected.to validate_inclusion_of(:highest_access_level).in_array(Gitlab::Access.all_values).allow_nil }
end
describe 'scopes' do
diff --git a/spec/models/user_spec.rb b/spec/models/user_spec.rb
index 4dbcc68af19..e2e4e4248d8 100644
--- a/spec/models/user_spec.rb
+++ b/spec/models/user_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe User do
+RSpec.describe User, feature_category: :users do
include ProjectForksHelper
include TermsHelper
include ExclusiveLeaseHelpers
@@ -101,6 +101,24 @@ RSpec.describe User do
it { is_expected.to delegate_method(:requires_credit_card_verification).to(:user_detail).allow_nil }
it { is_expected.to delegate_method(:requires_credit_card_verification=).to(:user_detail).with_arguments(:args).allow_nil }
+
+ it { is_expected.to delegate_method(:linkedin).to(:user_detail).allow_nil }
+ it { is_expected.to delegate_method(:linkedin=).to(:user_detail).with_arguments(:args).allow_nil }
+
+ it { is_expected.to delegate_method(:twitter).to(:user_detail).allow_nil }
+ it { is_expected.to delegate_method(:twitter=).to(:user_detail).with_arguments(:args).allow_nil }
+
+ it { is_expected.to delegate_method(:skype).to(:user_detail).allow_nil }
+ it { is_expected.to delegate_method(:skype=).to(:user_detail).with_arguments(:args).allow_nil }
+
+ it { is_expected.to delegate_method(:website_url).to(:user_detail).allow_nil }
+ it { is_expected.to delegate_method(:website_url=).to(:user_detail).with_arguments(:args).allow_nil }
+
+ it { is_expected.to delegate_method(:location).to(:user_detail).allow_nil }
+ it { is_expected.to delegate_method(:location=).to(:user_detail).with_arguments(:args).allow_nil }
+
+ it { is_expected.to delegate_method(:organization).to(:user_detail).allow_nil }
+ it { is_expected.to delegate_method(:organization=).to(:user_detail).with_arguments(:args).allow_nil }
end
describe 'associations' do
@@ -148,6 +166,11 @@ RSpec.describe User do
it { is_expected.to have_many(:group_callouts).class_name('Users::GroupCallout') }
it { is_expected.to have_many(:project_callouts).class_name('Users::ProjectCallout') }
it { is_expected.to have_many(:created_projects).dependent(:nullify).class_name('Project') }
+ it { is_expected.to have_many(:user_achievements).class_name('Achievements::UserAchievement').inverse_of(:user) }
+ it { is_expected.to have_many(:awarded_user_achievements).class_name('Achievements::UserAchievement').with_foreign_key('awarded_by_user_id').inverse_of(:awarded_by_user) }
+ it { is_expected.to have_many(:revoked_user_achievements).class_name('Achievements::UserAchievement').with_foreign_key('revoked_by_user_id').inverse_of(:revoked_by_user) }
+ it { is_expected.to have_many(:achievements).through(:user_achievements).class_name('Achievements::Achievement').inverse_of(:users) }
+ it { is_expected.to have_many(:namespace_commit_emails).class_name('Users::NamespaceCommitEmail') }
describe 'default values' do
let(:user) { described_class.new }
@@ -160,7 +183,7 @@ RSpec.describe User do
it { expect(user.hide_no_password).to be_falsey }
it { expect(user.project_view).to eq('files') }
it { expect(user.notified_of_own_activity).to be_falsey }
- it { expect(user.preferred_language).to eq(I18n.default_locale.to_s) }
+ it { expect(user.preferred_language).to eq(Gitlab::CurrentSettings.default_preferred_language) }
it { expect(user.theme_id).to eq(described_class.gitlab_config.default_theme) }
end
@@ -169,17 +192,51 @@ RSpec.describe User do
expect(create(:user).user_detail).not_to be_persisted
end
- it 'creates `user_detail` when `bio` is given' do
- user = create(:user, bio: 'my bio')
+ shared_examples 'delegated field' do |field|
+ it 'creates `user_detail` when the field is given' do
+ user = create(:user, field => 'my field')
+
+ expect(user.user_detail).to be_persisted
+ expect(user.user_detail[field]).to eq('my field')
+ end
+
+ it 'delegates to `user_detail`' do
+ user = create(:user, field => 'my field')
+
+ expect(user.public_send(field)).to eq(user.user_detail[field])
+ end
+
+ it 'creates `user_detail` when first updated' do
+ user = create(:user)
+
+ expect { user.update!(field => 'my field') }.to change { user.user_detail.persisted? }.from(false).to(true)
+ end
+ end
+
+ it_behaves_like 'delegated field', :bio
+ it_behaves_like 'delegated field', :linkedin
+ it_behaves_like 'delegated field', :twitter
+ it_behaves_like 'delegated field', :skype
+ it_behaves_like 'delegated field', :location
+ it_behaves_like 'delegated field', :organization
+
+ it 'creates `user_detail` when `website_url` is given' do
+ user = create(:user, website_url: 'https://example.com')
expect(user.user_detail).to be_persisted
- expect(user.user_detail.bio).to eq('my bio')
+ expect(user.user_detail.website_url).to eq('https://example.com')
+ end
+
+ it 'delegates `website_url` to `user_detail`' do
+ user = create(:user, website_url: 'http://example.com')
+
+ expect(user.website_url).to eq(user.user_detail.website_url)
end
- it 'delegates `bio` to `user_detail`' do
- user = create(:user, bio: 'my bio')
+ it 'creates `user_detail` when `website_url` is first updated' do
+ user = create(:user)
- expect(user.bio).to eq(user.user_detail.bio)
+ expect { user.update!(website_url: 'https://example.com') }.to change { user.user_detail.persisted? }.from(false).to(true)
end
it 'delegates `pronouns` to `user_detail`' do
@@ -193,30 +250,24 @@ RSpec.describe User do
expect(user.pronunciation).to eq(user.user_detail.pronunciation)
end
-
- it 'creates `user_detail` when `bio` is first updated' do
- user = create(:user)
-
- expect { user.update!(bio: 'my bio') }.to change { user.user_detail.persisted? }.from(false).to(true)
- end
end
- describe '#abuse_report' do
+ describe '#abuse_reports' do
let(:current_user) { create(:user) }
let(:other_user) { create(:user) }
- it { is_expected.to have_one(:abuse_report) }
+ it { is_expected.to have_many(:abuse_reports) }
it 'refers to the abuse report whose user_id is the current user' do
abuse_report = create(:abuse_report, reporter: other_user, user: current_user)
- expect(current_user.abuse_report).to eq(abuse_report)
+ expect(current_user.abuse_reports.last).to eq(abuse_report)
end
it 'does not refer to the abuse report whose reporter_id is the current user' do
create(:abuse_report, reporter: current_user, user: other_user)
- expect(current_user.abuse_report).to be_nil
+ expect(current_user.abuse_reports.last).to be_nil
end
it 'does not update the user_id of an abuse report when the user is updated' do
@@ -436,18 +487,25 @@ RSpec.describe User do
end
describe 'preferred_language' do
- context 'when its value is nil in the database' do
- let(:user) { build(:user, preferred_language: nil) }
+ subject(:preferred_language) { user.preferred_language }
- it 'falls back to I18n.default_locale when empty in the database' do
- expect(user.preferred_language).to eq I18n.default_locale.to_s
- end
+ context 'when preferred_language is set' do
+ let(:user) { build(:user, preferred_language: 'de_DE') }
+
+ it { is_expected.to eq 'de_DE' }
+ end
+
+ context 'when preferred_language is nil' do
+ let(:user) { build(:user) }
- it 'falls back to english when I18n.default_locale is not an available language' do
- allow(I18n).to receive(:default_locale) { :kl }
- default_preferred_language = user.send(:default_preferred_language)
+ it { is_expected.to eq 'en' }
- expect(user.preferred_language).to eq default_preferred_language
+ context 'when Gitlab::CurrentSettings.default_preferred_language is set' do
+ before do
+ allow(::Gitlab::CurrentSettings).to receive(:default_preferred_language).and_return('zh_CN')
+ end
+
+ it { is_expected.to eq 'zh_CN' }
end
end
end
@@ -1230,17 +1288,6 @@ RSpec.describe User do
end
describe 'before save hook' do
- describe '#default_private_profile_to_false' do
- let(:user) { create(:user, private_profile: true) }
-
- it 'converts nil to false' do
- user.private_profile = nil
- user.save!
-
- expect(user.private_profile).to eq false
- end
- end
-
context 'when saving an external user' do
let(:user) { create(:user) }
let(:external_user) { create(:user, external: true) }
@@ -2675,7 +2722,7 @@ RSpec.describe User do
expect(user.can_create_group).to eq(Gitlab::CurrentSettings.can_create_group)
expect(user.theme_id).to eq(Gitlab.config.gitlab.default_theme)
expect(user.external).to be_falsey
- expect(user.private_profile).to eq(false)
+ expect(user.private_profile).to eq(Gitlab::CurrentSettings.user_defaults_to_private_profile)
end
end
@@ -3672,19 +3719,14 @@ RSpec.describe User do
describe '#sanitize_attrs' do
let(:user) { build(:user, name: 'test <& user', skype: 'test&user') }
- it 'encodes HTML entities in the Skype attribute' do
- expect { user.sanitize_attrs }.to change { user.skype }.to('test&amp;user')
- end
-
it 'does not encode HTML entities in the name attribute' do
expect { user.sanitize_attrs }.not_to change { user.name }
end
it 'sanitizes attr from html tags' do
- user = create(:user, name: '<a href="//example.com">Test<a>', twitter: '<a href="//evil.com">https://twitter.com<a>')
+ user = create(:user, name: '<a href="//example.com">Test<a>')
expect(user.name).to eq('Test')
- expect(user.twitter).to eq('https://twitter.com')
end
it 'sanitizes attr from js scripts' do
@@ -5253,36 +5295,16 @@ RSpec.describe User do
describe '#invalidate_issue_cache_counts' do
let(:user) { build_stubbed(:user) }
- before do
- stub_feature_flags(limit_assigned_issues_count: false)
- end
-
it 'invalidates cache for issue counter' do
cache_mock = double
expect(cache_mock).to receive(:delete).with(['users', user.id, 'assigned_open_issues_count'])
+ expect(cache_mock).to receive(:delete).with(['users', user.id, 'max_assigned_open_issues_count'])
allow(Rails).to receive(:cache).and_return(cache_mock)
user.invalidate_issue_cache_counts
end
-
- context 'when limit_assigned_issues_count is enabled' do
- before do
- stub_feature_flags(limit_assigned_issues_count: true)
- end
-
- it 'invalidates cache for issue counter' do
- cache_mock = double
-
- expect(cache_mock).to receive(:delete).with(['users', user.id, 'assigned_open_issues_count'])
- expect(cache_mock).to receive(:delete).with(['users', user.id, 'max_assigned_open_issues_count'])
-
- allow(Rails).to receive(:cache).and_return(cache_mock)
-
- user.invalidate_issue_cache_counts
- end
- end
end
describe '#invalidate_merge_request_cache_counts' do
@@ -5506,41 +5528,6 @@ RSpec.describe User do
end
end
- describe '#ensure_user_detail_assigned' do
- let(:user) { build(:user) }
-
- context 'when no user detail field has been changed' do
- before do
- allow(UserDetail)
- .to receive(:user_fields_changed?)
- .and_return(false)
- end
-
- it 'does not assign user details before save' do
- expect(user.user_detail)
- .not_to receive(:assign_changed_fields_from_user)
-
- user.save!
- end
- end
-
- context 'when a user detail field has been changed' do
- before do
- allow(UserDetail)
- .to receive(:user_fields_changed?)
- .and_return(true)
- end
-
- it 'assigns user details before save' do
- expect(user.user_detail)
- .to receive(:assign_changed_fields_from_user)
- .and_call_original
-
- user.save!
- end
- end
- end
-
describe '#username_changed_hook' do
context 'for a new user' do
let(:user) { build(:user) }
@@ -7429,4 +7416,84 @@ RSpec.describe User do
end
end
end
+
+ describe '#namespace_commit_email_for_project' do
+ let_it_be(:user) { create(:user) }
+
+ let(:emails) { user.namespace_commit_email_for_project(project) }
+
+ context 'when project is nil' do
+ let(:project) {}
+
+ it 'returns nil' do
+ expect(emails).to be(nil)
+ end
+ end
+
+ context 'with a group project' do
+ let_it_be(:root_group) { create(:group) }
+ let_it_be(:group) { create(:group, parent: root_group) }
+ let_it_be(:project) { create(:project, group: group) }
+
+ context 'without a defined root group namespace_commit_email' do
+ context 'without a defined project namespace_commit_email' do
+ it 'returns nil' do
+ expect(emails).to be(nil)
+ end
+ end
+
+ context 'with a defined project namespace_commit_email' do
+ it 'returns the defined namespace_commit_email' do
+ project_commit_email = create(:namespace_commit_email,
+ user: user,
+ namespace: project.project_namespace)
+
+ expect(emails).to eq(project_commit_email)
+ end
+ end
+ end
+
+ context 'with a defined root group namespace_commit_email' do
+ let_it_be(:root_group_commit_email) do
+ create(:namespace_commit_email, user: user, namespace: root_group)
+ end
+
+ context 'without a defined project namespace_commit_email' do
+ it 'returns the defined namespace_commit_email' do
+ expect(emails).to eq(root_group_commit_email)
+ end
+ end
+
+ context 'with a defined project namespace_commit_email' do
+ it 'returns the defined namespace_commit_email' do
+ project_commit_email = create(:namespace_commit_email,
+ user: user,
+ namespace: project.project_namespace)
+
+ expect(emails).to eq(project_commit_email)
+ end
+ end
+ end
+ end
+
+ context 'with personal project' do
+ let_it_be(:project) { create(:project, namespace: user.namespace) }
+
+ context 'without a defined project namespace_commit_email' do
+ it 'returns nil' do
+ expect(emails).to be(nil)
+ end
+ end
+
+ context 'with a defined project namespace_commit_email' do
+ it 'returns the defined namespace_commit_email' do
+ project_commit_email = create(:namespace_commit_email,
+ user: user,
+ namespace: project.project_namespace)
+
+ expect(emails).to eq(project_commit_email)
+ end
+ end
+ end
+ end
end
diff --git a/spec/models/users/namespace_commit_email_spec.rb b/spec/models/users/namespace_commit_email_spec.rb
index 696dac25f9b..23fed85ab3e 100644
--- a/spec/models/users/namespace_commit_email_spec.rb
+++ b/spec/models/users/namespace_commit_email_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Users::NamespaceCommitEmail, type: :model do
+RSpec.describe Users::NamespaceCommitEmail, type: :model, feature_category: :source_code_management do
subject { build(:namespace_commit_email) }
describe 'associations' do
@@ -15,7 +15,39 @@ RSpec.describe Users::NamespaceCommitEmail, type: :model do
it { is_expected.to validate_presence_of(:user) }
it { is_expected.to validate_presence_of(:namespace) }
it { is_expected.to validate_presence_of(:email) }
+
+ it { is_expected.to validate_uniqueness_of(:user).scoped_to(:namespace_id) }
+
+ describe 'validate_root_group' do
+ let_it_be(:root_group) { create(:group) }
+
+ context 'when root group' do
+ subject { build(:namespace_commit_email, namespace: root_group) }
+
+ it { is_expected.to be_valid }
+ end
+
+ context 'when subgroup' do
+ subject { build(:namespace_commit_email, namespace: create(:group, parent: root_group)) }
+
+ it 'is invalid and reports the relevant error' do
+ expect(subject).to be_invalid
+ expect(subject.errors[:namespace]).to include('must be a root group.')
+ end
+ end
+ end
end
it { is_expected.to be_valid }
+
+ describe '.delete_for_namespace' do
+ let_it_be(:group) { create(:group) }
+
+ it 'deletes all records for namespace' do
+ create_list(:namespace_commit_email, 3, namespace: group)
+ create(:namespace_commit_email)
+
+ expect { described_class.delete_for_namespace(group) }.to change { described_class.count }.by(-3)
+ end
+ end
end
diff --git a/spec/models/work_item_spec.rb b/spec/models/work_item_spec.rb
index 1c34936c5c2..0bedcc9791f 100644
--- a/spec/models/work_item_spec.rb
+++ b/spec/models/work_item_spec.rb
@@ -21,6 +21,13 @@ RSpec.describe WorkItem, feature_category: :portfolio_management do
.with_foreign_key('work_item_id')
end
+ it 'has many `work_item_children_by_created_at`' do
+ is_expected.to have_many(:work_item_children_by_created_at)
+ .order(created_at: :asc)
+ .class_name('WorkItem')
+ .with_foreign_key('work_item_id')
+ end
+
it 'has many `child_links`' do
is_expected.to have_many(:child_links)
.class_name('::WorkItems::ParentLink')
diff --git a/spec/models/work_items/parent_link_spec.rb b/spec/models/work_items/parent_link_spec.rb
index 82e79e8fbdf..f1aa81f46d2 100644
--- a/spec/models/work_items/parent_link_spec.rb
+++ b/spec/models/work_items/parent_link_spec.rb
@@ -218,4 +218,16 @@ RSpec.describe WorkItems::ParentLink, feature_category: :portfolio_management do
end
end
end
+
+ context 'with relative positioning' do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:work_item_parent) { create(:work_item, project: project) }
+
+ it_behaves_like "a class that supports relative positioning" do
+ let(:factory) { :parent_link }
+ let(:default_params) { { work_item_parent: work_item_parent } }
+ let(:items_with_nil_position_sample_quantity) { 90 }
+ end
+ end
end
diff --git a/spec/models/work_items/widgets/hierarchy_spec.rb b/spec/models/work_items/widgets/hierarchy_spec.rb
index c847f2694fe..43670b30645 100644
--- a/spec/models/work_items/widgets/hierarchy_spec.rb
+++ b/spec/models/work_items/widgets/hierarchy_spec.rb
@@ -2,11 +2,11 @@
require 'spec_helper'
-RSpec.describe WorkItems::Widgets::Hierarchy do
+RSpec.describe WorkItems::Widgets::Hierarchy, feature_category: :team_planning do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:task) { create(:work_item, :task, project: project) }
- let_it_be(:work_item_parent) { create(:work_item, project: project) }
+ let_it_be_with_reload(:work_item_parent) { create(:work_item, project: project) }
describe '.type' do
subject { described_class.type }
@@ -21,7 +21,7 @@ RSpec.describe WorkItems::Widgets::Hierarchy do
end
describe '#parent' do
- let_it_be(:parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item_parent).reload }
+ let_it_be_with_reload(:parent_link) { create(:parent_link, work_item: task, work_item_parent: work_item_parent) }
subject { described_class.new(parent_link.work_item).parent }
@@ -29,11 +29,21 @@ RSpec.describe WorkItems::Widgets::Hierarchy do
end
describe '#children' do
- let_it_be(:parent_link1) { create(:parent_link, work_item_parent: work_item_parent, work_item: task).reload }
- let_it_be(:parent_link2) { create(:parent_link, work_item_parent: work_item_parent).reload }
+ let_it_be_with_reload(:parent_link1) { create(:parent_link, work_item_parent: work_item_parent, work_item: task) }
+ let_it_be_with_reload(:parent_link2) { create(:parent_link, work_item_parent: work_item_parent) }
subject { described_class.new(work_item_parent).children }
it { is_expected.to contain_exactly(parent_link1.work_item, parent_link2.work_item) }
+
+ context 'with default order by created_at' do
+ let_it_be(:oldest_child) { create(:work_item, :task, project: project, created_at: 5.minutes.ago) }
+
+ let_it_be_with_reload(:link_to_oldest_child) do
+ create(:parent_link, work_item_parent: work_item_parent, work_item: oldest_child)
+ end
+
+ it { is_expected.to eq([link_to_oldest_child, parent_link1, parent_link2].map(&:work_item)) }
+ end
end
end
diff --git a/spec/policies/concerns/archived_abilities_spec.rb b/spec/policies/concerns/archived_abilities_spec.rb
index 8e3fd8a209f..d4d0498b0a3 100644
--- a/spec/policies/concerns/archived_abilities_spec.rb
+++ b/spec/policies/concerns/archived_abilities_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe ArchivedAbilities, feature_category: :projects do
end
describe '.archived_abilities' do
- it 'returns an array of abilites to be prevented when archived' do
+ it 'returns an array of abilities to be prevented when archived' do
expect(TestClass.archived_abilities).to include(*described_class::ARCHIVED_ABILITIES)
end
end
diff --git a/spec/policies/global_policy_spec.rb b/spec/policies/global_policy_spec.rb
index 4a8855f1da7..1538f8a70c8 100644
--- a/spec/policies/global_policy_spec.rb
+++ b/spec/policies/global_policy_spec.rb
@@ -2,15 +2,15 @@
require 'spec_helper'
-RSpec.describe GlobalPolicy do
+RSpec.describe GlobalPolicy, feature_category: :security_policies do
include TermsHelper
+ let_it_be(:admin_user) { create(:admin) }
let_it_be(:project_bot) { create(:user, :project_bot) }
let_it_be(:migration_bot) { create(:user, :migration_bot) }
let_it_be(:security_bot) { create(:user, :security_bot) }
-
- let(:current_user) { create(:user) }
- let(:user) { create(:user) }
+ let_it_be_with_reload(:current_user) { create(:user) }
+ let_it_be(:user) { create(:user) }
subject { described_class.new(current_user, [user]) }
@@ -27,7 +27,7 @@ RSpec.describe GlobalPolicy do
stub_application_setting(restricted_visibility_levels: [Gitlab::VisibilityLevel::PUBLIC])
end
- it { is_expected.not_to be_allowed(:read_users_list) }
+ it { is_expected.to be_disallowed(:read_users_list) }
end
context "when the public level is not restricted" do
@@ -40,7 +40,7 @@ RSpec.describe GlobalPolicy do
end
context "for an admin" do
- let_it_be(:current_user) { create(:admin) }
+ let(:current_user) { admin_user }
context "when the public level is restricted" do
before do
@@ -93,7 +93,7 @@ RSpec.describe GlobalPolicy do
context 'when user does not have the ability to create group' do
let(:current_user) { create(:user, can_create_group: false) }
- it { is_expected.not_to be_allowed(:create_group) }
+ it { is_expected.to be_disallowed(:create_group) }
end
end
@@ -107,18 +107,18 @@ RSpec.describe GlobalPolicy do
context 'when user does not have the ability to create group' do
let(:current_user) { create(:user, can_create_group: false) }
- it { is_expected.not_to be_allowed(:create_group_with_default_branch_protection) }
+ it { is_expected.to be_disallowed(:create_group_with_default_branch_protection) }
end
end
describe 'custom attributes' do
context 'regular user' do
- it { is_expected.not_to be_allowed(:read_custom_attribute) }
- it { is_expected.not_to be_allowed(:update_custom_attribute) }
+ it { is_expected.to be_disallowed(:read_custom_attribute) }
+ it { is_expected.to be_disallowed(:update_custom_attribute) }
end
context 'admin' do
- let_it_be(:current_user) { create(:user, :admin) }
+ let(:current_user) { admin_user }
context 'when admin mode is enabled', :enable_admin_mode do
it { is_expected.to be_allowed(:read_custom_attribute) }
@@ -134,11 +134,11 @@ RSpec.describe GlobalPolicy do
describe 'approving users' do
context 'regular user' do
- it { is_expected.not_to be_allowed(:approve_user) }
+ it { is_expected.to be_disallowed(:approve_user) }
end
context 'admin' do
- let_it_be(:current_user) { create(:admin) }
+ let(:current_user) { admin_user }
context 'when admin mode is enabled', :enable_admin_mode do
it { is_expected.to be_allowed(:approve_user) }
@@ -152,11 +152,11 @@ RSpec.describe GlobalPolicy do
describe 'rejecting users' do
context 'regular user' do
- it { is_expected.not_to be_allowed(:reject_user) }
+ it { is_expected.to be_disallowed(:reject_user) }
end
context 'admin' do
- let_it_be(:current_user) { create(:admin) }
+ let(:current_user) { admin_user }
context 'when admin mode is enabled', :enable_admin_mode do
it { is_expected.to be_allowed(:reject_user) }
@@ -170,11 +170,11 @@ RSpec.describe GlobalPolicy do
describe 'using project statistics filters' do
context 'regular user' do
- it { is_expected.not_to be_allowed(:use_project_statistics_filters) }
+ it { is_expected.to be_disallowed(:use_project_statistics_filters) }
end
context 'admin' do
- let_it_be(:current_user) { create(:user, :admin) }
+ let(:current_user) { admin_user }
context 'when admin mode is enabled', :enable_admin_mode do
it { is_expected.to be_allowed(:use_project_statistics_filters) }
@@ -187,7 +187,7 @@ RSpec.describe GlobalPolicy do
end
shared_examples 'access allowed when terms accepted' do |ability|
- it { is_expected.not_to be_allowed(ability) }
+ it { is_expected.to be_disallowed(ability) }
it "allows #{ability} when the user accepted the terms" do
accept_terms(current_user)
@@ -202,7 +202,7 @@ RSpec.describe GlobalPolicy do
end
context 'admin' do
- let(:current_user) { create(:admin) }
+ let(:current_user) { admin_user }
it { is_expected.to be_allowed(:access_api) }
end
@@ -222,13 +222,13 @@ RSpec.describe GlobalPolicy do
context 'migration bot' do
let(:current_user) { migration_bot }
- it { is_expected.not_to be_allowed(:access_api) }
+ it { is_expected.to be_disallowed(:access_api) }
end
context 'security bot' do
let(:current_user) { security_bot }
- it { is_expected.not_to be_allowed(:access_api) }
+ it { is_expected.to be_disallowed(:access_api) }
end
context 'user blocked pending approval' do
@@ -236,7 +236,7 @@ RSpec.describe GlobalPolicy do
current_user.block_pending_approval
end
- it { is_expected.not_to be_allowed(:access_api) }
+ it { is_expected.to be_disallowed(:access_api) }
end
context 'with a deactivated user' do
@@ -244,7 +244,7 @@ RSpec.describe GlobalPolicy do
current_user.deactivate!
end
- it { is_expected.not_to be_allowed(:access_api) }
+ it { is_expected.to be_disallowed(:access_api) }
end
context 'user with expired password' do
@@ -252,7 +252,7 @@ RSpec.describe GlobalPolicy do
current_user.update!(password_expires_at: 2.minutes.ago)
end
- it { is_expected.not_to be_allowed(:access_api) }
+ it { is_expected.to be_disallowed(:access_api) }
context 'when user is using ldap' do
let(:current_user) { create(:omniauth_user, provider: 'ldap', password_expires_at: 2.minutes.ago) }
@@ -271,7 +271,7 @@ RSpec.describe GlobalPolicy do
end
context 'admin' do
- let(:current_user) { create(:admin) }
+ let(:current_user) { admin_user }
it_behaves_like 'access allowed when terms accepted', :access_api
end
@@ -301,7 +301,7 @@ RSpec.describe GlobalPolicy do
allow(User).to receive(:allow_unconfirmed_access_for).and_return(2.days)
end
- it { is_expected.not_to be_allowed(:access_api) }
+ it { is_expected.to be_disallowed(:access_api) }
end
end
end
@@ -312,7 +312,7 @@ RSpec.describe GlobalPolicy do
end
describe 'admin' do
- let(:current_user) { create(:admin) }
+ let(:current_user) { admin_user }
it { is_expected.to be_allowed(:receive_notifications) }
end
@@ -320,7 +320,7 @@ RSpec.describe GlobalPolicy do
describe 'anonymous' do
let(:current_user) { nil }
- it { is_expected.not_to be_allowed(:receive_notifications) }
+ it { is_expected.to be_disallowed(:receive_notifications) }
end
describe 'blocked user' do
@@ -328,7 +328,7 @@ RSpec.describe GlobalPolicy do
current_user.block
end
- it { is_expected.not_to be_allowed(:receive_notifications) }
+ it { is_expected.to be_disallowed(:receive_notifications) }
end
describe 'deactivated user' do
@@ -336,19 +336,19 @@ RSpec.describe GlobalPolicy do
current_user.deactivate
end
- it { is_expected.not_to be_allowed(:receive_notifications) }
+ it { is_expected.to be_disallowed(:receive_notifications) }
end
context 'project bot' do
let(:current_user) { project_bot }
- it { is_expected.not_to be_allowed(:receive_notifications) }
+ it { is_expected.to be_disallowed(:receive_notifications) }
end
context 'migration bot' do
let(:current_user) { migration_bot }
- it { is_expected.not_to be_allowed(:receive_notifications) }
+ it { is_expected.to be_disallowed(:receive_notifications) }
end
context 'user blocked pending approval' do
@@ -356,7 +356,7 @@ RSpec.describe GlobalPolicy do
current_user.block_pending_approval
end
- it { is_expected.not_to be_allowed(:receive_notifications) }
+ it { is_expected.to be_disallowed(:receive_notifications) }
end
end
@@ -366,7 +366,7 @@ RSpec.describe GlobalPolicy do
end
describe 'admin' do
- let(:current_user) { create(:admin) }
+ let(:current_user) { admin_user }
it { is_expected.to be_allowed(:access_git) }
end
@@ -394,7 +394,7 @@ RSpec.describe GlobalPolicy do
current_user.deactivate
end
- it { is_expected.not_to be_allowed(:access_git) }
+ it { is_expected.to be_disallowed(:access_git) }
end
describe 'inactive user' do
@@ -402,7 +402,7 @@ RSpec.describe GlobalPolicy do
current_user.update!(confirmed_at: nil)
end
- it { is_expected.not_to be_allowed(:access_git) }
+ it { is_expected.to be_disallowed(:access_git) }
end
context 'when terms are enforced' do
@@ -438,7 +438,7 @@ RSpec.describe GlobalPolicy do
current_user.block_pending_approval
end
- it { is_expected.not_to be_allowed(:access_git) }
+ it { is_expected.to be_disallowed(:access_git) }
end
context 'user with expired password' do
@@ -446,7 +446,7 @@ RSpec.describe GlobalPolicy do
current_user.update!(password_expires_at: 2.minutes.ago)
end
- it { is_expected.not_to be_allowed(:access_git) }
+ it { is_expected.to be_disallowed(:access_git) }
context 'when user is using ldap' do
let(:current_user) { create(:omniauth_user, provider: 'ldap', password_expires_at: 2.minutes.ago) }
@@ -464,7 +464,7 @@ RSpec.describe GlobalPolicy do
context 'anonymous' do
let(:current_user) { nil }
- it { is_expected.not_to be_allowed(:read_instance_metadata) }
+ it { is_expected.to be_disallowed(:read_instance_metadata) }
end
end
@@ -476,7 +476,7 @@ RSpec.describe GlobalPolicy do
context 'when internal' do
let(:current_user) { User.ghost }
- it { is_expected.not_to be_allowed(:use_slash_commands) }
+ it { is_expected.to be_disallowed(:use_slash_commands) }
end
context 'when blocked' do
@@ -484,7 +484,7 @@ RSpec.describe GlobalPolicy do
current_user.block
end
- it { is_expected.not_to be_allowed(:use_slash_commands) }
+ it { is_expected.to be_disallowed(:use_slash_commands) }
end
context 'when deactivated' do
@@ -492,7 +492,7 @@ RSpec.describe GlobalPolicy do
current_user.deactivate
end
- it { is_expected.not_to be_allowed(:use_slash_commands) }
+ it { is_expected.to be_disallowed(:use_slash_commands) }
end
describe 'inactive user' do
@@ -500,7 +500,7 @@ RSpec.describe GlobalPolicy do
current_user.update!(confirmed_at: nil)
end
- it { is_expected.not_to be_allowed(:use_slash_commands) }
+ it { is_expected.to be_disallowed(:use_slash_commands) }
end
context 'when access locked' do
@@ -508,7 +508,7 @@ RSpec.describe GlobalPolicy do
current_user.lock_access!
end
- it { is_expected.not_to be_allowed(:use_slash_commands) }
+ it { is_expected.to be_disallowed(:use_slash_commands) }
end
context 'project bot' do
@@ -520,7 +520,7 @@ RSpec.describe GlobalPolicy do
context 'migration bot' do
let(:current_user) { migration_bot }
- it { is_expected.not_to be_allowed(:use_slash_commands) }
+ it { is_expected.to be_disallowed(:use_slash_commands) }
end
context 'user blocked pending approval' do
@@ -528,7 +528,7 @@ RSpec.describe GlobalPolicy do
current_user.block_pending_approval
end
- it { is_expected.not_to be_allowed(:use_slash_commands) }
+ it { is_expected.to be_disallowed(:use_slash_commands) }
end
context 'user with expired password' do
@@ -536,7 +536,7 @@ RSpec.describe GlobalPolicy do
current_user.update!(password_expires_at: 2.minutes.ago)
end
- it { is_expected.not_to be_allowed(:use_slash_commands) }
+ it { is_expected.to be_disallowed(:use_slash_commands) }
context 'when user is using ldap' do
let(:current_user) { create(:omniauth_user, provider: 'ldap', password_expires_at: 2.minutes.ago) }
@@ -550,7 +550,7 @@ RSpec.describe GlobalPolicy do
context 'when anonymous' do
let(:current_user) { nil }
- it { is_expected.not_to be_allowed(:create_snippet) }
+ it { is_expected.to be_disallowed(:create_snippet) }
end
context 'regular user' do
@@ -560,7 +560,7 @@ RSpec.describe GlobalPolicy do
context 'when external' do
let(:current_user) { build(:user, :external) }
- it { is_expected.not_to be_allowed(:create_snippet) }
+ it { is_expected.to be_disallowed(:create_snippet) }
end
end
@@ -568,19 +568,19 @@ RSpec.describe GlobalPolicy do
context 'project bot' do
let(:current_user) { project_bot }
- it { is_expected.not_to be_allowed(:log_in) }
+ it { is_expected.to be_disallowed(:log_in) }
end
context 'migration bot' do
let(:current_user) { migration_bot }
- it { is_expected.not_to be_allowed(:log_in) }
+ it { is_expected.to be_disallowed(:log_in) }
end
context 'security bot' do
let(:current_user) { security_bot }
- it { is_expected.not_to be_allowed(:log_in) }
+ it { is_expected.to be_disallowed(:log_in) }
end
context 'user blocked pending approval' do
@@ -588,7 +588,7 @@ RSpec.describe GlobalPolicy do
current_user.block_pending_approval
end
- it { is_expected.not_to be_allowed(:log_in) }
+ it { is_expected.to be_disallowed(:log_in) }
end
end
end
diff --git a/spec/policies/group_policy_spec.rb b/spec/policies/group_policy_spec.rb
index 65abb43b6c4..2d4c86845c9 100644
--- a/spec/policies/group_policy_spec.rb
+++ b/spec/policies/group_policy_spec.rb
@@ -157,7 +157,7 @@ RSpec.describe GroupPolicy do
let(:current_user) { maintainer }
context 'with subgroup_creation level set to maintainer' do
- before_all do
+ before do
group.update!(subgroup_creation_level: ::Gitlab::Access::MAINTAINER_SUBGROUP_ACCESS)
end
@@ -550,7 +550,7 @@ RSpec.describe GroupPolicy do
context 'create_projects' do
context 'when group has no project creation level set' do
- before_all do
+ before do
group.update!(project_creation_level: nil)
end
@@ -580,7 +580,7 @@ RSpec.describe GroupPolicy do
end
context 'when group has project creation level set to no one' do
- before_all do
+ before do
group.update!(project_creation_level: ::Gitlab::Access::NO_ONE_PROJECT_ACCESS)
end
@@ -610,7 +610,7 @@ RSpec.describe GroupPolicy do
end
context 'when group has project creation level set to maintainer only' do
- before_all do
+ before do
group.update!(project_creation_level: ::Gitlab::Access::MAINTAINER_PROJECT_ACCESS)
end
@@ -640,7 +640,7 @@ RSpec.describe GroupPolicy do
end
context 'when group has project creation level set to developers + maintainer' do
- before_all do
+ before do
group.update!(project_creation_level: ::Gitlab::Access::DEVELOPER_MAINTAINER_PROJECT_ACCESS)
end
@@ -672,7 +672,7 @@ RSpec.describe GroupPolicy do
context 'create_subgroup' do
context 'when group has subgroup creation level set to owner' do
- before_all do
+ before do
group.update!(subgroup_creation_level: ::Gitlab::Access::OWNER_SUBGROUP_ACCESS)
end
@@ -702,7 +702,7 @@ RSpec.describe GroupPolicy do
end
context 'when group has subgroup creation level set to maintainer' do
- before_all do
+ before do
group.update!(subgroup_creation_level: ::Gitlab::Access::MAINTAINER_SUBGROUP_ACCESS)
end
@@ -1073,7 +1073,7 @@ RSpec.describe GroupPolicy do
it_behaves_like 'Self-managed Core resource access tokens'
context 'support bot' do
- let_it_be(:group) { create(:group, :private, :crm_enabled) }
+ let_it_be_with_refind(:group) { create(:group, :private, :crm_enabled) }
let_it_be(:current_user) { User.support_bot }
before do
@@ -1351,9 +1351,8 @@ RSpec.describe GroupPolicy do
context 'when crm_enabled is false' do
let(:current_user) { owner }
- before_all do
- group.crm_settings.enabled = false
- group.crm_settings.save!
+ before do
+ group.crm_settings.update!(enabled: false)
end
it { is_expected.to be_disallowed(:read_crm_contact) }
diff --git a/spec/policies/issue_policy_spec.rb b/spec/policies/issue_policy_spec.rb
index 905ef591b53..0040d9dff7e 100644
--- a/spec/policies/issue_policy_spec.rb
+++ b/spec/policies/issue_policy_spec.rb
@@ -87,49 +87,49 @@ RSpec.describe IssuePolicy, feature_category: :team_planning do
end
it 'allows guests to read issues' do
- expect(permissions(guest, issue)).to be_allowed(:read_issue, :read_issue_iid)
- expect(permissions(guest, issue)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :mark_note_as_confidential)
+ expect(permissions(guest, issue)).to be_allowed(:read_issue, :read_issue_iid, :admin_issue_relation)
+ expect(permissions(guest, issue)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :mark_note_as_internal)
- expect(permissions(guest, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid)
+ expect(permissions(guest, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :admin_issue_relation)
expect(permissions(guest, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
expect(permissions(guest, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality)
end
it 'allows reporters to read, update, admin and create confidential notes' do
- expect(permissions(reporter, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(reporter, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(reporter, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality, :mark_note_as_confidential)
+ expect(permissions(reporter, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
+ expect(permissions(reporter, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
+ expect(permissions(reporter, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality, :mark_note_as_internal, :admin_issue_relation)
end
it 'allows reporters from group links to read, update, and admin issues' do
- expect(permissions(reporter_from_group_link, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(reporter_from_group_link, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(reporter_from_group_link, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
+ expect(permissions(reporter_from_group_link, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
end
it 'allows issue authors to read and update their issues' do
- expect(permissions(author, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue)
+ expect(permissions(author, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue_relation)
expect(permissions(author, issue)).to be_disallowed(:admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(author, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid)
+ expect(permissions(author, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :admin_issue_relation)
expect(permissions(author, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(author, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(author, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
end
it 'allows issue assignees to read and update their issues' do
- expect(permissions(assignee, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue)
+ expect(permissions(assignee, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue_relation)
expect(permissions(assignee, issue)).to be_disallowed(:admin_issue, :set_issue_metadata, :set_confidentiality)
expect(permissions(assignee, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid)
expect(permissions(assignee, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(assignee, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(assignee, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
end
it 'does not allow non-members to read, update or create issues' do
- expect(permissions(non_member, issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(non_member, issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(non_member, issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
+ expect(permissions(non_member, issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
expect(permissions(non_member, new_issue)).to be_disallowed(:create_issue, :set_issue_metadata, :set_confidentiality)
end
@@ -142,50 +142,50 @@ RSpec.describe IssuePolicy, feature_category: :team_planning do
let(:confidential_issue_no_assignee) { create(:issue, :confidential, project: project) }
it 'does not allow non-members to read confidential issues' do
- expect(permissions(non_member, confidential_issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue)
- expect(permissions(non_member, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(non_member, confidential_issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :admin_issue_relation)
+ expect(permissions(non_member, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
end
it 'does not allow guests to read confidential issues' do
- expect(permissions(guest, confidential_issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue)
- expect(permissions(guest, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(guest, confidential_issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :admin_issue_relation)
+ expect(permissions(guest, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
end
it 'allows reporters to read, update, and admin confidential issues' do
- expect(permissions(reporter, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(reporter, confidential_issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(reporter, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
+ expect(permissions(reporter, confidential_issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
end
it 'allows reporters from group links to read, update, and admin confidential issues' do
- expect(permissions(reporter_from_group_link, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(reporter_from_group_link, confidential_issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(reporter_from_group_link, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
+ expect(permissions(reporter_from_group_link, confidential_issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
end
it 'allows issue authors to read and update their confidential issues' do
- expect(permissions(author, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue)
+ expect(permissions(author, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue_relation)
expect(permissions(author, confidential_issue)).to be_disallowed(:admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(author, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue)
+ expect(permissions(author, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :admin_issue_relation)
expect(permissions(author, confidential_issue_no_assignee)).to be_disallowed(:admin_issue, :set_issue_metadata, :set_confidentiality)
end
it 'does not allow issue author to read or update confidential issue moved to a private project' do
confidential_issue.project = create(:project, :private)
- expect(permissions(author, confidential_issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(author, confidential_issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
end
it 'allows issue assignees to read and update their confidential issues' do
expect(permissions(assignee, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue)
expect(permissions(assignee, confidential_issue)).to be_disallowed(:admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(assignee, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(assignee, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
end
it 'does not allow issue assignees to read or update confidential issue moved to a private project' do
confidential_issue.project = create(:project, :private)
- expect(permissions(assignee, confidential_issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(assignee, confidential_issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
end
end
end
@@ -210,61 +210,61 @@ RSpec.describe IssuePolicy, feature_category: :team_planning do
it 'does not allow anonymous user to create todos' do
expect(permissions(nil, issue)).to be_allowed(:read_issue)
- expect(permissions(nil, issue)).to be_disallowed(:create_todo, :update_subscription, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(nil, issue)).to be_disallowed(:create_todo, :update_subscription, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
expect(permissions(nil, new_issue)).to be_disallowed(:create_issue, :set_issue_metadata, :set_confidentiality)
end
it 'allows guests to read issues' do
- expect(permissions(guest, issue)).to be_allowed(:read_issue, :read_issue_iid, :create_todo, :update_subscription)
+ expect(permissions(guest, issue)).to be_allowed(:read_issue, :read_issue_iid, :create_todo, :update_subscription, :admin_issue_relation)
expect(permissions(guest, issue)).to be_disallowed(:update_issue, :admin_issue, :reopen_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(guest, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid)
+ expect(permissions(guest, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :admin_issue_relation)
expect(permissions(guest, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue, :reopen_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(guest, issue_locked)).to be_allowed(:read_issue, :read_issue_iid)
+ expect(permissions(guest, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :admin_issue_relation)
expect(permissions(guest, issue_locked)).to be_disallowed(:update_issue, :admin_issue, :reopen_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(guest, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(guest, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
end
it 'allows reporters to read, update, reopen, and admin issues' do
- expect(permissions(reporter, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :reopen_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(reporter, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :reopen_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(reporter, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(reporter, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :reopen_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
+ expect(permissions(reporter, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :reopen_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
+ expect(permissions(reporter, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
expect(permissions(reporter, issue_locked)).to be_disallowed(:reopen_issue)
- expect(permissions(reporter, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(reporter, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
end
it 'allows reporters from group links to read, update, reopen and admin issues' do
- expect(permissions(reporter_from_group_link, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :reopen_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(reporter_from_group_link, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :reopen_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
expect(permissions(reporter_from_group_link, issue_no_assignee)).to be_allowed(:reopen_issue)
- expect(permissions(reporter_from_group_link, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(reporter_from_group_link, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
expect(permissions(reporter_from_group_link, issue_locked)).to be_disallowed(:reopen_issue)
- expect(permissions(reporter, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(reporter, new_issue)).to be_allowed(:create_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
end
it 'allows issue authors to read, reopen and update their issues' do
- expect(permissions(author, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :reopen_issue)
+ expect(permissions(author, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :reopen_issue, :admin_issue_relation)
expect(permissions(author, issue)).to be_disallowed(:admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(author, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid)
+ expect(permissions(author, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :admin_issue_relation)
expect(permissions(author, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue, :reopen_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(author, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :update_issue)
+ expect(permissions(author, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue_relation)
expect(permissions(author, issue_locked)).to be_disallowed(:admin_issue, :reopen_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(author, new_issue)).to be_allowed(:create_issue)
+ expect(permissions(author, new_issue)).to be_allowed(:create_issue, :admin_issue_relation)
expect(permissions(author, new_issue)).to be_disallowed(:set_issue_metadata)
end
it 'allows issue assignees to read, reopen and update their issues' do
- expect(permissions(assignee, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :reopen_issue)
+ expect(permissions(assignee, issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :reopen_issue, :admin_issue_relation)
expect(permissions(assignee, issue)).to be_disallowed(:admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(assignee, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid)
+ expect(permissions(assignee, issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :admin_issue_relation)
expect(permissions(assignee, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue, :reopen_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(assignee, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :update_issue)
+ expect(permissions(assignee, issue_locked)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue_relation)
expect(permissions(assignee, issue_locked)).to be_disallowed(:admin_issue, :reopen_issue, :set_issue_metadata, :set_confidentiality)
end
@@ -335,6 +335,10 @@ RSpec.describe IssuePolicy, feature_category: :team_planning do
expect(permissions(guest, issue)).to be_allowed(:update_subscription)
end
+ it 'allows guests to admin issue relations' do
+ expect(permissions(guest, issue)).to be_allowed(:admin_issue_relation)
+ end
+
context 'when admin mode is enabled', :enable_admin_mode do
it 'allows admins to view' do
expect(permissions(admin, issue)).to be_allowed(:read_issue)
@@ -356,9 +360,9 @@ RSpec.describe IssuePolicy, feature_category: :team_planning do
end
it 'does not allow non-members to update or create issues' do
- expect(permissions(non_member, issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(non_member, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
- expect(permissions(non_member, new_issue)).to be_disallowed(:create_issue, :set_issue_metadata, :set_confidentiality)
+ expect(permissions(non_member, issue)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
+ expect(permissions(non_member, issue_no_assignee)).to be_disallowed(:update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
+ expect(permissions(non_member, new_issue)).to be_disallowed(:create_issue, :set_issue_metadata, :set_confidentiality, :admin_issue_relation)
end
it_behaves_like 'alert bot'
@@ -376,24 +380,24 @@ RSpec.describe IssuePolicy, feature_category: :team_planning do
end
it 'allows reporters to read, update, and admin confidential issues' do
- expect(permissions(reporter, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue)
+ expect(permissions(reporter, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :admin_issue_relation)
expect(permissions(reporter, confidential_issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
end
it 'allows reporters from group links to read, update, and admin confidential issues' do
- expect(permissions(reporter_from_group_link, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue)
+ expect(permissions(reporter_from_group_link, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :admin_issue_relation)
expect(permissions(reporter_from_group_link, confidential_issue_no_assignee)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
end
it 'allows issue authors to read and update their confidential issues' do
- expect(permissions(author, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue)
+ expect(permissions(author, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue_relation)
expect(permissions(author, confidential_issue)).to be_disallowed(:admin_issue, :set_issue_metadata, :set_confidentiality)
expect(permissions(author, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
end
it 'allows issue assignees to read and update their confidential issues' do
- expect(permissions(assignee, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue)
+ expect(permissions(assignee, confidential_issue)).to be_allowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue_relation)
expect(permissions(assignee, confidential_issue)).to be_disallowed(:admin_issue, :set_issue_metadata, :set_confidentiality)
expect(permissions(assignee, confidential_issue_no_assignee)).to be_disallowed(:read_issue, :read_issue_iid, :update_issue, :admin_issue, :set_issue_metadata, :set_confidentiality)
@@ -432,8 +436,8 @@ RSpec.describe IssuePolicy, feature_category: :team_planning do
it 'does not allow accessing notes' do
# if the notes widget is disabled, not even a maintainer can access notes
- expect(permissions(maintainer, task)).to be_disallowed(:create_note, :read_note, :mark_note_as_confidential, :read_internal_note)
- expect(permissions(admin, task)).to be_disallowed(:create_note, :read_note, :read_internal_note, :mark_note_as_confidential, :set_note_created_at)
+ expect(permissions(maintainer, task)).to be_disallowed(:create_note, :read_note, :mark_note_as_internal, :read_internal_note)
+ expect(permissions(admin, task)).to be_disallowed(:create_note, :read_note, :read_internal_note, :mark_note_as_internal, :set_note_created_at)
end
end
@@ -441,10 +445,10 @@ RSpec.describe IssuePolicy, feature_category: :team_planning do
it 'allows accessing notes' do
# with notes widget enabled, even guests can access notes
expect(permissions(guest, issue)).to be_allowed(:create_note, :read_note)
- expect(permissions(guest, issue)).to be_disallowed(:read_internal_note, :mark_note_as_confidential, :set_note_created_at)
- expect(permissions(reporter, issue)).to be_allowed(:create_note, :read_note, :read_internal_note, :mark_note_as_confidential)
- expect(permissions(maintainer, issue)).to be_allowed(:create_note, :read_note, :read_internal_note, :mark_note_as_confidential)
- expect(permissions(owner, issue)).to be_allowed(:create_note, :read_note, :read_internal_note, :mark_note_as_confidential, :set_note_created_at)
+ expect(permissions(guest, issue)).to be_disallowed(:read_internal_note, :mark_note_as_internal, :set_note_created_at)
+ expect(permissions(reporter, issue)).to be_allowed(:create_note, :read_note, :read_internal_note, :mark_note_as_internal)
+ expect(permissions(maintainer, issue)).to be_allowed(:create_note, :read_note, :read_internal_note, :mark_note_as_internal)
+ expect(permissions(owner, issue)).to be_allowed(:create_note, :read_note, :read_internal_note, :mark_note_as_internal, :set_note_created_at)
end
end
end
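
The :admin_issue_relation assertions added above follow one pattern: every signed-in user who can read an issue may also manage its relations, while anonymous users and non-members may not. A minimal sketch of such a rule in GitLab's DeclarativePolicy DSL follows; it is illustrative only, not the shipped IssuePolicy code.

# Hypothetical sketch, assuming DeclarativePolicy's rule/enable DSL and the
# `anonymous` condition provided by BasePolicy.
class IssuePolicy < IssuablePolicy
  rule { ~anonymous & can?(:read_issue) }.enable :admin_issue_relation
end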
diff --git a/spec/policies/merge_request_policy_spec.rb b/spec/policies/merge_request_policy_spec.rb
index 741a0db3009..c21e1244402 100644
--- a/spec/policies/merge_request_policy_spec.rb
+++ b/spec/policies/merge_request_policy_spec.rb
@@ -461,4 +461,34 @@ RSpec.describe MergeRequestPolicy do
end
end
end
+
+ context 'when the author of the merge request is banned', feature_category: :insider_threat do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:admin) { create(:user, :admin) }
+ let_it_be(:author) { create(:user, :banned) }
+ let_it_be(:project) { create(:project, :public) }
+ let_it_be(:hidden_merge_request) { create(:merge_request, source_project: project, author: author) }
+
+ it 'does not allow non-admin user to read the merge_request' do
+ expect(permissions(user, hidden_merge_request)).not_to be_allowed(:read_merge_request)
+ end
+
+ it 'allows admin to read the merge_request', :enable_admin_mode do
+ expect(permissions(admin, hidden_merge_request)).to be_allowed(:read_merge_request)
+ end
+
+ context 'when the `hide_merge_requests_from_banned_users` feature flag is disabled' do
+ before do
+ stub_feature_flags(hide_merge_requests_from_banned_users: false)
+ end
+
+ it 'allows non-admin users to read the merge_request' do
+ expect(permissions(user, hidden_merge_request)).to be_allowed(:read_merge_request)
+ end
+
+ it 'allows admin users to read the merge_request', :enable_admin_mode do
+ expect(permissions(admin, hidden_merge_request)).to be_allowed(:read_merge_request)
+ end
+ end
+ end
end
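
The banned-author examples above describe a merge request that is hidden from non-admin readers while its author is banned, gated by the hide_merge_requests_from_banned_users feature flag. A hedged sketch of one way to express that in the policy DSL; the condition name and flag lookup are illustrative, not the shipped MergeRequestPolicy.

# Hypothetical sketch only.
class MergeRequestPolicy < IssuablePolicy
  condition(:hidden, scope: :subject) do
    Feature.enabled?(:hide_merge_requests_from_banned_users) && @subject.author&.banned?
  end

  # Admins (with admin mode) can still read hidden merge requests.
  rule { hidden & ~admin }.prevent :read_merge_request
end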
diff --git a/spec/policies/note_policy_spec.rb b/spec/policies/note_policy_spec.rb
index dcfc398806a..f4abe3a223c 100644
--- a/spec/policies/note_policy_spec.rb
+++ b/spec/policies/note_policy_spec.rb
@@ -311,7 +311,7 @@ RSpec.describe NotePolicy, feature_category: :team_planning do
end
end
- context 'with confidential notes' do
+ context 'with internal notes' do
def permissions(user, note)
described_class.new(user, note)
end
@@ -332,54 +332,54 @@ RSpec.describe NotePolicy, feature_category: :team_planning do
project.add_guest(guest)
end
- shared_examples_for 'confidential notes permissions' do
- it 'does not allow non members to read confidential notes and replies' do
- expect(permissions(non_member, confidential_note)).to be_disallowed(:read_note, :admin_note, :reposition_note, :resolve_note, :award_emoji, :mark_note_as_confidential)
+ shared_examples_for 'internal notes permissions' do
+ it 'does not allow non members to read internal notes and replies' do
+ expect(permissions(non_member, internal_note)).to be_disallowed(:read_note, :admin_note, :reposition_note, :resolve_note, :award_emoji, :mark_note_as_internal)
end
- it 'does not allow guests to read confidential notes and replies' do
- expect(permissions(guest, confidential_note)).to be_disallowed(:read_note, :read_internal_note, :admin_note, :reposition_note, :resolve_note, :award_emoji, :mark_note_as_confidential)
+ it 'does not allow guests to read internal notes and replies' do
+ expect(permissions(guest, internal_note)).to be_disallowed(:read_note, :read_internal_note, :admin_note, :reposition_note, :resolve_note, :award_emoji, :mark_note_as_internal)
end
it 'allows reporter to read all notes but not resolve or admin them' do
- expect(permissions(reporter, confidential_note)).to be_allowed(:read_note, :award_emoji, :mark_note_as_confidential)
- expect(permissions(reporter, confidential_note)).to be_disallowed(:admin_note, :reposition_note, :resolve_note)
+ expect(permissions(reporter, internal_note)).to be_allowed(:read_note, :award_emoji, :mark_note_as_internal)
+ expect(permissions(reporter, internal_note)).to be_disallowed(:admin_note, :reposition_note, :resolve_note)
end
it 'allows developer to read and resolve all notes' do
- expect(permissions(developer, confidential_note)).to be_allowed(:read_note, :award_emoji, :resolve_note, :mark_note_as_confidential)
- expect(permissions(developer, confidential_note)).to be_disallowed(:admin_note, :reposition_note)
+ expect(permissions(developer, internal_note)).to be_allowed(:read_note, :award_emoji, :resolve_note, :mark_note_as_internal)
+ expect(permissions(developer, internal_note)).to be_disallowed(:admin_note, :reposition_note)
end
it 'allows maintainers to read all notes and admin them' do
- expect(permissions(maintainer, confidential_note)).to be_allowed(:read_note, :admin_note, :reposition_note, :resolve_note, :award_emoji, :mark_note_as_confidential)
+ expect(permissions(maintainer, internal_note)).to be_allowed(:read_note, :admin_note, :reposition_note, :resolve_note, :award_emoji, :mark_note_as_internal)
end
context 'when admin mode is enabled', :enable_admin_mode do
it 'allows admins to read all notes and admin them' do
- expect(permissions(admin, confidential_note)).to be_allowed(:read_note, :admin_note, :reposition_note, :resolve_note, :award_emoji, :mark_note_as_confidential)
+ expect(permissions(admin, internal_note)).to be_allowed(:read_note, :admin_note, :reposition_note, :resolve_note, :award_emoji, :mark_note_as_internal)
end
end
context 'when admin mode is disabled' do
- it 'does not allow non members to read confidential notes and replies' do
- expect(permissions(admin, confidential_note)).to be_disallowed(:read_note, :admin_note, :reposition_note, :resolve_note, :award_emoji, :mark_note_as_confidential)
+ it 'does not allow admins to read internal notes and replies' do
+ expect(permissions(admin, internal_note)).to be_disallowed(:read_note, :admin_note, :reposition_note, :resolve_note, :award_emoji, :mark_note_as_internal)
end
end
it 'disallows noteable author to read and resolve all notes' do
- expect(permissions(author, confidential_note)).to be_disallowed(:read_note, :resolve_note, :award_emoji, :mark_note_as_confidential, :admin_note, :reposition_note)
+ expect(permissions(author, internal_note)).to be_disallowed(:read_note, :resolve_note, :award_emoji, :mark_note_as_internal, :admin_note, :reposition_note)
end
end
context 'for issues' do
let(:issue) { create(:issue, project: project, author: author, assignees: [assignee]) }
- let(:confidential_note) { create(:note, :confidential, project: project, noteable: issue) }
+ let(:internal_note) { create(:note, :confidential, project: project, noteable: issue) }
- it_behaves_like 'confidential notes permissions'
+ it_behaves_like 'internal notes permissions'
it 'disallows noteable assignees to read all notes' do
- expect(permissions(assignee, confidential_note)).to be_disallowed(:read_note, :award_emoji, :mark_note_as_confidential, :admin_note, :reposition_note, :resolve_note)
+ expect(permissions(assignee, internal_note)).to be_disallowed(:read_note, :award_emoji, :mark_note_as_internal, :admin_note, :reposition_note, :resolve_note)
end
end
end
diff --git a/spec/policies/project_group_link_policy_spec.rb b/spec/policies/project_group_link_policy_spec.rb
new file mode 100644
index 00000000000..7c8a4619e47
--- /dev/null
+++ b/spec/policies/project_group_link_policy_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ProjectGroupLinkPolicy, feature_category: :authentication_and_authorization do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group, :private) }
+ let_it_be(:group2) { create(:group, :private) }
+ let_it_be(:project) { create(:project, :private, group: group) }
+
+ let(:project_group_link) do
+ create(:project_group_link, project: project, group: group2, group_access: Gitlab::Access::DEVELOPER)
+ end
+
+ subject(:policy) { described_class.new(user, project_group_link) }
+
+ context 'when the user is a group owner' do
+ before do
+ project_group_link.group.add_owner(user)
+ end
+
+ context 'when user is not project maintainer' do
+ it 'can admin project_group_link' do
+ expect(policy).to be_allowed(:admin_project_group_link)
+ end
+ end
+
+ context 'when user is a project maintainer' do
+ before do
+ project_group_link.project.add_maintainer(user)
+ end
+
+ it 'can admin project_group_link' do
+ expect(policy).to be_allowed(:admin_project_group_link)
+ end
+ end
+ end
+
+ context 'when user is not a group owner' do
+ context 'when user is a project maintainer' do
+ it 'can admin project_group_link' do
+ project_group_link.project.add_maintainer(user)
+
+ expect(policy).to be_allowed(:admin_project_group_link)
+ end
+ end
+
+ context 'when user is not a project maintainer' do
+ it 'cannot admin project_group_link' do
+ project_group_link.project.add_developer(user)
+
+ expect(policy).to be_disallowed(:admin_project_group_link)
+ end
+ end
+ end
+end
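
The new spec grants :admin_project_group_link on two independent paths: owning the group the project is shared with, or maintaining the project itself. A minimal policy sketch under those assumptions (DeclarativePolicy DSL; the conditions are approximations, not the shipped class):

# Hypothetical sketch only.
class ProjectGroupLinkPolicy < BasePolicy
  # "Group owner" approximated via the admin_group ability on the shared group.
  condition(:group_owner) { can?(:admin_group, @subject.group) }
  condition(:project_maintainer) { can?(:admin_project, @subject.project) }

  rule { group_owner | project_maintainer }.enable :admin_project_group_link
end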
diff --git a/spec/policies/project_policy_spec.rb b/spec/policies/project_policy_spec.rb
index e370f536519..a98f091b9fc 100644
--- a/spec/policies/project_policy_spec.rb
+++ b/spec/policies/project_policy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProjectPolicy do
+RSpec.describe ProjectPolicy, feature_category: :authentication_and_authorization do
include ExternalAuthorizationServiceHelpers
include AdminModeHelper
include_context 'ProjectPolicy context'
diff --git a/spec/policies/resource_label_event_policy_spec.rb b/spec/policies/resource_label_event_policy_spec.rb
index eff2b0e1af5..66a249c38d9 100644
--- a/spec/policies/resource_label_event_policy_spec.rb
+++ b/spec/policies/resource_label_event_policy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ResourceLabelEventPolicy do
+RSpec.describe ResourceLabelEventPolicy, feature_category: :team_planning do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :private) }
let_it_be(:issue) { create(:issue, project: project) }
diff --git a/spec/policies/resource_milestone_event_policy_spec.rb b/spec/policies/resource_milestone_event_policy_spec.rb
new file mode 100644
index 00000000000..22d1f837ae3
--- /dev/null
+++ b/spec/policies/resource_milestone_event_policy_spec.rb
@@ -0,0 +1,73 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ResourceMilestoneEventPolicy, feature_category: :team_planning do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:issue) { create(:issue, project: project) }
+ let_it_be(:private_project) { create(:project, :private) }
+
+ describe '#read_resource_milestone_event' do
+ context 'with non-member user' do
+ it 'does not allow to read event' do
+ event = build_event(project)
+
+ expect(permissions(user, event)).to be_disallowed(:read_milestone, :read_resource_milestone_event, :read_note)
+ end
+ end
+
+ context 'with member user' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'allows to read event for accessible milestone' do
+ event = build_event(project)
+
+ expect(permissions(user, event)).to be_allowed(:read_milestone, :read_resource_milestone_event, :read_note)
+ end
+
+ it 'does not allow to read event for not accessible milestone' do
+ event = build_event(private_project)
+
+ expect(permissions(user, event)).to be_disallowed(:read_milestone, :read_resource_milestone_event, :read_note)
+ end
+ end
+ end
+
+ describe '#read_milestone' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'allows to read deleted milestone' do
+ event = build(:resource_milestone_event, issue: issue, milestone: nil)
+
+ expect(permissions(user, event)).to be_allowed(:read_milestone, :read_resource_milestone_event, :read_note)
+ end
+
+ it 'allows to read accessible milestone' do
+ event = build_event(project)
+
+ expect(permissions(user, event)).to be_allowed(:read_milestone, :read_resource_milestone_event, :read_note)
+ end
+
+ it 'does not allow to read not accessible milestone' do
+ event = build_event(private_project)
+
+ expect(permissions(user, event)).to be_disallowed(:read_milestone, :read_resource_milestone_event, :read_note)
+ end
+ end
+
+ def build_event(project)
+ milestone = create(:milestone, project: project)
+
+ build(:resource_milestone_event, issue: issue, milestone: milestone)
+ end
+
+ def permissions(user, event)
+ described_class.new(user, event)
+ end
+end
diff --git a/spec/policies/resource_state_event_policy_spec.rb b/spec/policies/resource_state_event_policy_spec.rb
new file mode 100644
index 00000000000..30f52f45c37
--- /dev/null
+++ b/spec/policies/resource_state_event_policy_spec.rb
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe ResourceStateEventPolicy, feature_category: :team_planning do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :private) }
+ let_it_be(:issue) { create(:issue, project: project) }
+
+ describe '#read_resource_state_event' do
+ context 'with non-member user' do
+ it 'does not allow to read event' do
+ event = build_event(project)
+
+ expect(permissions(user, event)).to be_disallowed(:read_resource_state_event, :read_note)
+ end
+ end
+
+ context 'with member user' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'allows to read event for a state change' do
+ event = build_event(project)
+
+ expect(permissions(user, event)).to be_allowed(:read_resource_state_event, :read_note)
+ end
+ end
+ end
+
+ def build_event(_project)
+ build(:resource_state_event, issue: issue, state: 2)
+ end
+
+ def permissions(user, event)
+ described_class.new(user, event)
+ end
+end
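
Both new resource-event policy specs tie read access to visibility of the parent issue (and, for milestone events, of the milestone itself). A hedged sketch of the state-event case; the condition name is illustrative and not taken from the shipped policy.

# Hypothetical sketch only.
class ResourceStateEventPolicy < BasePolicy
  condition(:can_read_issuable) { can?(:read_issue, @subject.issue) }

  rule { can_read_issuable }.enable :read_resource_state_event
  rule { can_read_issuable }.enable :read_note
end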
diff --git a/spec/policies/todo_policy_spec.rb b/spec/policies/todo_policy_spec.rb
index 34ba7bf9276..fa62f53c628 100644
--- a/spec/policies/todo_policy_spec.rb
+++ b/spec/policies/todo_policy_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe TodoPolicy do
+RSpec.describe TodoPolicy, feature_category: :project_management do
using RSpec::Parameterized::TableSyntax
let_it_be(:project) { create(:project) }
diff --git a/spec/policies/user_policy_spec.rb b/spec/policies/user_policy_spec.rb
index d02a94b810e..94b7e295167 100644
--- a/spec/policies/user_policy_spec.rb
+++ b/spec/policies/user_policy_spec.rb
@@ -246,4 +246,30 @@ RSpec.describe UserPolicy do
end
end
end
+
+ describe ':read_user_email_address' do
+ context 'when user is admin' do
+ let(:current_user) { admin }
+
+ context 'when admin mode is enabled', :enable_admin_mode do
+ it { is_expected.to be_allowed(:read_user_email_address) }
+ end
+
+ context 'when admin mode is disabled' do
+ it { is_expected.not_to be_allowed(:read_user_email_address) }
+ end
+ end
+
+ context 'when user is not an admin' do
+ context 'requesting their own' do
+ subject { described_class.new(current_user, current_user) }
+
+ it { is_expected.to be_allowed(:read_user_email_address) }
+ end
+
+ context "requesting a different user's" do
+ it { is_expected.not_to be_allowed(:read_user_email_address) }
+ end
+ end
+ end
end
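
The :read_user_email_address examples encode two grants: administrators with admin mode enabled, and users reading their own record. A hedged sketch follows; BasePolicy's `admin` condition is assumed to already account for admin mode, and the self condition is illustrative rather than the shipped UserPolicy.

# Hypothetical sketch only.
class UserPolicy < BasePolicy
  condition(:user_is_self) { @user && @subject == @user }

  rule { admin }.enable :read_user_email_address
  rule { user_is_self }.enable :read_user_email_address
end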
diff --git a/spec/policies/work_item_policy_spec.rb b/spec/policies/work_item_policy_spec.rb
index ed76ec1eccf..3d282271d60 100644
--- a/spec/policies/work_item_policy_spec.rb
+++ b/spec/policies/work_item_policy_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe WorkItemPolicy do
let_it_be(:reporter) { create(:user).tap { |user| project.add_reporter(user) } }
let_it_be(:group_reporter) { create(:user).tap { |user| group.add_reporter(user) } }
let_it_be(:non_member_user) { create(:user) }
- let_it_be(:work_item) { create(:work_item, project: project) }
+ let_it_be_with_reload(:work_item) { create(:work_item, project: project) }
let_it_be(:authored_work_item) { create(:work_item, project: project, author: guest_author) }
let_it_be(:public_work_item) { create(:work_item, project: public_project) }
diff --git a/spec/presenters/ci/build_runner_presenter_spec.rb b/spec/presenters/ci/build_runner_presenter_spec.rb
index 952de121cc4..dedfe6925c5 100644
--- a/spec/presenters/ci/build_runner_presenter_spec.rb
+++ b/spec/presenters/ci/build_runner_presenter_spec.rb
@@ -349,16 +349,6 @@ RSpec.describe Ci::BuildRunnerPresenter do
public: false, masked: false }
)
end
-
- it 'logs file_variable_is_referenced_in_another_variable' do
- expect(Gitlab::AppJsonLogger).to receive(:info).with(
- event: 'file_variable_is_referenced_in_another_variable',
- project_id: project.id,
- variable: 'file_var'
- ).once
-
- runner_variables
- end
end
context 'when there is a raw variable to expand' do
@@ -385,23 +375,6 @@ RSpec.describe Ci::BuildRunnerPresenter do
public: false, masked: false }
)
end
-
- context 'when the FF ci_raw_variables_in_yaml_config is disabled' do
- before do
- stub_feature_flags(ci_raw_variables_in_yaml_config: false)
- end
-
- it 'returns expanded variables' do
- expect(runner_variables).to include(
- { key: 'regular_var', value: 'value 1',
- public: false, masked: false },
- { key: 'raw_var', value: 'value 2',
- public: false, masked: false, raw: true },
- { key: 'var_with_variables', value: 'value 3 and value 1 and value 2 and $undefined_var',
- public: false, masked: false }
- )
- end
- end
end
end
diff --git a/spec/presenters/ci/stage_presenter_spec.rb b/spec/presenters/ci/stage_presenter_spec.rb
index 368f03b0150..e7187b4ac16 100644
--- a/spec/presenters/ci/stage_presenter_spec.rb
+++ b/spec/presenters/ci/stage_presenter_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Ci::StagePresenter do
let!(:retried_build) { create(:ci_build, :tags, :artifacts, :retried, pipeline: stage.pipeline, stage: stage.name) }
before do
- create(:generic_commit_status, pipeline: stage.pipeline, stage: stage.name)
+ create(:generic_commit_status, pipeline: stage.pipeline, ci_stage: stage)
end
shared_examples 'preloaded associations for CI status' do
diff --git a/spec/presenters/packages/nuget/packages_metadata_presenter_spec.rb b/spec/presenters/packages/nuget/packages_metadata_presenter_spec.rb
index 39682a3311c..87a87cd8d70 100644
--- a/spec/presenters/packages/nuget/packages_metadata_presenter_spec.rb
+++ b/spec/presenters/packages/nuget/packages_metadata_presenter_spec.rb
@@ -2,12 +2,13 @@
require 'spec_helper'
-RSpec.describe Packages::Nuget::PackagesMetadataPresenter do
+RSpec.describe Packages::Nuget::PackagesMetadataPresenter, feature_category: :package_registry do
include_context 'with expected presenters dependency groups'
let_it_be(:project) { create(:project) }
let_it_be(:packages) { create_list(:nuget_package, 5, :with_metadatum, name: 'Dummy.Package', project: project) }
- let_it_be(:presenter) { described_class.new(packages) }
+
+ let(:presenter) { described_class.new(project.packages) }
describe '#count' do
subject { presenter.count }
@@ -28,6 +29,14 @@ RSpec.describe Packages::Nuget::PackagesMetadataPresenter do
end
end
+ it 'avoids N+1 database queries' do
+ control = ActiveRecord::QueryRecorder.new { described_class.new(project.packages).items }
+
+ create(:nuget_package, :with_metadatum, name: 'Dummy.Package', project: project)
+
+ expect { described_class.new(project.packages).items }.not_to exceed_query_limit(control)
+ end
+
it 'returns an array' do
items = subject
diff --git a/spec/presenters/project_presenter_spec.rb b/spec/presenters/project_presenter_spec.rb
index 4c2b87f34a1..e3221c18afc 100644
--- a/spec/presenters/project_presenter_spec.rb
+++ b/spec/presenters/project_presenter_spec.rb
@@ -286,6 +286,46 @@ RSpec.describe ProjectPresenter do
link: presenter.project_usage_quotas_path(project)
)
end
+
+ describe '#gitlab_ci_anchor_data' do
+ before do
+ project.update!(auto_devops_enabled: false)
+ end
+
+ context 'when user cannot collaborate' do
+ it 'returns no value' do
+ expect(presenter.gitlab_ci_anchor_data).to be(nil)
+ end
+ end
+
+ context 'when user can collaborate' do
+ before do
+ project.add_developer(user)
+ end
+
+ context 'and the CI/CD file is missing' do
+ it 'returns `Set up CI/CD` button' do
+ expect(presenter.gitlab_ci_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including('Set up CI/CD'),
+ link: presenter.project_ci_pipeline_editor_path(project)
+ )
+ end
+ end
+
+ context 'and there is a CI/CD file' do
+ it 'returns `CI/CD configuration` button' do
+ allow(project.repository).to receive(:gitlab_ci_yml).and_return 'Default content'
+
+ expect(presenter.gitlab_ci_anchor_data).to have_attributes(
+ is_link: false,
+ label: a_string_including('CI/CD configuration'),
+ link: presenter.project_ci_pipeline_editor_path(project)
+ )
+ end
+ end
+ end
+ end
end
describe '#releases_anchor_data' do
diff --git a/spec/requests/abuse_reports_controller_spec.rb b/spec/requests/abuse_reports_controller_spec.rb
index 510855d95e0..49a80689c65 100644
--- a/spec/requests/abuse_reports_controller_spec.rb
+++ b/spec/requests/abuse_reports_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe AbuseReportsController, feature_category: :users do
+RSpec.describe AbuseReportsController, feature_category: :insider_threat do
let(:reporter) { create(:user) }
let(:user) { create(:user) }
let(:attrs) do
@@ -16,6 +16,18 @@ RSpec.describe AbuseReportsController, feature_category: :users do
end
describe 'GET new' do
+ let(:ref_url) { 'http://example.com' }
+
+ it 'sets the instance variables' do
+ get new_abuse_report_path(user_id: user.id, ref_url: ref_url)
+
+ expect(assigns(:abuse_report)).to be_kind_of(AbuseReport)
+ expect(assigns(:abuse_report)).to have_attributes(
+ user_id: user.id,
+ reported_from_url: ref_url
+ )
+ end
+
context 'when the user has already been deleted' do
it 'redirects the reporter to root_path' do
user_id = user.id
@@ -40,6 +52,82 @@ RSpec.describe AbuseReportsController, feature_category: :users do
end
end
+ describe 'POST add_category', :aggregate_failures do
+ subject(:request) { post add_category_abuse_reports_path, params: request_params }
+
+ let(:abuse_category) { 'spam' }
+
+ context 'when user is reported for abuse' do
+ let(:ref_url) { 'http://example.com' }
+ let(:request_params) do
+ { user_id: user.id, abuse_report: { category: abuse_category, reported_from_url: ref_url } }
+ end
+
+ it 'renders new template' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to render_template(:new)
+ end
+
+ it 'sets the instance variables' do
+ subject
+
+ expect(assigns(:abuse_report)).to be_kind_of(AbuseReport)
+ expect(assigns(:abuse_report)).to have_attributes(
+ user_id: user.id,
+ category: abuse_category,
+ reported_from_url: ref_url
+ )
+ end
+ end
+
+ context 'when abuse_report is missing in params' do
+ let(:request_params) { { user_id: user.id } }
+
+ it 'raises an error' do
+ expect { subject }.to raise_error(ActionController::ParameterMissing)
+ end
+ end
+
+ context 'when user_id is missing in params' do
+ let(:request_params) { { abuse_report: { category: abuse_category } } }
+
+ it 'redirects the reporter to root_path' do
+ subject
+
+ expect(response).to redirect_to root_path
+ expect(flash[:alert]).to eq(_('Cannot create the abuse report. The user has been deleted.'))
+ end
+ end
+
+ context 'when the user has already been deleted' do
+ let(:request_params) { { user_id: user.id, abuse_report: { category: abuse_category } } }
+
+ it 'redirects the reporter to root_path' do
+ user.destroy!
+
+ subject
+
+ expect(response).to redirect_to root_path
+ expect(flash[:alert]).to eq(_('Cannot create the abuse report. The user has been deleted.'))
+ end
+ end
+
+ context 'when the user has already been blocked' do
+ let(:request_params) { { user_id: user.id, abuse_report: { category: abuse_category } } }
+
+ it 'redirects the reporter to the user\'s profile' do
+ user.block
+
+ subject
+
+ expect(response).to redirect_to user
+ expect(flash[:alert]).to eq(_('Cannot create the abuse report. This user has been blocked.'))
+ end
+ end
+ end
+
describe 'POST create' do
context 'with valid attributes' do
it 'saves the abuse report' do
diff --git a/spec/requests/api/appearance_spec.rb b/spec/requests/api/appearance_spec.rb
index 84d5b091b8d..5aba7e096a7 100644
--- a/spec/requests/api/appearance_spec.rb
+++ b/spec/requests/api/appearance_spec.rb
@@ -23,6 +23,7 @@ RSpec.describe API::Appearance, 'Appearance', feature_category: :navigation do
expect(json_response).to be_an Hash
expect(json_response['description']).to eq('')
expect(json_response['email_header_and_footer_enabled']).to be(false)
+ expect(json_response['pwa_icon']).to be_nil
expect(json_response['favicon']).to be_nil
expect(json_response['footer_message']).to eq('')
expect(json_response['header_logo']).to be_nil
@@ -33,7 +34,7 @@ RSpec.describe API::Appearance, 'Appearance', feature_category: :navigation do
expect(json_response['new_project_guidelines']).to eq('')
expect(json_response['profile_image_guidelines']).to eq('')
expect(json_response['title']).to eq('')
- expect(json_response['short_title']).to eq('')
+ expect(json_response['pwa_short_name']).to eq('')
end
end
end
@@ -52,7 +53,7 @@ RSpec.describe API::Appearance, 'Appearance', feature_category: :navigation do
it "allows updating the settings" do
put api("/application/appearance", admin), params: {
title: "GitLab Test Instance",
- short_title: "GitLab",
+ pwa_short_name: "GitLab PWA",
description: "gitlab-test.example.com",
new_project_guidelines: "Please read the FAQs for help.",
profile_image_guidelines: "Custom profile image guidelines"
@@ -62,6 +63,7 @@ RSpec.describe API::Appearance, 'Appearance', feature_category: :navigation do
expect(json_response).to be_an Hash
expect(json_response['description']).to eq('gitlab-test.example.com')
expect(json_response['email_header_and_footer_enabled']).to be(false)
+ expect(json_response['pwa_icon']).to be_nil
expect(json_response['favicon']).to be_nil
expect(json_response['footer_message']).to eq('')
expect(json_response['header_logo']).to be_nil
@@ -72,7 +74,7 @@ RSpec.describe API::Appearance, 'Appearance', feature_category: :navigation do
expect(json_response['new_project_guidelines']).to eq('Please read the FAQs for help.')
expect(json_response['profile_image_guidelines']).to eq('Custom profile image guidelines')
expect(json_response['title']).to eq('GitLab Test Instance')
- expect(json_response['short_title']).to eq('GitLab')
+ expect(json_response['pwa_short_name']).to eq('GitLab PWA')
end
end
@@ -118,12 +120,14 @@ RSpec.describe API::Appearance, 'Appearance', feature_category: :navigation do
put api("/application/appearance", admin), params: {
logo: fixture_file_upload("spec/fixtures/dk.png", "image/png"),
header_logo: fixture_file_upload("spec/fixtures/dk.png", "image/png"),
+ pwa_icon: fixture_file_upload("spec/fixtures/dk.png", "image/png"),
favicon: fixture_file_upload("spec/fixtures/dk.png", "image/png")
}
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['logo']).to eq("/uploads/-/system/appearance/logo/#{appearance.id}/dk.png")
expect(json_response['header_logo']).to eq("/uploads/-/system/appearance/header_logo/#{appearance.id}/dk.png")
+ expect(json_response['pwa_icon']).to eq("/uploads/-/system/appearance/pwa_icon/#{appearance.id}/dk.png")
expect(json_response['favicon']).to eq("/uploads/-/system/appearance/favicon/#{appearance.id}/dk.png")
end
diff --git a/spec/requests/api/boards_spec.rb b/spec/requests/api/boards_spec.rb
index 69804c2c4a4..5f2ff22d0db 100644
--- a/spec/requests/api/boards_spec.rb
+++ b/spec/requests/api/boards_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Boards, feature_category: :team_planning do
+RSpec.describe API::Boards, :with_license, feature_category: :team_planning do
let_it_be(:user) { create(:user) }
let_it_be(:non_member) { create(:user) }
let_it_be(:guest) { create(:user) }
diff --git a/spec/requests/api/bulk_imports_spec.rb b/spec/requests/api/bulk_imports_spec.rb
index 13f079c69e7..4fb4fbe6d5c 100644
--- a/spec/requests/api/bulk_imports_spec.rb
+++ b/spec/requests/api/bulk_imports_spec.rb
@@ -11,9 +11,26 @@ RSpec.describe API::BulkImports, feature_category: :importers do
let_it_be(:entity_3) { create(:bulk_import_entity, bulk_import: import_2) }
let_it_be(:failure_3) { create(:bulk_import_failure, entity: entity_3) }
+ before do
+ stub_application_setting(bulk_import_enabled: true)
+ end
+
+ shared_examples 'disabled feature' do
+ it 'returns 404' do
+ stub_application_setting(bulk_import_enabled: false)
+
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
describe 'GET /bulk_imports' do
+ let(:request) { get api('/bulk_imports', user), params: params }
+ let(:params) { {} }
+
it 'returns a list of bulk imports authored by the user' do
- get api('/bulk_imports', user)
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.pluck('id')).to contain_exactly(import_1.id, import_2.id)
@@ -21,26 +38,38 @@ RSpec.describe API::BulkImports, feature_category: :importers do
context 'sort parameter' do
it 'sorts by created_at descending by default' do
- get api('/bulk_imports', user)
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.pluck('id')).to eq([import_2.id, import_1.id])
end
- it 'sorts by created_at descending when explicitly specified' do
- get api('/bulk_imports', user), params: { sort: 'desc' }
+ context 'when explicitly specified' do
+ context 'when descending' do
+ let(:params) { { sort: 'desc' } }
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.pluck('id')).to eq([import_2.id, import_1.id])
- end
+ it 'sorts by created_at descending' do
+ request
- it 'sorts by created_at ascending when explicitly specified' do
- get api('/bulk_imports', user), params: { sort: 'asc' }
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.pluck('id')).to match_array([import_2.id, import_1.id])
+ end
+ end
- expect(response).to have_gitlab_http_status(:ok)
- expect(json_response.pluck('id')).to eq([import_1.id, import_2.id])
+ context 'when ascending' do
+ let(:params) { { sort: 'asc' } }
+
+ it 'sorts by created_at ascending when explicitly specified' do
+ request
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.pluck('id')).to match_array([import_1.id, import_2.id])
+ end
+ end
end
end
+
+ include_examples 'disabled feature'
end
describe 'POST /bulk_imports' do
@@ -56,21 +85,10 @@ RSpec.describe API::BulkImports, feature_category: :importers do
end
end
- context 'when bulk_import feature flag is disabled' do
- before do
- stub_feature_flags(bulk_import: false)
- end
-
- it 'returns 404' do
- post api('/bulk_imports', user), params: {}
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
-
shared_examples 'starting a new migration' do
- it 'starts a new migration' do
- post api('/bulk_imports', user), params: {
+ let(:request) { post api('/bulk_imports', user), params: params }
+ let(:params) do
+ {
configuration: {
url: 'http://gitlab.example',
access_token: 'access_token'
@@ -83,11 +101,45 @@ RSpec.describe API::BulkImports, feature_category: :importers do
}.merge(destination_param)
]
}
+ end
+
+ it 'starts a new migration' do
+ request
expect(response).to have_gitlab_http_status(:created)
expect(json_response['status']).to eq('created')
end
+
+ describe 'migrate projects flag' do
+ context 'when true' do
+ it 'sets true' do
+ params[:entities][0][:migrate_projects] = true
+
+ request
+
+ expect(user.bulk_imports.last.entities.pluck(:migrate_projects)).to contain_exactly(true)
+ end
+ end
+
+ context 'when false' do
+ it 'sets false' do
+ params[:entities][0][:migrate_projects] = false
+
+ request
+
+ expect(user.bulk_imports.last.entities.pluck(:migrate_projects)).to contain_exactly(false)
+ end
+ end
+
+ context 'when unspecified' do
+ it 'sets true' do
+ request
+
+ expect(user.bulk_imports.last.entities.pluck(:migrate_projects)).to contain_exactly(true)
+ end
+ end
+ end
end
include_examples 'starting a new migration' do
@@ -99,8 +151,8 @@ RSpec.describe API::BulkImports, feature_category: :importers do
end
context 'when both destination_name & destination_slug are provided' do
- it 'returns a mutually exclusive error' do
- post api('/bulk_imports', user), params: {
+ let(:params) do
+ {
configuration: {
url: 'http://gitlab.example',
access_token: 'access_token'
@@ -115,6 +167,10 @@ RSpec.describe API::BulkImports, feature_category: :importers do
}
]
}
+ end
+
+ it 'returns a mutually exclusive error' do
+ request
expect(response).to have_gitlab_http_status(:bad_request)
@@ -123,8 +179,8 @@ RSpec.describe API::BulkImports, feature_category: :importers do
end
context 'when neither destination_name nor destination_slug is provided' do
- it 'returns at_least_one_of error' do
- post api('/bulk_imports', user), params: {
+ let(:params) do
+ {
configuration: {
url: 'http://gitlab.example',
access_token: 'access_token'
@@ -137,6 +193,10 @@ RSpec.describe API::BulkImports, feature_category: :importers do
}
]
}
+ end
+
+ it 'returns at_least_one_of error' do
+ request
expect(response).to have_gitlab_http_status(:bad_request)
@@ -144,9 +204,57 @@ RSpec.describe API::BulkImports, feature_category: :importers do
end
end
+ context 'when the source_full_path is invalid' do
+ it 'returns invalid error' do
+ params[:entities][0][:source_full_path] = 'http://example.com/full_path'
+
+ request
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq("entities[0][source_full_path] must be a relative path and not include protocol, sub-domain, " \
+ "or domain information. E.g. 'source/full/path' not 'https://example.com/source/full/path'")
+ end
+ end
+
+ context 'when the destination_namespace is invalid' do
+ it 'returns invalid error' do
+ params[:entities][0][:destination_namespace] = "?not a destination-namespace"
+
+ request
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq("entities[0][destination_namespace] cannot start with a dash or forward slash, " \
+ "or end with a period or forward slash. It can only contain alphanumeric " \
+ "characters, periods, underscores, forward slashes and dashes. " \
+ "E.g. 'destination_namespace' or 'destination/namespace'")
+ end
+ end
+
+ context 'when the destination_namespace is an empty string' do
+ it 'accepts the param and starts a new migration' do
+ params[:entities][0][:destination_namespace] = ''
+
+ request
+ expect(response).to have_gitlab_http_status(:created)
+
+ expect(json_response['status']).to eq('created')
+ end
+ end
+
+ context 'when the destination_slug is invalid' do
+ it 'returns invalid error' do
+ params[:entities][0][:destination_slug] = 'des?tin?atoi-slugg'
+
+ request
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to include("entities[0][destination_slug] cannot start with a dash " \
+ "or forward slash, or end with a period or forward slash. " \
+ "It can only contain alphanumeric characters, periods, underscores, and dashes. " \
+ "E.g. 'destination_namespace' not 'destination/namespace'")
+ end
+ end
+
context 'when provided url is blocked' do
- it 'returns blocked url error' do
- post api('/bulk_imports', user), params: {
+ let(:params) do
+ {
configuration: {
url: 'url',
access_token: 'access_token'
@@ -158,49 +266,71 @@ RSpec.describe API::BulkImports, feature_category: :importers do
destination_namespace: 'destination_namespace'
]
}
+ end
+
+ it 'returns blocked url error' do
+ request
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['message']).to eq('Validation failed: Url is blocked: Only allowed schemes are http, https')
end
end
+
+ include_examples 'disabled feature'
end
describe 'GET /bulk_imports/entities' do
+ let(:request) { get api('/bulk_imports/entities', user) }
+
it 'returns a list of all import entities authored by the user' do
- get api('/bulk_imports/entities', user)
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.pluck('id')).to contain_exactly(entity_1.id, entity_2.id, entity_3.id)
end
+
+ include_examples 'disabled feature'
end
describe 'GET /bulk_imports/:id' do
+ let(:request) { get api("/bulk_imports/#{import_1.id}", user) }
+
it 'returns specified bulk import' do
- get api("/bulk_imports/#{import_1.id}", user)
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(import_1.id)
end
+
+ include_examples 'disabled feature'
end
describe 'GET /bulk_imports/:id/entities' do
+ let(:request) { get api("/bulk_imports/#{import_2.id}/entities", user) }
+
it 'returns specified bulk import entities with failures' do
- get api("/bulk_imports/#{import_2.id}/entities", user)
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(json_response.pluck('id')).to contain_exactly(entity_3.id)
expect(json_response.first['failures'].first['exception_class']).to eq(failure_3.exception_class)
end
+
+ include_examples 'disabled feature'
end
describe 'GET /bulk_imports/:id/entities/:entity_id' do
+ let(:request) { get api("/bulk_imports/#{import_1.id}/entities/#{entity_2.id}", user) }
+
it 'returns specified bulk import entity' do
- get api("/bulk_imports/#{import_1.id}/entities/#{entity_2.id}", user)
+ request
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['id']).to eq(entity_2.id)
end
+
+ include_examples 'disabled feature'
end
context 'when user is unauthenticated' do
diff --git a/spec/requests/api/ci/jobs_spec.rb b/spec/requests/api/ci/jobs_spec.rb
index 4e348ae64b6..875bfc5b94f 100644
--- a/spec/requests/api/ci/jobs_spec.rb
+++ b/spec/requests/api/ci/jobs_spec.rb
@@ -487,6 +487,76 @@ RSpec.describe API::Ci::Jobs, feature_category: :continuous_integration do
end
end
+ describe 'GET /projects/:id/jobs offset pagination' do
+ before do
+ running_job
+ end
+
+ it 'returns one record for the first page' do
+ get api("/projects/#{project.id}/jobs", api_user), params: { per_page: 1 }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.size).to eq(1)
+ expect(json_response.first['id']).to eq(running_job.id)
+ end
+
+ it 'returns the second record when page and per_page params are passed' do
+ get api("/projects/#{project.id}/jobs", api_user), params: { page: 2, per_page: 1 }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.size).to eq(1)
+ expect(json_response.first['id']).to eq(job.id)
+ end
+ end
+
+ describe 'GET /projects/:id/jobs keyset pagination' do
+ before do
+ running_job
+ end
+
+ it 'returns first page with cursor to next page' do
+ get api("/projects/#{project.id}/jobs", api_user), params: { pagination: 'keyset', per_page: 1 }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.size).to eq(1)
+ expect(json_response.first['id']).to eq(running_job.id)
+ expect(response.headers["Link"]).to include("cursor")
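+ # Pull the cursor parameter out of the Link header so it can be passed to the next keyset request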
+ next_cursor = response.headers["Link"].match("(?<cursor_data>cursor=.*?)&")["cursor_data"]
+
+ get api("/projects/#{project.id}/jobs", api_user), params: { pagination: 'keyset', per_page: 1 }.merge(Rack::Utils.parse_query(next_cursor))
+
+ expect(response).to have_gitlab_http_status(:ok)
+ json_response = Gitlab::Json.parse(response.body)
+ expect(json_response.size).to eq(1)
+ expect(json_response.first['id']).to eq(job.id)
+ expect(response.headers).not_to include("Link")
+ end
+
+ it 'respects scope filters' do
+ get api("/projects/#{project.id}/jobs", api_user), params: { pagination: 'keyset', scope: ['success'] }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.size).to eq(1)
+ expect(json_response.first['id']).to eq(job.id)
+ expect(response.headers).not_to include("Link")
+ end
+
+ context 'with :jobs_api_keyset_pagination disabled' do
+ before do
+ stub_feature_flags(jobs_api_keyset_pagination: false)
+ end
+
+ it 'defaults to offset pagination' do
+ get api("/projects/#{project.id}/jobs", api_user), params: { pagination: 'keyset', per_page: 1 }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response.size).to eq(1)
+ expect(json_response.first['id']).to eq(running_job.id)
+ expect(response.headers["Link"]).not_to include("cursor")
+ end
+ end
+ end
+
describe 'GET /projects/:id/jobs rate limited' do
let(:query) { {} }
diff --git a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
index 1c119079c50..3d3d699542b 100644
--- a/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
+++ b/spec/requests/api/ci/runner/jobs_artifacts_spec.rb
@@ -575,6 +575,45 @@ RSpec.describe API::Ci::Runner, :clean_gitlab_redis_shared_state, feature_catego
end
end
+ context 'when access level is private' do
+ subject(:request) { upload_artifacts(file_upload, headers_with_token, params) }
+
+ let(:params) { { artifact_type: :archive, artifact_format: :zip, accessibility: 'private' } }
+
+ it 'sets job artifact access level to private' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_archive).to be_private_accessibility
+ end
+ end
+
+ context 'when access level is public' do
+ subject(:request) { upload_artifacts(file_upload, headers_with_token, params) }
+
+ let(:params) { { artifact_type: :archive, artifact_format: :zip, accessibility: 'public' } }
+
+ it 'sets job artifact access level to public' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_archive).to be_public_accessibility
+ end
+ end
+
+ context 'when access level is unknown' do
+ subject(:request) { upload_artifacts(file_upload, headers_with_token, params) }
+
+ let(:params) { { artifact_type: :archive, artifact_format: :zip } }
+
+ it 'sets job artifact access level to public' do
+ subject
+
+ expect(response).to have_gitlab_http_status(:created)
+ expect(job.reload.job_artifacts_archive).to be_public_accessibility
+ end
+ end
+
context 'when artifact_type is archive' do
context 'when artifact_format is zip' do
subject(:request) { upload_artifacts(file_upload, headers_with_token, params) }
diff --git a/spec/requests/api/commits_spec.rb b/spec/requests/api/commits_spec.rb
index 5874d764b00..3932abd20cc 100644
--- a/spec/requests/api/commits_spec.rb
+++ b/spec/requests/api/commits_spec.rb
@@ -2337,18 +2337,6 @@ RSpec.describe API::Commits, feature_category: :source_code_management do
expect(json_response['commit_source']).to eq('gitaly')
end
end
-
- context 'when feature flag is disabled' do
- before do
- stub_feature_flags(ssh_commit_signatures: false)
- end
-
- it 'returns 404' do
- get api(route, current_user)
-
- expect(response).to have_gitlab_http_status(:not_found)
- end
- end
end
end
end
diff --git a/spec/requests/api/debian_project_packages_spec.rb b/spec/requests/api/debian_project_packages_spec.rb
index c27e165b39b..5258d26be17 100644
--- a/spec/requests/api/debian_project_packages_spec.rb
+++ b/spec/requests/api/debian_project_packages_spec.rb
@@ -5,7 +5,17 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d
include HttpBasicAuthHelpers
include WorkhorseHelpers
- include_context 'Debian repository shared context', :project, true do
+ include_context 'Debian repository shared context', :project, false do
+ shared_examples 'accept GET request on private project with access to package registry for everyone' do
+ include_context 'Debian repository access', :private, :anonymous, :basic do
+ before do
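+ # Open the package registry to everyone so the anonymous GET below succeeds on the private project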
+ container.project_feature.reload.update!(package_registry_access_level: ProjectFeature::PUBLIC)
+ end
+
+ it_behaves_like 'Debian packages GET request', :success
+ end
+ end
+
context 'with invalid parameter' do
let(:url) { "/projects/1/packages/debian/dists/with+space/InRelease" }
@@ -16,54 +26,63 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d
let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/Release.gpg" }
it_behaves_like 'Debian packages read endpoint', 'GET', :success, /^-----BEGIN PGP SIGNATURE-----/
+ it_behaves_like 'accept GET request on private project with access to package registry for everyone'
end
describe 'GET projects/:id/packages/debian/dists/*distribution/Release' do
let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/Release" }
it_behaves_like 'Debian packages read endpoint', 'GET', :success, /^Codename: fixture-distribution\n$/
+ it_behaves_like 'accept GET request on private project with access to package registry for everyone'
end
describe 'GET projects/:id/packages/debian/dists/*distribution/InRelease' do
let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/InRelease" }
it_behaves_like 'Debian packages read endpoint', 'GET', :success, /^-----BEGIN PGP SIGNED MESSAGE-----/
+ it_behaves_like 'accept GET request on private project with access to package registry for everyone'
end
describe 'GET projects/:id/packages/debian/dists/*distribution/:component/binary-:architecture/Packages' do
let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{component.name}/binary-#{architecture.name}/Packages" }
it_behaves_like 'Debian packages read endpoint', 'GET', :success, /Description: This is an incomplete Packages file/
+ it_behaves_like 'accept GET request on private project with access to package registry for everyone'
end
describe 'GET projects/:id/packages/debian/dists/*distribution/:component/binary-:architecture/by-hash/SHA256/:file_sha256' do
let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{component.name}/binary-#{architecture.name}/by-hash/SHA256/#{component_file_older_sha256.file_sha256}" }
it_behaves_like 'Debian packages read endpoint', 'GET', :success, /^Other SHA256$/
+ it_behaves_like 'accept GET request on private project with access to package registry for everyone'
end
- describe 'GET projects/:id/packages/debian/dists/*distribution/source/Sources' do
+ describe 'GET projects/:id/packages/debian/dists/*distribution/:component/source/Sources' do
let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{component.name}/source/Sources" }
it_behaves_like 'Debian packages read endpoint', 'GET', :success, /Description: This is an incomplete Sources file/
+ it_behaves_like 'accept GET request on private project with access to package registry for everyone'
end
- describe 'GET projects/:id/packages/debian/dists/*distribution/source/by-hash/SHA256/:file_sha256' do
+ describe 'GET projects/:id/packages/debian/dists/*distribution/:component/source/by-hash/SHA256/:file_sha256' do
let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{component.name}/source/by-hash/SHA256/#{component_file_sources_older_sha256.file_sha256}" }
it_behaves_like 'Debian packages read endpoint', 'GET', :success, /^Other SHA256$/
+ it_behaves_like 'accept GET request on private project with access to package registry for everyone'
end
describe 'GET projects/:id/packages/debian/dists/*distribution/:component/debian-installer/binary-:architecture/Packages' do
let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{component.name}/debian-installer/binary-#{architecture.name}/Packages" }
it_behaves_like 'Debian packages read endpoint', 'GET', :success, /Description: This is an incomplete D-I Packages file/
+ it_behaves_like 'accept GET request on private project with access to package registry for everyone'
end
describe 'GET projects/:id/packages/debian/dists/*distribution/:component/debian-installer/binary-:architecture/by-hash/SHA256/:file_sha256' do
let(:url) { "/projects/#{container.id}/packages/debian/dists/#{distribution.codename}/#{component.name}/debian-installer/binary-#{architecture.name}/by-hash/SHA256/#{component_file_di_older_sha256.file_sha256}" }
it_behaves_like 'Debian packages read endpoint', 'GET', :success, /^Other SHA256$/
+ it_behaves_like 'accept GET request on private project with access to package registry for everyone'
end
describe 'GET projects/:id/packages/debian/pool/:codename/:letter/:package_name/:package_version/:file_name' do
@@ -90,6 +109,10 @@ RSpec.describe API::DebianProjectPackages, feature_category: :package_registry d
end
end
end
+
+ it_behaves_like 'accept GET request on private project with access to package registry for everyone' do
+ let(:file_name) { 'sample_1.2.3~alpha2.dsc' }
+ end
end
describe 'PUT projects/:id/packages/debian/:file_name' do
diff --git a/spec/requests/api/environments_spec.rb b/spec/requests/api/environments_spec.rb
index d06e70a1a02..6164555ad19 100644
--- a/spec/requests/api/environments_spec.rb
+++ b/spec/requests/api/environments_spec.rb
@@ -4,12 +4,14 @@ require 'spec_helper'
RSpec.describe API::Environments, feature_category: :continuous_delivery do
let_it_be(:user) { create(:user) }
+ let_it_be(:developer) { create(:user) }
let_it_be(:non_member) { create(:user) }
let_it_be(:project) { create(:project, :private, :repository, namespace: user.namespace) }
let_it_be_with_reload(:environment) { create(:environment, project: project) }
before do
project.add_maintainer(user)
+ project.add_developer(developer)
end
describe 'GET /projects/:id/environments', :aggregate_failures do
@@ -69,6 +71,34 @@ RSpec.describe API::Environments, feature_category: :continuous_delivery do
expect(json_response.size).to eq(0)
end
+ context "when params[:search] is less than #{described_class::MIN_SEARCH_LENGTH} characters" do
+ before do
+ stub_feature_flags(environment_search_api_min_chars: false)
+ end
+
+ it 'returns a successful response' do
+ get api("/projects/#{project.id}/environments?search=ab", user)
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(response).to include_pagination_headers
+ expect(json_response).to be_an Array
+ expect(json_response.size).to eq(0)
+ end
+
+ context 'and environment_search_api_min_chars flag is enabled for the project' do
+ before do
+ stub_feature_flags(environment_search_api_min_chars: project)
+ end
+
+ it 'returns with status 400' do
+ get api("/projects/#{project.id}/environments?search=ab", user)
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to include("Search query is less than #{described_class::MIN_SEARCH_LENGTH} characters")
+ end
+ end
+ end
+
it 'returns environment by valid state' do
get api("/projects/#{project.id}/environments?states=available", user)
@@ -154,6 +184,50 @@ RSpec.describe API::Environments, feature_category: :continuous_delivery do
end
end
+ describe 'POST /projects/:id/environments/stop_stale' do
+ context 'as a maintainer' do
+ it 'returns a 200' do
+ post api("/projects/#{project.id}/environments/stop_stale", user), params: { before: 1.week.ago.to_date.to_s }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+
+ it 'returns a 400 for an invalid date' do
+ post api("/projects/#{project.id}/environments/stop_stale", user), params: { before: 1.day.ago.to_date.to_s }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq('400 Bad request - Invalid Date')
+ end
+
+ it 'returns a 400 for service error' do
+ expect_next_instance_of(::Environments::StopStaleService) do |service|
+ expect(service).to receive(:execute).and_return(ServiceResponse.error(message: 'Test Error'))
+ end
+
+ post api("/projects/#{project.id}/environments/stop_stale", user), params: { before: 1.week.ago.to_date.to_s }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']).to eq('Test Error')
+ end
+ end
+
+ context 'as a non-member' do
+ it 'rejects the request' do
+ post api("/projects/#{project.id}/environments/stop_stale", non_member), params: { before: 1.week.ago.to_date.to_s }
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+
+ context 'as a developer' do
+ it 'rejects the request' do
+ post api("/projects/#{project.id}/environments/stop_stale", developer), params: { before: 1.week.ago.to_date.to_s }
+
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+ end
+
describe 'PUT /projects/:id/environments/:environment_id' do
it 'returns a 200 if name and external_url are changed' do
url = 'https://mepmep.whatever.ninja'
diff --git a/spec/requests/api/files_spec.rb b/spec/requests/api/files_spec.rb
index 9cee3c06bb1..f4066c54c47 100644
--- a/spec/requests/api/files_spec.rb
+++ b/spec/requests/api/files_spec.rb
@@ -6,6 +6,24 @@ RSpec.describe API::Files, feature_category: :source_code_management do
include RepoHelpers
let_it_be(:group) { create(:group, :public) }
+ let(:helper) do
+ fake_class = Class.new do
+ include ::API::Helpers::HeadersHelpers
+
+ attr_reader :headers
+
+ def initialize
+ @headers = {}
+ end
+
+ def header(key, value)
+ @headers[key] = value
+ end
+ end
+
+ fake_class.new
+ end
+
let_it_be_with_refind(:user) { create(:user) }
let_it_be(:inherited_guest) { create(:user) }
let_it_be(:inherited_reporter) { create(:user) }
@@ -37,25 +55,9 @@ RSpec.describe API::Files, feature_category: :source_code_management do
}
end
- let(:author_email) { 'user@example.org' }
- let(:author_name) { 'John Doe' }
-
- let(:helper) do
- fake_class = Class.new do
- include ::API::Helpers::HeadersHelpers
-
- attr_reader :headers
-
- def initialize
- @headers = {}
- end
-
- def header(key, value)
- @headers[key] = value
- end
- end
-
- fake_class.new
+ shared_context 'with author parameters' do
+ let(:author_email) { 'user@example.org' }
+ let(:author_name) { 'John Doe' }
end
before_all do
@@ -702,6 +704,80 @@ RSpec.describe API::Files, feature_category: :source_code_management do
end
end
+ describe 'HEAD /projects/:id/repository/files/:file_path/raw' do
+ let(:request) { head api(route(file_path) + '/raw', current_user), params: params }
+
+ describe 'response headers' do
+ subject { response.headers }
+
+ context 'and user is a developer' do
+ let(:current_user) { user }
+
+ it 'responds with blob data' do
+ request
+ headers = response.headers
+ expect(headers['X-Gitlab-File-Name']).to eq(file_name)
+ expect(headers['X-Gitlab-File-Path']).to eq('files/ruby/popen.rb')
+ expect(headers['X-Gitlab-Content-Sha256']).to eq('c440cd09bae50c4632cc58638ad33c6aa375b6109d811e76a9cc3a613c1e8887')
+ expect(headers['X-Gitlab-Ref']).to eq('master')
+ expect(headers['X-Gitlab-Blob-Id']).to eq('7e3e39ebb9b2bf433b4ad17313770fbe4051649c')
+ expect(headers['X-Gitlab-Commit-Id']).to eq(project.repository.commit.id)
+ expect(headers['X-Gitlab-Last-Commit-Id']).to eq('570e7b2abdd848b95f2f578043fc23bd6f6fd24d')
+ end
+
+ context 'when lfs parameter is true and the project has lfs enabled' do
+ before do
+ allow(Gitlab.config.lfs).to receive(:enabled).and_return(true)
+ project.update_attribute(:lfs_enabled, true)
+ end
+
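+ # files%2Flfs%2Flfs_object.iso is the URL-encoded path files/lfs/lfs_object.iso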
+ let(:request) { head api(route('files%2Flfs%2Flfs_object.iso') + '/raw', current_user), params: params.merge(lfs: true) }
+
+ context 'and the file has an lfs object' do
+ let_it_be(:lfs_object) { create(:lfs_object, :with_file, oid: '91eff75a492a3ed0dfcb544d7f31326bc4014c8551849c192fd1e48d4dd2c897') }
+
+ it 'responds with 404' do
+ request
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+
+ context 'and the project has access to the lfs object' do
+ before do
+ project.lfs_objects << lfs_object
+ end
+
+ context 'and lfs uses AWS' do
+ before do
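+ # Move the LFS object to remote (AWS) object storage so the raw endpoint redirects to a signed URL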
+ stub_lfs_object_storage(config: Gitlab.config.lfs.object_store.merge(connection: {
+ provider: 'AWS',
+ aws_access_key_id: '',
+ aws_secret_access_key: ''
+ }))
+ lfs_object.file.migrate!(LfsObjectUploader::Store::REMOTE)
+ end
+
+ it 'redirects to the lfs object file with a signed url' do
+ request
+
+ expect(response).to have_gitlab_http_status(:found)
+ expect(response.location).to include(lfs_object.reload.file.path)
+ expect(response.location).to include('X-Amz-SignedHeaders')
+ end
+ end
+ end
+ end
+ end
+ end
+
+ context 'and user is a guest' do
+ it_behaves_like '403 response' do
+ let(:request) { head api(route(file_path), guest), params: params }
+ end
+ end
+ end
+ end
+
describe 'GET /projects/:id/repository/files/:file_path/raw' do
shared_examples_for 'repository raw files' do
it 'returns 400 when file path is invalid' do
@@ -1006,6 +1082,8 @@ RSpec.describe API::Files, feature_category: :source_code_management do
end
context 'when specifying an author' do
+ include_context 'with author parameters'
+
it 'creates a new file with the specified author' do
params.merge!(author_email: author_email, author_name: author_name)
post api(route('new_file_with_author%2Etxt'), user), params: params
@@ -1163,6 +1241,8 @@ RSpec.describe API::Files, feature_category: :source_code_management do
end
context 'when specifying an author' do
+ include_context 'with author parameters'
+
it 'updates a file with the specified author' do
params.merge!(author_email: author_email, author_name: author_name, content: 'New content')
@@ -1236,6 +1316,8 @@ RSpec.describe API::Files, feature_category: :source_code_management do
end
context 'when specifying an author' do
+ include_context 'with author parameters'
+
before do
params.merge!(author_email: author_email, author_name: author_name)
end
diff --git a/spec/requests/api/graphql/ci/config_spec.rb b/spec/requests/api/graphql/ci/config_spec.rb
index 8154f132430..5f43a0806f3 100644
--- a/spec/requests/api/graphql/ci/config_spec.rb
+++ b/spec/requests/api/graphql/ci/config_spec.rb
@@ -5,6 +5,7 @@ require 'spec_helper'
RSpec.describe 'Query.ciConfig', feature_category: :continuous_integration do
include GraphqlHelpers
include StubRequests
+ include RepoHelpers
subject(:post_graphql_query) { post_graphql(query, current_user: user) }
@@ -245,17 +246,22 @@ RSpec.describe 'Query.ciConfig', feature_category: :continuous_integration do
)
end
- before do
- allow_next_instance_of(Repository) do |repository|
- allow(repository).to receive(:blob_data_at).with(an_instance_of(String), 'other_file.yml') do
- YAML.dump(
- build: {
- script: 'build'
- }
- )
- end
+ let(:project_files) do
+ {
+ 'other_file.yml' => <<~YAML
+ build:
+ script: build
+ YAML
+ }
+ end
+
+ around do |example|
+ create_and_delete_files(project, project_files) do
+ example.run
end
+ end
+ before do
post_graphql_query
end
@@ -370,25 +376,33 @@ RSpec.describe 'Query.ciConfig', feature_category: :continuous_integration do
)
end
- before do
- allow_next_instance_of(Repository) do |repository|
- allow(repository).to receive(:blob_data_at).with(an_instance_of(String), 'other_file.yml') do
- YAML.dump(
- build: {
- script: 'build'
- }
- )
- end
+ let(:project_files) do
+ {
+ 'other_file.yml' => <<~YAML
+ build:
+ script: build
+ YAML
+ }
+ end
- allow(repository).to receive(:blob_data_at).with(an_instance_of(String), 'other_project_file.yml') do
- YAML.dump(
- other_project_test: {
- script: 'other_project_test'
- }
- )
+ let(:other_project_files) do
+ {
+ 'other_project_file.yml' => <<~YAML
+ other_project_test:
+ script: other_project_test
+ YAML
+ }
+ end
+
+ around do |example|
+ create_and_delete_files(project, project_files) do
+ create_and_delete_files(other_project, other_project_files) do
+ example.run
end
end
+ end
+ before do
stub_full_request('https://gitlab.com/gitlab-org/gitlab/raw/1234/.hello.yml').to_return(body: remote_file_content)
post_graphql_query
diff --git a/spec/requests/api/graphql/ci/jobs_spec.rb b/spec/requests/api/graphql/ci/jobs_spec.rb
index 7a1dc614dcf..131cdb77107 100644
--- a/spec/requests/api/graphql/ci/jobs_spec.rb
+++ b/spec/requests/api/graphql/ci/jobs_spec.rb
@@ -88,10 +88,10 @@ RSpec.describe 'Query.project.pipeline', feature_category: :continuous_integrati
build_stage = create(:ci_stage, position: 2, name: 'build', project: project, pipeline: pipeline)
test_stage = create(:ci_stage, position: 3, name: 'test', project: project, pipeline: pipeline)
- create(:ci_build, pipeline: pipeline, name: 'docker 1 2', scheduling_type: :stage, stage: build_stage, stage_idx: build_stage.position)
- create(:ci_build, pipeline: pipeline, name: 'docker 2 2', stage: build_stage, stage_idx: build_stage.position, scheduling_type: :dag)
- create(:ci_build, pipeline: pipeline, name: 'rspec 1 2', scheduling_type: :stage, stage: test_stage, stage_idx: test_stage.position)
- test_job = create(:ci_build, pipeline: pipeline, name: 'rspec 2 2', scheduling_type: :dag, stage: test_stage, stage_idx: test_stage.position)
+ create(:ci_build, pipeline: pipeline, name: 'docker 1 2', scheduling_type: :stage, ci_stage: build_stage, stage_idx: build_stage.position)
+ create(:ci_build, pipeline: pipeline, name: 'docker 2 2', ci_stage: build_stage, stage_idx: build_stage.position, scheduling_type: :dag)
+ create(:ci_build, pipeline: pipeline, name: 'rspec 1 2', scheduling_type: :stage, ci_stage: test_stage, stage_idx: test_stage.position)
+ test_job = create(:ci_build, pipeline: pipeline, name: 'rspec 2 2', scheduling_type: :dag, ci_stage: test_stage, stage_idx: test_stage.position)
create(:ci_build_need, build: test_job, name: 'my test job')
end
diff --git a/spec/requests/api/graphql/group/merge_requests_spec.rb b/spec/requests/api/graphql/group/merge_requests_spec.rb
index 6976685ecc0..adaee3031a9 100644
--- a/spec/requests/api/graphql/group/merge_requests_spec.rb
+++ b/spec/requests/api/graphql/group/merge_requests_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
# Based on ee/spec/requests/api/epics_spec.rb
# Should follow closely in order to ensure all situations are covered
-RSpec.describe 'Query.group.mergeRequests', feature_category: :code_review do
+RSpec.describe 'Query.group.mergeRequests', feature_category: :code_review_workflow do
include GraphqlHelpers
let_it_be(:group) { create(:group) }
diff --git a/spec/requests/api/graphql/group_query_spec.rb b/spec/requests/api/graphql/group_query_spec.rb
index bc288c0a98b..ce5816999a6 100644
--- a/spec/requests/api/graphql/group_query_spec.rb
+++ b/spec/requests/api/graphql/group_query_spec.rb
@@ -4,7 +4,7 @@ require 'spec_helper'
# Based on spec/requests/api/groups_spec.rb
# Should follow closely in order to ensure all situations are covered
-RSpec.describe 'getting group information', feature_category: :subgroups do
+RSpec.describe 'getting group information', :with_license, feature_category: :subgroups do
include GraphqlHelpers
include UploadHelpers
diff --git a/spec/requests/api/graphql/issues_spec.rb b/spec/requests/api/graphql/issues_spec.rb
index ba6f8ec2cab..e67c92d6c33 100644
--- a/spec/requests/api/graphql/issues_spec.rb
+++ b/spec/requests/api/graphql/issues_spec.rb
@@ -2,11 +2,13 @@
require 'spec_helper'
+# rubocop:disable RSpec/MultipleMemoizedHelpers
RSpec.describe 'getting an issue list at root level', feature_category: :team_planning do
include GraphqlHelpers
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
+ let_it_be(:current_user) { developer }
let_it_be(:group1) { create(:group).tap { |group| group.add_developer(developer) } }
let_it_be(:group2) { create(:group).tap { |group| group.add_developer(developer) } }
let_it_be(:project_a) { create(:project, :repository, :public, group: group1) }
@@ -82,9 +84,11 @@ RSpec.describe 'getting an issue list at root level', feature_category: :team_pl
end
let_it_be(:issues, reload: true) { [issue_a, issue_b, issue_c, issue_d, issue_e] }
+ # We always need to provide at least one filter so the query does not fail
+ let_it_be(:base_params) { { iids: issues.map { |issue| issue.iid.to_s } } }
let(:issue_filter_params) { {} }
- let(:current_user) { developer }
+ let(:all_query_params) { base_params.merge(**issue_filter_params) }
let(:fields) do
<<~QUERY
nodes { id }
@@ -95,6 +99,16 @@ RSpec.describe 'getting an issue list at root level', feature_category: :team_pl
group2.add_reporter(reporter)
end
+ shared_examples 'query that requires at least one filter' do
+ it 'requires at least one filter to be provided to the query' do
+ post_graphql(query, current_user: developer)
+
+ expect(graphql_errors).to contain_exactly(
+ hash_including('message' => _('You must provide at least one filter argument for this query'))
+ )
+ end
+ end
+
context 'when the root_level_issues_query feature flag is disabled' do
before do
stub_feature_flags(root_level_issues_query: false)
@@ -107,20 +121,31 @@ RSpec.describe 'getting an issue list at root level', feature_category: :team_pl
end
end
+ context 'when no filters are provided' do
+ let(:all_query_params) { {} }
+
+ it_behaves_like 'query that requires at least one filter'
+ end
+
+ context 'when only non filter arguments are provided' do
+ let(:all_query_params) { { sort: :SEVERITY_ASC } }
+
+ it_behaves_like 'query that requires at least one filter'
+ end
+
# All new specs should be added to the shared example if the change also
# affects the `issues` query at the root level of the API.
# Shared example also used in spec/requests/api/graphql/project/issues_spec.rb
it_behaves_like 'graphql issue list request spec' do
let_it_be(:external_user) { create(:user) }
+ let_it_be(:another_user) { reporter }
let(:public_projects) { [project_a, project_c] }
- let(:another_user) { reporter }
let(:issue_nodes_path) { %w[issues nodes] }
# filters
let(:expected_negated_assignee_issues) { [issue_b, issue_c, issue_d, issue_e] }
- let(:expected_unioned_assignee_issues) { [issue_a, issue_c] }
let(:voted_issues) { [issue_a, issue_c] }
let(:no_award_issues) { [issue_b, issue_d, issue_e] }
let(:locked_discussion_issues) { [issue_b, issue_d] }
@@ -148,9 +173,6 @@ RSpec.describe 'getting an issue list at root level', feature_category: :team_pl
let(:same_project_issue2) { issue_e }
before_all do
- issue_a.assignee_ids = developer.id
- issue_c.assignee_ids = reporter.id
-
create(:award_emoji, :upvote, user: developer, awardable: issue_a)
create(:award_emoji, :upvote, user: developer, awardable: issue_c)
end
@@ -158,7 +180,7 @@ RSpec.describe 'getting an issue list at root level', feature_category: :team_pl
def pagination_query(params)
graphql_query_for(
:issues,
- params,
+ base_params.merge(**params.to_h),
"#{page_info} nodes { id }"
)
end
@@ -177,6 +199,32 @@ RSpec.describe 'getting an issue list at root level', feature_category: :team_pl
end
end
+ context 'with rate limiting' do
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit, graphql: true do
+ let_it_be(:current_user) { developer }
+
+ let(:error_message) do
+ 'This endpoint has been requested with the search argument too many times. Try again later.'
+ end
+
+ def request
+ post_graphql(query({ search: 'test' }), current_user: developer)
+ end
+ end
+
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit_unauthenticated, graphql: true do
+ let_it_be(:current_user) { nil }
+
+ let(:error_message) do
+ 'This endpoint has been requested with the search argument too many times. Try again later.'
+ end
+
+ def request
+ post_graphql(query({ search: 'test' }))
+ end
+ end
+ end
+
def execute_query
post_query
end
@@ -185,7 +233,7 @@ RSpec.describe 'getting an issue list at root level', feature_category: :team_pl
post_graphql(query, current_user: request_user)
end
- def query(params = issue_filter_params)
+ def query(params = all_query_params)
graphql_query_for(
:issues,
params,
@@ -193,3 +241,4 @@ RSpec.describe 'getting an issue list at root level', feature_category: :team_pl
)
end
end
+# rubocop:enable RSpec/MultipleMemoizedHelpers
diff --git a/spec/requests/api/graphql/merge_request/merge_request_spec.rb b/spec/requests/api/graphql/merge_request/merge_request_spec.rb
index 213697bacc1..02ea7bac920 100644
--- a/spec/requests/api/graphql/merge_request/merge_request_spec.rb
+++ b/spec/requests/api/graphql/merge_request/merge_request_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Query.merge_request(id)', feature_category: :code_review do
+RSpec.describe 'Query.merge_request(id)', feature_category: :code_review_workflow do
include GraphqlHelpers
let_it_be(:project) { create(:project, :empty_repo) }
diff --git a/spec/requests/api/graphql/mutations/achievements/create_spec.rb b/spec/requests/api/graphql/mutations/achievements/create_spec.rb
new file mode 100644
index 00000000000..1713f050540
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/achievements/create_spec.rb
@@ -0,0 +1,78 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Mutations::Achievements::Create, feature_category: :users do
+ include GraphqlHelpers
+ include WorkhorseHelpers
+
+ let_it_be(:developer) { create(:user) }
+ let_it_be(:maintainer) { create(:user) }
+ let_it_be(:group) { create(:group) }
+
+ let(:mutation) { graphql_mutation(:achievements_create, params) }
+ let(:name) { 'Name' }
+ let(:description) { 'Description' }
+ let(:revokeable) { false }
+ let(:avatar) { fixture_file_upload("spec/fixtures/dk.png") }
+ let(:params) do
+ {
+ namespace_id: group.to_global_id,
+ name: name,
+ avatar: avatar,
+ description: description,
+ revokeable: revokeable
+ }
+ end
+
+ subject { post_graphql_mutation_with_uploads(mutation, current_user: current_user) }
+
+ def mutation_response
+ graphql_mutation_response(:achievements_create)
+ end
+
+ before_all do
+ group.add_developer(developer)
+ group.add_maintainer(maintainer)
+ end
+
+ context 'when the user does not have permission' do
+ let(:current_user) { developer }
+ let(:avatar) {}
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+
+ it 'does not create an achievement' do
+ expect { subject }.not_to change { Achievements::Achievement.count }
+ end
+ end
+
+ context 'when the user has permission' do
+ let(:current_user) { maintainer }
+
+ context 'when the params are invalid' do
+ let(:name) {}
+
+ it 'returns the validation error' do
+ subject
+
+ expect(graphql_errors.to_s).to include('provided invalid value for name (Expected value to not be null)')
+ end
+ end
+
+ it 'creates an achievement' do
+ expect { subject }.to change { Achievements::Achievement.count }.by(1)
+ end
+
+ it 'returns the new achievement' do
+ subject
+
+ expect(graphql_data_at(:achievements_create, :achievement)).to match a_hash_including(
+ 'name' => name,
+ 'namespace' => a_hash_including('id' => group.to_global_id.to_s),
+ 'description' => description,
+ 'revokeable' => revokeable
+ )
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/ci/job_play_spec.rb b/spec/requests/api/graphql/mutations/ci/job_play_spec.rb
index 014a5e0f1c7..9ba80e51dee 100644
--- a/spec/requests/api/graphql/mutations/ci/job_play_spec.rb
+++ b/spec/requests/api/graphql/mutations/ci/job_play_spec.rb
@@ -8,17 +8,25 @@ RSpec.describe 'JobPlay', feature_category: :continuous_integration do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project) }
let_it_be(:pipeline) { create(:ci_pipeline, project: project, user: user) }
- let_it_be(:job) { create(:ci_build, pipeline: pipeline, name: 'build') }
+ let_it_be(:job) { create(:ci_build, :playable, pipeline: pipeline, name: 'build') }
- let(:mutation) do
- variables = {
+ let(:variables) do
+ {
id: job.to_global_id.to_s
}
+ end
+
+ let(:mutation) do
graphql_mutation(:job_play, variables,
<<-QL
errors
job {
id
+ manualVariables {
+ nodes {
+ key
+ }
+ }
}
QL
)
@@ -43,4 +51,29 @@ RSpec.describe 'JobPlay', feature_category: :continuous_integration do
expect(response).to have_gitlab_http_status(:success)
expect(mutation_response['job']['id']).to eq(job_id)
end
+
+ context 'when given variables' do
+ let(:variables) do
+ {
+ id: job.to_global_id.to_s,
+ variables: [
+ { key: 'MANUAL_VAR_1', value: 'test var' },
+ { key: 'MANUAL_VAR_2', value: 'test var 2' }
+ ]
+ }
+ end
+
+ it 'provides those variables to the job', :aggregate_failures do
+ expect_next_instance_of(Ci::PlayBuildService) do |instance|
+ expect(instance).to receive(:execute).with(an_instance_of(Ci::Build), variables[:variables]).and_call_original
+ end
+
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['job']['manualVariables']['nodes'].pluck('key')).to contain_exactly(
+ 'MANUAL_VAR_1', 'MANUAL_VAR_2'
+ )
+ end
+ end
end
diff --git a/spec/requests/api/graphql/mutations/groups/update_spec.rb b/spec/requests/api/graphql/mutations/groups/update_spec.rb
index ea3d42a4463..a9acc593229 100644
--- a/spec/requests/api/graphql/mutations/groups/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/groups/update_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe 'GroupUpdate', feature_category: :subgroups do
let(:variables) do
{
full_path: group.full_path,
- shared_runners_setting: 'DISABLED_WITH_OVERRIDE'
+ shared_runners_setting: 'DISABLED_AND_OVERRIDABLE'
}
end
@@ -52,6 +52,23 @@ RSpec.describe 'GroupUpdate', feature_category: :subgroups do
expect(group.reload.shared_runners_setting).to eq(variables[:shared_runners_setting].downcase)
end
+ context 'when using DISABLED_WITH_OVERRIDE (deprecated)' do
+ let(:variables) do
+ {
+ full_path: group.full_path,
+ shared_runners_setting: 'DISABLED_WITH_OVERRIDE'
+ }
+ end
+
+ it 'updates shared runners settings with disabled_and_overridable' do
+ post_graphql_mutation(mutation, current_user: user)
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(graphql_errors).to be_nil
+ expect(group.reload.shared_runners_setting).to eq('disabled_and_overridable')
+ end
+ end
+
context 'when bad arguments are provided' do
let(:variables) { { full_path: '', shared_runners_setting: 'INVALID' } }
diff --git a/spec/requests/api/graphql/mutations/members/groups/bulk_update_spec.rb b/spec/requests/api/graphql/mutations/members/groups/bulk_update_spec.rb
new file mode 100644
index 00000000000..ad70129a7bc
--- /dev/null
+++ b/spec/requests/api/graphql/mutations/members/groups/bulk_update_spec.rb
@@ -0,0 +1,130 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'GroupMemberBulkUpdate', feature_category: :subgroups do
+ include GraphqlHelpers
+
+ let_it_be(:current_user) { create(:user) }
+ let_it_be(:user1) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:group) { create(:group) }
+ let_it_be(:group_member1) { create(:group_member, group: group, user: user1) }
+ let_it_be(:group_member2) { create(:group_member, group: group, user: user2) }
+ let_it_be(:mutation_name) { :group_member_bulk_update }
+
+ let(:input) do
+ {
+ 'group_id' => group.to_global_id.to_s,
+ 'user_ids' => [user1.to_global_id.to_s, user2.to_global_id.to_s],
+ 'access_level' => 'GUEST'
+ }
+ end
+
+ let(:extra_params) { { expires_at: 10.days.from_now } }
+ let(:input_params) { input.merge(extra_params) }
+ let(:mutation) { graphql_mutation(mutation_name, input_params) }
+ let(:mutation_response) { graphql_mutation_response(mutation_name) }
+
+ context 'when user is not logged-in' do
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when user is not an owner' do
+ before do
+ group.add_maintainer(current_user)
+ end
+
+ it_behaves_like 'a mutation that returns a top-level access error'
+ end
+
+ context 'when user is an owner' do
+ before do
+ group.add_owner(current_user)
+ end
+
+ shared_examples 'updates the user access role' do
+ specify do
+ post_graphql_mutation(mutation, current_user: current_user)
+
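+ # accessLevel is returned as an object; compare its integerValue to the GUEST access constant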
+ new_access_levels = mutation_response['groupMembers'].map { |member| member['accessLevel']['integerValue'] }
+ expect(response).to have_gitlab_http_status(:success)
+ expect(mutation_response['errors']).to be_empty
+ expect(new_access_levels).to all(be Gitlab::Access::GUEST)
+ end
+ end
+
+ it_behaves_like 'updates the user access role'
+
+ context 'when inherited members are passed' do
+ let_it_be(:subgroup) { create(:group, parent: group) }
+ let_it_be(:subgroup_member) { create(:group_member, group: subgroup) }
+
+ let(:input) do
+ {
+ 'group_id' => group.to_global_id.to_s,
+ 'user_ids' => [user1.to_global_id.to_s, user2.to_global_id.to_s, subgroup_member.user.to_global_id.to_s],
+ 'access_level' => 'GUEST'
+ }
+ end
+
+ it 'does not update the members' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ error = Mutations::Members::Groups::BulkUpdate::INVALID_MEMBERS_ERROR
+ expect(json_response['errors'].first['message']).to include(error)
+ end
+ end
+
+ context 'when members count is more than the allowed limit' do
+ let(:max_members_update_limit) { 1 }
+
+ before do
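+ # Lower the limit so the two requested members exceed it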
+ stub_const('Mutations::Members::Groups::BulkUpdate::MAX_MEMBERS_UPDATE_LIMIT', max_members_update_limit)
+ end
+
+ it 'does not update the members' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ error = Mutations::Members::Groups::BulkUpdate::MAX_MEMBERS_UPDATE_ERROR
+ expect(json_response['errors'].first['message']).to include(error)
+ end
+ end
+
+ context 'when the update service raises access denied error' do
+ before do
+ allow_next_instance_of(Members::UpdateService) do |instance|
+ allow(instance).to receive(:execute).and_raise(Gitlab::Access::AccessDeniedError)
+ end
+ end
+
+ it 'does not update the members' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['groupMembers']).to be_nil
+ expect(mutation_response['errors'])
+ .to contain_exactly("Unable to update members, please check user permissions.")
+ end
+ end
+
+ context 'when the update service returns an error message' do
+ before do
+ allow_next_instance_of(Members::UpdateService) do |instance|
+ error_result = {
+ message: 'Expires at cannot be a date in the past',
+ status: :error,
+ members: [group_member1]
+ }
+ allow(instance).to receive(:execute).and_return(error_result)
+ end
+ end
+
+ it 'passes the error message through' do
+ post_graphql_mutation(mutation, current_user: current_user)
+
+ expect(mutation_response['groupMembers'].first['id']).to eq(group_member1.to_global_id.to_s)
+ expect(mutation_response['errors']).to contain_exactly('Expires at cannot be a date in the past')
+ end
+ end
+ end
+end
diff --git a/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb
index c954fd50cc4..59f41c5e878 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/create_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Creation of a new merge request', feature_category: :code_review do
+RSpec.describe 'Creation of a new merge request', feature_category: :code_review_workflow do
include GraphqlHelpers
let_it_be(:current_user) { create(:user) }
diff --git a/spec/requests/api/graphql/mutations/merge_requests/reviewer_rereview_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/reviewer_rereview_spec.rb
index c41161eff2b..7a1b3982111 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/reviewer_rereview_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/reviewer_rereview_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Setting assignees of a merge request', feature_category: :code_review do
+RSpec.describe 'Setting assignees of a merge request', feature_category: :code_review_workflow do
include GraphqlHelpers
let(:current_user) { create(:user) }
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
index 364d13291db..b5f2042c42a 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_assignees_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Setting assignees of a merge request', :assume_throttled, feature_category: :code_review do
+RSpec.describe 'Setting assignees of a merge request', :assume_throttled, feature_category: :code_review_workflow do
include GraphqlHelpers
let_it_be(:project) { create(:project, :repository) }
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_draft_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_draft_spec.rb
index b48a94fbeb9..0c2e2975350 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/set_draft_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_draft_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Setting Draft status of a merge request', feature_category: :code_review do
+RSpec.describe 'Setting Draft status of a merge request', feature_category: :code_review_workflow do
include GraphqlHelpers
let(:current_user) { create(:user) }
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_locked_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_locked_spec.rb
index d88982c508c..73a38adf723 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/set_locked_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_locked_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Setting locked status of a merge request', feature_category: :code_review do
+RSpec.describe 'Setting locked status of a merge request', feature_category: :code_review_workflow do
include GraphqlHelpers
let(:current_user) { create(:user) }
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_milestone_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_milestone_spec.rb
index a0f0e45d1fc..3907ebad9ce 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/set_milestone_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_milestone_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Setting milestone of a merge request', feature_category: :code_review do
+RSpec.describe 'Setting milestone of a merge request', feature_category: :code_review_workflow do
include GraphqlHelpers
let(:current_user) { create(:user) }
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_reviewers_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_reviewers_spec.rb
index a5be2a95c8b..fd87112be33 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/set_reviewers_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_reviewers_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Setting reviewers of a merge request', :assume_throttled, feature_category: :code_review do
+RSpec.describe 'Setting reviewers of a merge request', :assume_throttled, feature_category: :code_review_workflow do
include GraphqlHelpers
let_it_be(:project) { create(:project, :repository) }
diff --git a/spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb b/spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb
index daf1f529847..0e77b048646 100644
--- a/spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb
+++ b/spec/requests/api/graphql/mutations/merge_requests/set_subscription_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Setting subscribed status of a merge request', feature_category: :code_review do
+RSpec.describe 'Setting subscribed status of a merge request', feature_category: :code_review_workflow do
include GraphqlHelpers
it_behaves_like 'a subscribable resource api' do
diff --git a/spec/requests/api/graphql/mutations/work_items/update_spec.rb b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
index 14cb18d04b8..b33a394d023 100644
--- a/spec/requests/api/graphql/mutations/work_items/update_spec.rb
+++ b/spec/requests/api/graphql/mutations/work_items/update_spec.rb
@@ -489,10 +489,10 @@ RSpec.describe 'Update a work item', feature_category: :team_planning do
expect(response).to have_gitlab_http_status(:success)
expect(widgets_response).to include(
{
- 'children' => { 'edges' => [
+ 'children' => { 'edges' => match_array([
{ 'node' => { 'id' => valid_child2.to_global_id.to_s } },
{ 'node' => { 'id' => valid_child1.to_global_id.to_s } }
- ] },
+ ]) },
'parent' => nil,
'type' => 'HIERARCHY'
}
diff --git a/spec/requests/api/graphql/project/branch_rules_spec.rb b/spec/requests/api/graphql/project/branch_rules_spec.rb
index 7f6a66e2377..2ca37a49149 100644
--- a/spec/requests/api/graphql/project/branch_rules_spec.rb
+++ b/spec/requests/api/graphql/project/branch_rules_spec.rb
@@ -69,12 +69,6 @@ RSpec.describe 'getting list of branch rules for a project', feature_category: :
before do
create(:protected_branch, project: project)
- allow_next_instance_of(Resolvers::ProjectResolver) do |resolver|
- allow(resolver).to receive(:resolve)
- .with(full_path: project.full_path)
- .and_return(project)
- end
- allow(project.repository).to receive(:branch_names).and_call_original
end
it 'avoids N+1 queries', :use_sql_query_cache, :aggregate_failures do
@@ -93,7 +87,6 @@ RSpec.describe 'getting list of branch rules for a project', feature_category: :
end.not_to exceed_all_query_limit(control)
expect_n_matching_branches_count_fields(3)
- expect(project.repository).to have_received(:branch_names).at_least(2).times
end
def expect_n_matching_branches_count_fields(count)
@@ -110,16 +103,16 @@ RSpec.describe 'getting list of branch rules for a project', feature_category: :
let_it_be(:branch_name_b) { 'diff-*' }
let_it_be(:branch_rules) { [branch_rule_a, branch_rule_b] }
let_it_be(:branch_rule_a) do
- create(:protected_branch, project: project, name: branch_name_a, id: 9999)
+ create(:protected_branch, project: project, name: branch_name_a)
end
let_it_be(:branch_rule_b) do
- create(:protected_branch, project: project, name: branch_name_b, id: 10000)
+ create(:protected_branch, project: project, name: branch_name_b)
end
- # branchRules are returned in reverse order, newest first, sorted by primary_key.
- let(:branch_rule_b_data) { branch_rules_data.dig(0, 'node') }
+ # branchRules are returned in alphabetical order
let(:branch_rule_a_data) { branch_rules_data.dig(1, 'node') }
+ let(:branch_rule_b_data) { branch_rules_data.dig(0, 'node') }
before do
post_graphql(query, current_user: current_user, variables: variables)
@@ -128,22 +121,28 @@ RSpec.describe 'getting list of branch rules for a project', feature_category: :
it_behaves_like 'a working graphql query'
it 'includes all fields', :use_sql_query_cache, :aggregate_failures do
- expect(branch_rule_a_data['name']).to eq(branch_name_a)
- expect(branch_rule_a_data['isDefault']).to be(true).or be(false)
- expect(branch_rule_a_data['branchProtection']).to be_present
- expect(branch_rule_a_data['matchingBranchesCount']).to eq(1)
- expect(branch_rule_a_data['createdAt']).to be_present
- expect(branch_rule_a_data['updatedAt']).to be_present
+ expect(branch_rule_a_data).to include(
+ 'name' => branch_name_a,
+ 'isDefault' => be_boolean,
+ 'isProtected' => true,
+ 'matchingBranchesCount' => 1,
+ 'branchProtection' => be_kind_of(Hash),
+ 'createdAt' => be_kind_of(String),
+ 'updatedAt' => be_kind_of(String)
+ )
wildcard_count = TestEnv::BRANCH_SHA.keys.count do |branch_name|
branch_name.starts_with?('diff-')
end
- expect(branch_rule_b_data['name']).to eq(branch_name_b)
- expect(branch_rule_b_data['isDefault']).to be(true).or be(false)
- expect(branch_rule_b_data['branchProtection']).to be_present
- expect(branch_rule_b_data['matchingBranchesCount']).to eq(wildcard_count)
- expect(branch_rule_b_data['createdAt']).to be_present
- expect(branch_rule_b_data['updatedAt']).to be_present
+ expect(branch_rule_b_data).to include(
+ 'name' => branch_name_b,
+ 'isDefault' => be_boolean,
+ 'isProtected' => true,
+ 'matchingBranchesCount' => wildcard_count,
+ 'branchProtection' => be_kind_of(Hash),
+ 'createdAt' => be_kind_of(String),
+ 'updatedAt' => be_kind_of(String)
+ )
end
context 'when limiting the number of results' do
diff --git a/spec/requests/api/graphql/project/issues_spec.rb b/spec/requests/api/graphql/project/issues_spec.rb
index ec5e3c6f0de..cc41795f770 100644
--- a/spec/requests/api/graphql/project/issues_spec.rb
+++ b/spec/requests/api/graphql/project/issues_spec.rb
@@ -91,7 +91,6 @@ RSpec.describe 'getting an issue list for a project', feature_category: :team_pl
# filters
let(:expected_negated_assignee_issues) { [issue_b, issue_c, issue_d, issue_e] }
- let(:expected_unioned_assignee_issues) { [issue_a, issue_b] }
let(:voted_issues) { [issue_a] }
let(:no_award_issues) { [issue_b, issue_c, issue_d, issue_e] }
let(:locked_discussion_issues) { [issue_a] }
@@ -119,9 +118,6 @@ RSpec.describe 'getting an issue list for a project', feature_category: :team_pl
let(:same_project_issue2) { issue_b }
before_all do
- issue_a.assignee_ids = current_user.id
- issue_b.assignee_ids = another_user.id
-
create(:award_emoji, :upvote, user: current_user, awardable: issue_a)
end
diff --git a/spec/requests/api/graphql/project/jobs_spec.rb b/spec/requests/api/graphql/project/jobs_spec.rb
index d05d4a2f4b6..aea6cad9e62 100644
--- a/spec/requests/api/graphql/project/jobs_spec.rb
+++ b/spec/requests/api/graphql/project/jobs_spec.rb
@@ -33,10 +33,10 @@ RSpec.describe 'Query.project.jobs', feature_category: :continuous_integration d
it 'does not generate N+1 queries', :request_store, :use_sql_query_cache do
build_stage = create(:ci_stage, position: 1, name: 'build', project: project, pipeline: pipeline)
test_stage = create(:ci_stage, position: 2, name: 'test', project: project, pipeline: pipeline)
- create(:ci_build, pipeline: pipeline, stage_idx: build_stage.position, name: 'docker 1 2', stage: build_stage)
- create(:ci_build, pipeline: pipeline, stage_idx: build_stage.position, name: 'docker 2 2', stage: build_stage)
- create(:ci_build, pipeline: pipeline, stage_idx: test_stage.position, name: 'rspec 1 2', stage: test_stage)
- test_job = create(:ci_build, pipeline: pipeline, stage_idx: test_stage.position, name: 'rspec 2 2', stage: test_stage)
+ create(:ci_build, pipeline: pipeline, name: 'docker 1 2', ci_stage: build_stage)
+ create(:ci_build, pipeline: pipeline, name: 'docker 2 2', ci_stage: build_stage)
+ create(:ci_build, pipeline: pipeline, name: 'rspec 1 2', ci_stage: test_stage)
+ test_job = create(:ci_build, pipeline: pipeline, name: 'rspec 2 2', ci_stage: test_stage)
create(:ci_build_need, build: test_job, name: 'docker 1 2')
post_graphql(query, current_user: user)
@@ -45,8 +45,8 @@ RSpec.describe 'Query.project.jobs', feature_category: :continuous_integration d
post_graphql(query, current_user: user)
end
- create(:ci_build, name: 'test-a', stage: test_stage, stage_idx: test_stage.position, pipeline: pipeline)
- test_b_job = create(:ci_build, name: 'test-b', stage: test_stage, stage_idx: test_stage.position, pipeline: pipeline)
+ create(:ci_build, name: 'test-a', ci_stage: test_stage, pipeline: pipeline)
+ test_b_job = create(:ci_build, name: 'test-b', ci_stage: test_stage, pipeline: pipeline)
create(:ci_build_need, build: test_b_job, name: 'docker 2 2')
expect do
diff --git a/spec/requests/api/graphql/project/merge_request/diff_notes_spec.rb b/spec/requests/api/graphql/project/merge_request/diff_notes_spec.rb
index 36e148468bc..4884e04ab23 100644
--- a/spec/requests/api/graphql/project/merge_request/diff_notes_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request/diff_notes_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'getting notes for a merge request', feature_category: :code_review do
+RSpec.describe 'getting notes for a merge request', feature_category: :code_review_workflow do
include GraphqlHelpers
let_it_be(:noteable) { create(:merge_request) }
diff --git a/spec/requests/api/graphql/project/merge_request_spec.rb b/spec/requests/api/graphql/project/merge_request_spec.rb
index b7aafdf305a..6aa96cfc070 100644
--- a/spec/requests/api/graphql/project/merge_request_spec.rb
+++ b/spec/requests/api/graphql/project/merge_request_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'getting merge request information nested in a project', feature_category: :code_review do
+RSpec.describe 'getting merge request information nested in a project', feature_category: :code_review_workflow do
include GraphqlHelpers
let_it_be(:project) { create(:project, :repository, :public) }
diff --git a/spec/requests/api/graphql/project/merge_requests_spec.rb b/spec/requests/api/graphql/project/merge_requests_spec.rb
index b3b4c8fe0d5..8407faa967e 100644
--- a/spec/requests/api/graphql/project/merge_requests_spec.rb
+++ b/spec/requests/api/graphql/project/merge_requests_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'getting merge request listings nested in a project', feature_category: :code_review do
+RSpec.describe 'getting merge request listings nested in a project', feature_category: :code_review_workflow do
include GraphqlHelpers
let_it_be(:group) { create(:group) }
diff --git a/spec/requests/api/graphql/project/pipeline_spec.rb b/spec/requests/api/graphql/project/pipeline_spec.rb
index 0eeb382510e..abfdf07c288 100644
--- a/spec/requests/api/graphql/project/pipeline_spec.rb
+++ b/spec/requests/api/graphql/project/pipeline_spec.rb
@@ -348,10 +348,10 @@ RSpec.describe 'getting pipeline information nested in a project', feature_categ
it 'does not generate N+1 queries', :request_store, :use_sql_query_cache do
build_stage = create(:ci_stage, position: 1, name: 'build', project: project, pipeline: pipeline)
test_stage = create(:ci_stage, position: 2, name: 'test', project: project, pipeline: pipeline)
- create(:ci_build, pipeline: pipeline, stage_idx: build_stage.position, name: 'docker 1 2', stage: build_stage)
- create(:ci_build, pipeline: pipeline, stage_idx: build_stage.position, name: 'docker 2 2', stage: build_stage)
- create(:ci_build, pipeline: pipeline, stage_idx: test_stage.position, name: 'rspec 1 2', stage: test_stage)
- test_job = create(:ci_build, pipeline: pipeline, stage_idx: test_stage.position, name: 'rspec 2 2', stage: test_stage)
+ create(:ci_build, pipeline: pipeline, name: 'docker 1 2', ci_stage: build_stage)
+ create(:ci_build, pipeline: pipeline, name: 'docker 2 2', ci_stage: build_stage)
+ create(:ci_build, pipeline: pipeline, name: 'rspec 1 2', ci_stage: test_stage)
+ test_job = create(:ci_build, pipeline: pipeline, name: 'rspec 2 2', ci_stage: test_stage)
create(:ci_build_need, build: test_job, name: 'docker 1 2')
post_graphql(query, current_user: current_user)
@@ -360,8 +360,8 @@ RSpec.describe 'getting pipeline information nested in a project', feature_categ
post_graphql(query, current_user: current_user)
end
- create(:ci_build, name: 'test-a', stage: test_stage, stage_idx: test_stage.position, pipeline: pipeline)
- test_b_job = create(:ci_build, name: 'test-b', stage: test_stage, stage_idx: test_stage.position, pipeline: pipeline)
+ create(:ci_build, name: 'test-a', ci_stage: test_stage, pipeline: pipeline)
+ test_b_job = create(:ci_build, name: 'test-b', ci_stage: test_stage, pipeline: pipeline)
create(:ci_build_need, build: test_b_job, name: 'docker 2 2')
expect do
@@ -409,7 +409,8 @@ RSpec.describe 'getting pipeline information nested in a project', feature_categ
it 'does not generate N+1 queries', :request_store, :use_sql_query_cache do
# create extra statuses
- create(:generic_commit_status, :pending, name: 'generic-build-a', pipeline: pipeline, stage_idx: 0, stage: 'build')
+ external_stage = create(:ci_stage, position: 10, name: 'external', project: project, pipeline: pipeline)
+ create(:generic_commit_status, :pending, name: 'generic-build-a', pipeline: pipeline, ci_stage: external_stage)
create(:ci_bridge, :failed, name: 'deploy-a', pipeline: pipeline, stage_idx: 2, stage: 'deploy')
# warm up
@@ -419,7 +420,7 @@ RSpec.describe 'getting pipeline information nested in a project', feature_categ
post_graphql(query, current_user: current_user)
end
- create(:generic_commit_status, :pending, name: 'generic-build-b', pipeline: pipeline, stage_idx: 0, stage: 'build')
+ create(:generic_commit_status, :pending, name: 'generic-build-b', pipeline: pipeline, ci_stage: external_stage)
create(:ci_build, :failed, name: 'test-a', pipeline: pipeline, stage_idx: 1, stage: 'test')
create(:ci_build, :running, name: 'test-b', pipeline: pipeline, stage_idx: 1, stage: 'test')
create(:ci_build, :pending, name: 'deploy-b', pipeline: pipeline, stage_idx: 2, stage: 'deploy')
diff --git a/spec/requests/api/graphql/project/runners_spec.rb b/spec/requests/api/graphql/project/runners_spec.rb
index 7304de7bec6..bee7ce2e372 100644
--- a/spec/requests/api/graphql/project/runners_spec.rb
+++ b/spec/requests/api/graphql/project/runners_spec.rb
@@ -53,16 +53,4 @@ RSpec.describe 'Project.runners', feature_category: :runner do
expect(graphql_data_at(:project, :runners, :nodes)).to be_empty
end
end
-
- context 'when on_demand_scans_runner_tags feature flag is disabled' do
- before do
- stub_feature_flags(on_demand_scans_runner_tags: false)
- end
-
- it 'returns no runners' do
- post_graphql(query, current_user: user)
-
- expect(graphql_data_at(:project, :runners, :nodes)).to be_empty
- end
- end
end
diff --git a/spec/requests/api/graphql/project/work_items_spec.rb b/spec/requests/api/graphql/project/work_items_spec.rb
index a59da706a8a..de35c943749 100644
--- a/spec/requests/api/graphql/project/work_items_spec.rb
+++ b/spec/requests/api/graphql/project/work_items_spec.rb
@@ -263,7 +263,7 @@ RSpec.describe 'getting a work item list for a project', feature_category: :team
GRAPHQL
end
- before do
+ before_all do
create_notes(item1, "some note1")
create_notes(item2, "some note2")
end
diff --git a/spec/requests/api/graphql/user_spec.rb b/spec/requests/api/graphql/user_spec.rb
index 2e1e4971767..3e82d783a18 100644
--- a/spec/requests/api/graphql/user_spec.rb
+++ b/spec/requests/api/graphql/user_spec.rb
@@ -58,4 +58,45 @@ RSpec.describe 'User', feature_category: :users do
)
end
end
+
+ describe 'email fields' do
+ before_all do
+ current_user.commit_email = current_user.emails.first.email
+ current_user.save!
+ end
+
+ let_it_be(:query) do
+ graphql_query_for(
+ :user,
+ { username: current_user.username },
+ 'emails { nodes { email } } commitEmail namespaceCommitEmails { nodes { id } }'
+ )
+ end
+
+ let_it_be(:email_1) { create(:email, user: current_user) }
+ let_it_be(:email_2) { create(:email, user: current_user) }
+ let_it_be(:namespace_commit_email_1) { create(:namespace_commit_email, email: email_1) }
+ let_it_be(:namespace_commit_email_2) { create(:namespace_commit_email, email: email_2) }
+
+ context 'with permission' do
+ it 'returns the relevant email details' do
+ post_graphql(query, current_user: current_user)
+
+ expect(graphql_data['user']['emails']['nodes'].pluck('email')).to match_array(
+ current_user.emails.map(&:email))
+ expect(graphql_data['user']['namespaceCommitEmails']['nodes']).not_to be_empty
+ expect(graphql_data['user']['commitEmail']).to eq(current_user.commit_email)
+ end
+ end
+
+ context 'without permission' do
+ it 'does not return email details' do
+ post_graphql(query, current_user: create(:user))
+
+ expect(graphql_data['user']['emails']['nodes']).to be_empty
+ expect(graphql_data['user']['namespaceCommitEmails']['nodes']).to be_empty
+ expect(graphql_data['user']['commitEmail']).to be_nil
+ end
+ end
+ end
end
diff --git a/spec/requests/api/graphql/work_item_spec.rb b/spec/requests/api/graphql/work_item_spec.rb
index df7dbaea420..6b5d437df83 100644
--- a/spec/requests/api/graphql/work_item_spec.rb
+++ b/spec/requests/api/graphql/work_item_spec.rb
@@ -193,6 +193,24 @@ RSpec.describe 'Query.work_item(id)', feature_category: :team_planning do
)
end
end
+
+ context 'when ordered by default by created_at' do
+ let_it_be(:newest_child) { create(:work_item, :task, project: project, created_at: 5.minutes.from_now) }
+ let_it_be(:oldest_child) { create(:work_item, :task, project: project, created_at: 5.minutes.ago) }
+ let_it_be(:newest_link) { create(:parent_link, work_item_parent: work_item, work_item: newest_child) }
+ let_it_be(:oldest_link) { create(:parent_link, work_item_parent: work_item, work_item: oldest_child) }
+
+ let(:hierarchy_widget) { work_item_data['widgets'].find { |widget| widget['type'] == 'HIERARCHY' } }
+ let(:hierarchy_children) { hierarchy_widget['children']['nodes'] }
+
+ it 'places the oldest child item to the beginning of the children list' do
+ expect(hierarchy_children.first['id']).to eq(oldest_child.to_gid.to_s)
+ end
+
+ it 'places the newest child item to the end of the children list' do
+ expect(hierarchy_children.last['id']).to eq(newest_child.to_gid.to_s)
+ end
+ end
end
describe 'assignees widget' do
diff --git a/spec/requests/api/group_boards_spec.rb b/spec/requests/api/group_boards_spec.rb
index 01f0e6e2061..acc30b2c137 100644
--- a/spec/requests/api/group_boards_spec.rb
+++ b/spec/requests/api/group_boards_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::GroupBoards, feature_category: :team_planning do
+RSpec.describe API::GroupBoards, :with_license, feature_category: :team_planning do
let_it_be(:user) { create(:user) }
let_it_be(:non_member) { create(:user) }
let_it_be(:guest) { create(:user) }
diff --git a/spec/requests/api/group_export_spec.rb b/spec/requests/api/group_export_spec.rb
index 565365506a7..9dd5fe6f7c4 100644
--- a/spec/requests/api/group_export_spec.rb
+++ b/spec/requests/api/group_export_spec.rb
@@ -173,6 +173,8 @@ RSpec.describe API::GroupExport, feature_category: :importers do
let(:status_path) { "/groups/#{group.id}/export_relations/status" }
before do
+ stub_application_setting(bulk_import_enabled: true)
+
group.add_owner(user)
end
@@ -212,11 +214,12 @@ RSpec.describe API::GroupExport, feature_category: :importers do
context 'when export_file.file does not exist' do
it 'returns 404' do
- allow(upload).to receive(:export_file).and_return(nil)
+ allow(export).to receive(:upload).and_return(nil)
get api(download_path, user)
expect(response).to have_gitlab_http_status(:not_found)
+ expect(json_response['message']).to eq('404 Not found')
end
end
end
@@ -234,5 +237,11 @@ RSpec.describe API::GroupExport, feature_category: :importers do
expect(json_response.pluck('status')).to contain_exactly(-1, 0, 1)
end
end
+
+ context 'when bulk import is disabled' do
+ it_behaves_like '404 response' do
+ let(:request) { get api(path, user) }
+ end
+ end
end
end
diff --git a/spec/requests/api/import_github_spec.rb b/spec/requests/api/import_github_spec.rb
index dce82f1cf37..0d75bb94144 100644
--- a/spec/requests/api/import_github_spec.rb
+++ b/spec/requests/api/import_github_spec.rb
@@ -6,33 +6,35 @@ RSpec.describe API::ImportGithub, feature_category: :importers do
let(:token) { "asdasd12345" }
let(:provider) { :github }
let(:access_params) { { github_access_token: token } }
+ let(:provider_username) { user.username }
+ let(:provider_user) { double('provider', login: provider_username).as_null_object }
+ let(:provider_repo) do
+ {
+ name: 'vim',
+ full_name: "#{provider_username}/vim",
+ owner: double('provider', login: provider_username),
+ description: 'provider',
+ private: false,
+ clone_url: 'https://fake.url/vim.git',
+ has_wiki: true
+ }
+ end
- describe "POST /import/github" do
- let(:user) { create(:user) }
- let(:project) { create(:project) }
- let(:provider_username) { user.username }
- let(:provider_user) { double('provider', login: provider_username) }
- let(:provider_repo) do
- {
- name: 'vim',
- full_name: "#{provider_username}/vim",
- owner: double('provider', login: provider_username),
- description: 'provider',
- private: false,
- clone_url: 'https://fake.url/vim.git',
- has_wiki: true
- }
- end
+ let(:client) { double('client', user: provider_user, repository: provider_repo) }
- before do
- Grape::Endpoint.before_each do |endpoint|
- allow(endpoint).to receive(:client).and_return(double('client', user: provider_user, repository: provider_repo).as_null_object)
- end
+ before do
+ Grape::Endpoint.before_each do |endpoint|
+ allow(endpoint).to receive(:client).and_return(client)
end
+ end
- after do
- Grape::Endpoint.before_each nil
- end
+ after do
+ Grape::Endpoint.before_each nil
+ end
+
+ describe "POST /import/github" do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
it 'rejects requests when Github Importer is disabled' do
stub_application_setting(import_sources: nil)
@@ -90,6 +92,23 @@ RSpec.describe API::ImportGithub, feature_category: :importers do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
end
+ context 'when target_namespace is blank' do
+ it 'returns 400 response' do
+ allow(Gitlab::LegacyGithubImport::ProjectCreator)
+ .to receive(:new).with(provider_repo, provider_repo[:name], user.namespace, user, type: provider, **access_params)
+ .and_return(double(execute: project))
+
+ post api("/import/github", user), params: {
+ target_namespace: '',
+ personal_access_token: token,
+ repo_id: non_existing_record_id
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq 'target_namespace is empty'
+ end
+ end
+
context 'when unauthenticated user' do
it 'returns 403 response' do
post api("/import/github"), params: {
@@ -150,4 +169,78 @@ RSpec.describe API::ImportGithub, feature_category: :importers do
end
end
end
+
+ describe 'POST /import/github/gists' do
+ let_it_be(:user) { create(:user) }
+ let(:params) { { personal_access_token: token } }
+
+ context 'when feature github_import_gists is enabled' do
+ before do
+ stub_feature_flags(github_import_gists: true)
+ end
+
+ context 'when gists import was started' do
+ before do
+ allow(Import::Github::GistsImportService)
+ .to receive(:new).with(user, client, access_params)
+ .and_return(double(execute: { status: :success }))
+ end
+
+ it 'returns 202' do
+ post api('/import/github/gists', user), params: params
+
+ expect(response).to have_gitlab_http_status(:accepted)
+ end
+ end
+
+ context 'when gists import is in progress' do
+ before do
+ allow(Import::Github::GistsImportService)
+ .to receive(:new).with(user, client, access_params)
+ .and_return(double(execute: { status: :error, message: 'Import already in progress', http_status: :unprocessable_entity }))
+ end
+
+ it 'returns 422 error' do
+ post api('/import/github/gists', user), params: params
+
+ expect(response).to have_gitlab_http_status(:unprocessable_entity)
+ expect(json_response['errors']).to eq('Import already in progress')
+ end
+ end
+
+ context 'when unauthenticated user' do
+ it 'returns 401 error' do
+ post api('/import/github/gists'), params: params
+
+ expect(response).to have_gitlab_http_status(:unauthorized)
+ end
+ end
+
+ context 'when rate limit reached' do
+ before do
+ allow(Import::Github::GistsImportService)
+ .to receive(:new).with(user, client, access_params)
+ .and_raise(Gitlab::GithubImport::RateLimitError)
+ end
+
+ it 'returns 429 error' do
+ post api('/import/github/gists', user), params: params
+
+ expect(response).to have_gitlab_http_status(:too_many_requests)
+ end
+ end
+ end
+
+ context 'when feature github_import_gists is disabled' do
+ before do
+ stub_feature_flags(github_import_gists: false)
+ end
+
+ it 'returns 404 error' do
+ post api('/import/github/gists', user), params: params
+
+ expect(response).to have_gitlab_http_status(:not_found)
+ end
+ end
+ end
end
diff --git a/spec/requests/api/internal/base_spec.rb b/spec/requests/api/internal/base_spec.rb
index f9284f21aaa..767f3e8b5b5 100644
--- a/spec/requests/api/internal/base_spec.rb
+++ b/spec/requests/api/internal/base_spec.rb
@@ -453,28 +453,10 @@ RSpec.describe API::Internal::Base, feature_category: :authentication_and_author
expect(json_response['message']['error']).to eq('This endpoint has been requested too many times. Try again later.')
end
- context 'when rate_limit_gitlab_shell feature flag is disabled' do
- before do
- stub_feature_flags(rate_limit_gitlab_shell: false)
- end
-
- it 'is not throttled by rate limiter' do
- expect(::Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
-
- subject
- end
- end
+ it 'is not throttled by rate limiter' do
+ expect(::Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
- context 'when rate_limit_gitlab_shell_by_ip feature flag is disabled' do
- before do
- stub_feature_flags(rate_limit_gitlab_shell_by_ip: false)
- end
-
- it 'is not throttled by rate limiter' do
- expect(::Gitlab::ApplicationRateLimiter).not_to receive(:throttled?)
-
- subject
- end
+ subject
end
context 'when the IP is in a trusted range' do
diff --git a/spec/requests/api/issues/get_project_issues_spec.rb b/spec/requests/api/issues/get_project_issues_spec.rb
index 70966d23576..6fc3903103b 100644
--- a/spec/requests/api/issues/get_project_issues_spec.rb
+++ b/spec/requests/api/issues/get_project_issues_spec.rb
@@ -11,15 +11,24 @@ RSpec.describe API::Issues, feature_category: :team_planning do
let_it_be(:group) { create(:group, :public) }
- let(:user2) { create(:user) }
- let(:non_member) { create(:user) }
+ let_it_be(:user2) { create(:user) }
+ let_it_be(:non_member) { create(:user) }
let_it_be(:guest) { create(:user) }
let_it_be(:author) { create(:author) }
let_it_be(:assignee) { create(:assignee) }
- let(:admin) { create(:user, :admin) }
- let(:issue_title) { 'foo' }
- let(:issue_description) { 'closed' }
- let!(:closed_issue) do
+ let_it_be(:admin) { create(:user, :admin) }
+
+ let_it_be(:milestone) { create(:milestone, title: '1.0.0', project: project) }
+ let_it_be(:empty_milestone) do
+ create(:milestone, title: '2.0.0', project: project)
+ end
+
+ let(:no_milestone_title) { 'None' }
+ let(:any_milestone_title) { 'Any' }
+
+ let_it_be(:issue_title) { 'foo' }
+ let_it_be(:issue_description) { 'closed' }
+ let_it_be(:closed_issue) do
create :closed_issue,
author: user,
assignees: [user],
@@ -31,7 +40,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
closed_at: 1.hour.ago
end
- let!(:confidential_issue) do
+ let_it_be(:confidential_issue) do
create :issue,
:confidential,
project: project,
@@ -41,7 +50,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
updated_at: 2.hours.ago
end
- let!(:issue) do
+ let_it_be(:issue) do
create :issue,
author: user,
assignees: [user],
@@ -53,22 +62,12 @@ RSpec.describe API::Issues, feature_category: :team_planning do
description: issue_description
end
- let_it_be(:label) do
- create(:label, title: 'label', color: '#FFAABB', project: project)
- end
+ let_it_be(:label) { create(:label, title: 'label', color: '#FFAABB', project: project) }
+ let_it_be(:label_link) { create(:label_link, label: label, target: issue) }
- let!(:label_link) { create(:label_link, label: label, target: issue) }
- let(:milestone) { create(:milestone, title: '1.0.0', project: project) }
- let_it_be(:empty_milestone) do
- create(:milestone, title: '2.0.0', project: project)
- end
+ let_it_be(:note) { create(:note_on_issue, author: user, project: project, noteable: issue) }
- let!(:note) { create(:note_on_issue, author: user, project: project, noteable: issue) }
-
- let(:no_milestone_title) { 'None' }
- let(:any_milestone_title) { 'Any' }
-
- let!(:merge_request1) do
+ let_it_be(:merge_request1) do
create(:merge_request,
:simple,
author: user,
@@ -77,7 +76,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
description: "closes #{issue.to_reference}")
end
- let!(:merge_request2) do
+ let_it_be(:merge_request2) do
create(:merge_request,
:simple,
author: user,
@@ -101,7 +100,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
shared_examples 'project issues statistics' do
it 'returns project issues statistics' do
- get api("/issues_statistics", user), params: params
+ get api("/projects/#{project.id}/issues_statistics", current_user), params: params
expect(response).to have_gitlab_http_status(:ok)
expect(json_response['statistics']).not_to be_nil
@@ -138,6 +137,8 @@ RSpec.describe API::Issues, feature_category: :team_planning do
end
context 'issues_statistics' do
+ let(:current_user) { nil }
+
context 'no state is treated as all state' do
let(:params) { {} }
let(:counts) { { all: 2, closed: 1, opened: 1 } }
@@ -534,30 +535,32 @@ RSpec.describe API::Issues, feature_category: :team_planning do
end
context 'issues_statistics' do
+ let(:current_user) { user }
+
context 'no state is treated as all state' do
let(:params) { {} }
- let(:counts) { { all: 2, closed: 1, opened: 1 } }
+ let(:counts) { { all: 3, closed: 1, opened: 2 } }
it_behaves_like 'project issues statistics'
end
context 'statistics when all state is passed' do
let(:params) { { state: :all } }
- let(:counts) { { all: 2, closed: 1, opened: 1 } }
+ let(:counts) { { all: 3, closed: 1, opened: 2 } }
it_behaves_like 'project issues statistics'
end
context 'closed state is treated as all state' do
let(:params) { { state: :closed } }
- let(:counts) { { all: 2, closed: 1, opened: 1 } }
+ let(:counts) { { all: 3, closed: 1, opened: 2 } }
it_behaves_like 'project issues statistics'
end
context 'opened state is treated as all state' do
let(:params) { { state: :opened } }
- let(:counts) { { all: 2, closed: 1, opened: 1 } }
+ let(:counts) { { all: 3, closed: 1, opened: 2 } }
it_behaves_like 'project issues statistics'
end
@@ -592,7 +595,7 @@ RSpec.describe API::Issues, feature_category: :team_planning do
context 'sort does not affect statistics ' do
let(:params) { { state: :opened, order_by: 'updated_at' } }
- let(:counts) { { all: 2, closed: 1, opened: 1 } }
+ let(:counts) { { all: 3, closed: 1, opened: 2 } }
it_behaves_like 'project issues statistics'
end
diff --git a/spec/requests/api/issues/issues_spec.rb b/spec/requests/api/issues/issues_spec.rb
index 94f0443e14a..b89db82b150 100644
--- a/spec/requests/api/issues/issues_spec.rb
+++ b/spec/requests/api/issues/issues_spec.rb
@@ -145,6 +145,11 @@ RSpec.describe API::Issues, feature_category: :team_planning do
let(:result) { issuable.id }
end
+ it_behaves_like 'issuable API rate-limited search' do
+ let(:url) { '/issues' }
+ let(:issuable) { issue }
+ end
+
it 'returns authentication error without any scope' do
get api('/issues')
diff --git a/spec/requests/api/markdown_golden_master_spec.rb b/spec/requests/api/markdown_golden_master_spec.rb
deleted file mode 100644
index 1bb5a1d67ae..00000000000
--- a/spec/requests/api/markdown_golden_master_spec.rb
+++ /dev/null
@@ -1,9 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-# See spec/fixtures/markdown/markdown_golden_master_examples.yml for documentation on how this spec works.
-RSpec.describe API::Markdown, 'Golden Master', feature_category: :team_planning do
- markdown_yml_file_path = File.expand_path('../../fixtures/markdown/markdown_golden_master_examples.yml', __dir__)
- include_context 'API::Markdown Golden Master shared context', markdown_yml_file_path
-end
diff --git a/spec/requests/api/merge_requests_spec.rb b/spec/requests/api/merge_requests_spec.rb
index 0b69000ae7e..4cd93603c31 100644
--- a/spec/requests/api/merge_requests_spec.rb
+++ b/spec/requests/api/merge_requests_spec.rb
@@ -55,6 +55,11 @@ RSpec.describe API::MergeRequests, feature_category: :source_code_management do
let(:issuable) { merge_request }
let(:result) { [merge_request_merged.id, merge_request_locked.id, merge_request_closed.id, merge_request.id] }
end
+
+ it_behaves_like 'issuable API rate-limited search' do
+ let(:url) { endpoint_path }
+ let(:issuable) { merge_request }
+ end
end
context 'when authenticated' do
@@ -663,6 +668,11 @@ RSpec.describe API::MergeRequests, feature_category: :source_code_management do
let(:result) { [merge_request_merged.id, merge_request_locked.id, merge_request_closed.id, merge_request.id] }
end
+ it_behaves_like 'issuable API rate-limited search' do
+ let(:url) { '/merge_requests' }
+ let(:issuable) { merge_request }
+ end
+
it "returns authentication error without any scope" do
get api("/merge_requests")
diff --git a/spec/requests/api/ml/mlflow_spec.rb b/spec/requests/api/ml/mlflow_spec.rb
index c1ed7d56ba4..fdf115f7e92 100644
--- a/spec/requests/api/ml/mlflow_spec.rb
+++ b/spec/requests/api/ml/mlflow_spec.rb
@@ -347,6 +347,7 @@ RSpec.describe API::Ml::Mlflow, feature_category: :mlops do
{
experiment_id: experiment.iid.to_s,
start_time: Time.now.to_i,
+ run_name: "A new Run",
tags: [
{ key: 'hello', value: 'world' }
]
@@ -359,6 +360,7 @@ RSpec.describe API::Ml::Mlflow, feature_category: :mlops do
expected_properties = {
'experiment_id' => params[:experiment_id],
'user_id' => current_user.id.to_s,
+ 'run_name' => "A new Run",
'start_time' => params[:start_time],
'status' => 'RUNNING',
'lifecycle_stage' => 'active'
@@ -407,7 +409,7 @@ RSpec.describe API::Ml::Mlflow, feature_category: :mlops do
'experiment_id' => candidate.experiment.iid.to_s,
'user_id' => candidate.user.id.to_s,
'start_time' => candidate.start_time,
- 'artifact_uri' => "http://www.example.com/api/v4/projects/#{project_id}/packages/generic/ml_candidate_#{candidate.iid}/-/",
+ 'artifact_uri' => "http://www.example.com/api/v4/projects/#{project_id}/packages/generic/ml_candidate_#{candidate.id}/-/",
'status' => "RUNNING",
'lifecycle_stage' => "active"
}
@@ -426,8 +428,8 @@ RSpec.describe API::Ml::Mlflow, feature_category: :mlops do
{ 'key' => candidate.params[1].name, 'value' => candidate.params[1].value }
],
'tags' => [
- { 'key' => 'metadata_1', 'value' => 'value1' },
- { 'key' => 'metadata_2', 'value' => 'value2' }
+ { 'key' => candidate.metadata[0].name, 'value' => candidate.metadata[0].value },
+ { 'key' => candidate.metadata[1].name, 'value' => candidate.metadata[1].value }
]
})
end
@@ -450,7 +452,7 @@ RSpec.describe API::Ml::Mlflow, feature_category: :mlops do
'user_id' => candidate.user.id.to_s,
'start_time' => candidate.start_time,
'end_time' => params[:end_time],
- 'artifact_uri' => "http://www.example.com/api/v4/projects/#{project_id}/packages/generic/ml_candidate_#{candidate.iid}/-/",
+ 'artifact_uri' => "http://www.example.com/api/v4/projects/#{project_id}/packages/generic/ml_candidate_#{candidate.id}/-/",
'status' => 'FAILED',
'lifecycle_stage' => 'active'
}
diff --git a/spec/requests/api/nuget_group_packages_spec.rb b/spec/requests/api/nuget_group_packages_spec.rb
index 9de612f7bc7..4335ad75ab6 100644
--- a/spec/requests/api/nuget_group_packages_spec.rb
+++ b/spec/requests/api/nuget_group_packages_spec.rb
@@ -17,25 +17,51 @@ RSpec.describe API::NugetGroupPackages, feature_category: :package_registry do
shared_examples 'handling all endpoints' do
describe 'GET /api/v4/groups/:id/-/packages/nuget' do
- it_behaves_like 'handling nuget service requests', anonymous_requests_example_name: 'rejects nuget packages access', anonymous_requests_status: :unauthorized do
+ it_behaves_like 'handling nuget service requests',
+ example_names_with_status: {
+ anonymous_requests_example_name: 'rejects nuget packages access',
+ anonymous_requests_status: :unauthorized,
+ guest_requests_example_name: 'process nuget service index request',
+ guest_requests_status: :success
+ } do
let(:url) { "/groups/#{target.id}/-/packages/nuget/index.json" }
end
end
describe 'GET /api/v4/groups/:id/-/packages/nuget/metadata/*package_name/index' do
- it_behaves_like 'handling nuget metadata requests with package name', anonymous_requests_example_name: 'rejects nuget packages access', anonymous_requests_status: :unauthorized do
+ it_behaves_like 'handling nuget metadata requests with package name',
+ example_names_with_status:
+ {
+ anonymous_requests_example_name: 'rejects nuget packages access',
+ anonymous_requests_status: :unauthorized,
+ guest_requests_example_name: 'rejects nuget packages access',
+ guest_requests_status: :not_found
+ } do
let(:url) { "/groups/#{target.id}/-/packages/nuget/metadata/#{package_name}/index.json" }
end
end
describe 'GET /api/v4/groups/:id/-/packages/nuget/metadata/*package_name/*package_version' do
- it_behaves_like 'handling nuget metadata requests with package name and package version', anonymous_requests_example_name: 'rejects nuget packages access', anonymous_requests_status: :unauthorized do
+ it_behaves_like 'handling nuget metadata requests with package name and package version',
+ example_names_with_status:
+ {
+ anonymous_requests_example_name: 'rejects nuget packages access',
+ anonymous_requests_status: :unauthorized,
+ guest_requests_example_name: 'rejects nuget packages access',
+ guest_requests_status: :not_found
+ } do
let(:url) { "/groups/#{target.id}/-/packages/nuget/metadata/#{package_name}/#{package.version}.json" }
end
end
describe 'GET /api/v4/groups/:id/-/packages/nuget/query' do
- it_behaves_like 'handling nuget search requests', anonymous_requests_example_name: 'rejects nuget packages access', anonymous_requests_status: :unauthorized do
+ it_behaves_like 'handling nuget search requests',
+ example_names_with_status: {
+ anonymous_requests_example_name: 'rejects nuget packages access',
+ anonymous_requests_status: :unauthorized,
+ guest_requests_example_name: 'process empty nuget search request',
+ guest_requests_status: :success
+ } do
let(:url) { "/groups/#{target.id}/-/packages/nuget/query?#{query_parameters.to_query}" }
end
end
@@ -133,13 +159,13 @@ RSpec.describe API::NugetGroupPackages, feature_category: :package_registry do
describe 'GET /api/v4/groups/:id/-/packages/nuget/metadata/*package_name/index' do
let(:url) { "/groups/#{group.id}/-/packages/nuget/metadata/#{package_name}/index.json" }
- it_behaves_like 'returning response status', :forbidden
+ it_behaves_like 'returning response status', :success
end
describe 'GET /api/v4/groups/:id/-/packages/nuget/metadata/*package_name/*package_version' do
let(:url) { "/groups/#{group.id}/-/packages/nuget/metadata/#{package_name}/#{package.version}.json" }
- it_behaves_like 'returning response status', :forbidden
+ it_behaves_like 'returning response status', :success
end
describe 'GET /api/v4/groups/:id/-/packages/nuget/query' do
@@ -150,7 +176,7 @@ RSpec.describe API::NugetGroupPackages, feature_category: :package_registry do
let(:query_parameters) { { q: search_term, take: take, skip: skip, prerelease: include_prereleases }.compact }
let(:url) { "/groups/#{group.id}/-/packages/nuget/query?#{query_parameters.to_query}" }
- it_behaves_like 'returning response status', :forbidden
+ it_behaves_like 'returning response status', :success
end
end
diff --git a/spec/requests/api/pages_domains_spec.rb b/spec/requests/api/pages_domains_spec.rb
index 65fcf9e006a..ba1fb5105b8 100644
--- a/spec/requests/api/pages_domains_spec.rb
+++ b/spec/requests/api/pages_domains_spec.rb
@@ -265,6 +265,7 @@ RSpec.describe API::PagesDomains, feature_category: :pages do
project_id: project.id,
namespace_id: project.namespace.id,
root_namespace_id: project.root_namespace.id,
+ domain_id: kind_of(Numeric),
domain: params[:domain]
)
@@ -393,6 +394,7 @@ RSpec.describe API::PagesDomains, feature_category: :pages do
project_id: project.id,
namespace_id: project.namespace.id,
root_namespace_id: project.root_namespace.id,
+ domain_id: pages_domain_secure.id,
domain: pages_domain_secure.domain
)
end
@@ -556,6 +558,7 @@ RSpec.describe API::PagesDomains, feature_category: :pages do
project_id: project.id,
namespace_id: project.namespace.id,
root_namespace_id: project.root_namespace.id,
+ domain_id: pages_domain.id,
domain: pages_domain.domain
)
diff --git a/spec/requests/api/project_debian_distributions_spec.rb b/spec/requests/api/project_debian_distributions_spec.rb
index 9807f177c5d..dfe93e9fbad 100644
--- a/spec/requests/api/project_debian_distributions_spec.rb
+++ b/spec/requests/api/project_debian_distributions_spec.rb
@@ -5,7 +5,17 @@ RSpec.describe API::ProjectDebianDistributions, feature_category: :package_regis
include HttpBasicAuthHelpers
include WorkhorseHelpers
- include_context 'Debian repository shared context', :project, true do
+ include_context 'Debian repository shared context', :project, false do
+ shared_examples 'accept GET request on private project with access to package registry for everyone' do
+ include_context 'Debian repository access', :private, :anonymous, :basic do
+ before do
+ container.project_feature.reload.update!(package_registry_access_level: ProjectFeature::PUBLIC)
+ end
+
+ it_behaves_like 'Debian distributions GET request', :success
+ end
+ end
+
describe 'POST projects/:id/debian_distributions' do
let(:method) { :post }
let(:url) { "/projects/#{container.id}/debian_distributions" }
@@ -18,24 +28,37 @@ RSpec.describe API::ProjectDebianDistributions, feature_category: :package_regis
it_behaves_like 'Debian distributions write endpoint', 'GET', :bad_request, /^{"message":{"codename":\["has already been taken"\]}}$/
end
+
+ context 'with access to package registry for everyone' do
+ include_context 'Debian repository access', :private, :anonymous, :basic do
+ before do
+ container.project_feature.reload.update!(package_registry_access_level: ProjectFeature::PUBLIC)
+ end
+
+ it_behaves_like 'Debian distributions POST request', :not_found
+ end
+ end
end
describe 'GET projects/:id/debian_distributions' do
let(:url) { "/projects/#{container.id}/debian_distributions" }
it_behaves_like 'Debian distributions read endpoint', 'GET', :success, /^\[{.*"codename":"existing-codename",.*"components":\["existing-component"\],.*"architectures":\["all","existing-arch"\]/
+ it_behaves_like 'accept GET request on private project with access to package registry for everyone'
end
describe 'GET projects/:id/debian_distributions/:codename' do
let(:url) { "/projects/#{container.id}/debian_distributions/#{distribution.codename}" }
it_behaves_like 'Debian distributions read endpoint', 'GET', :success, /^{.*"codename":"existing-codename",.*"components":\["existing-component"\],.*"architectures":\["all","existing-arch"\]/
+ it_behaves_like 'accept GET request on private project with access to package registry for everyone'
end
describe 'GET projects/:id/debian_distributions/:codename/key.asc' do
let(:url) { "/projects/#{container.id}/debian_distributions/#{distribution.codename}/key.asc" }
it_behaves_like 'Debian distributions read endpoint', 'GET', :success, /^-----BEGIN PGP PUBLIC KEY BLOCK-----/
+ it_behaves_like 'accept GET request on private project with access to package registry for everyone'
end
describe 'PUT projects/:id/debian_distributions/:codename' do
diff --git a/spec/requests/api/project_export_spec.rb b/spec/requests/api/project_export_spec.rb
index fdd76c63069..096f0b73b4c 100644
--- a/spec/requests/api/project_export_spec.rb
+++ b/spec/requests/api/project_export_spec.rb
@@ -511,6 +511,10 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category:
let_it_be(:status_path) { "/projects/#{project.id}/export_relations/status" }
+ before do
+ stub_application_setting(bulk_import_enabled: true)
+ end
+
context 'when user is a maintainer' do
before do
project.add_maintainer(user)
@@ -584,9 +588,9 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category:
end
end
- context 'with bulk_import FF disabled' do
+ context 'with bulk_import disabled' do
before do
- stub_feature_flags(bulk_import: false)
+ stub_application_setting(bulk_import_enabled: false)
end
describe 'POST /projects/:id/export_relations' do
@@ -641,5 +645,11 @@ RSpec.describe API::ProjectExport, :clean_gitlab_redis_cache, feature_category:
end
end
end
+
+ context 'when bulk import is disabled' do
+ it_behaves_like '404 response' do
+ let(:request) { get api(path, user) }
+ end
+ end
end
end
diff --git a/spec/requests/api/projects_spec.rb b/spec/requests/api/projects_spec.rb
index 6e8168c0ee1..d62f8a32453 100644
--- a/spec/requests/api/projects_spec.rb
+++ b/spec/requests/api/projects_spec.rb
@@ -1169,7 +1169,7 @@ RSpec.describe API::Projects do
expect(response).to have_gitlab_http_status(:bad_request)
end
- it "assigns attributes to project", :aggregate_failures do
+ it 'assigns attributes to project', :aggregate_failures do
project = attributes_for(:project, {
path: 'camelCasePath',
issues_enabled: false,
@@ -1198,6 +1198,11 @@ RSpec.describe API::Projects do
attrs[:feature_flags_access_level] = 'disabled'
attrs[:infrastructure_access_level] = 'disabled'
attrs[:monitor_access_level] = 'disabled'
+ attrs[:snippets_access_level] = 'disabled'
+ attrs[:wiki_access_level] = 'disabled'
+ attrs[:builds_access_level] = 'disabled'
+ attrs[:merge_requests_access_level] = 'disabled'
+ attrs[:issues_access_level] = 'disabled'
end
post api('/projects', user), params: project
@@ -1228,6 +1233,11 @@ RSpec.describe API::Projects do
expect(project.project_feature.feature_flags_access_level).to eq(ProjectFeature::DISABLED)
expect(project.project_feature.infrastructure_access_level).to eq(ProjectFeature::DISABLED)
expect(project.project_feature.monitor_access_level).to eq(ProjectFeature::DISABLED)
+ expect(project.project_feature.wiki_access_level).to eq(ProjectFeature::DISABLED)
+ expect(project.project_feature.builds_access_level).to eq(ProjectFeature::DISABLED)
+ expect(project.project_feature.merge_requests_access_level).to eq(ProjectFeature::DISABLED)
+ expect(project.project_feature.issues_access_level).to eq(ProjectFeature::DISABLED)
+ expect(project.project_feature.snippets_access_level).to eq(ProjectFeature::DISABLED)
end
it 'assigns container_registry_enabled to project', :aggregate_failures do
@@ -2278,7 +2288,7 @@ RSpec.describe API::Projects do
end
end
- context 'when authenticated as an admin' do
+ context 'when authenticated as an admin', :with_license do
before do
stub_container_registry_config(enabled: true, host_port: 'registry.example.org:5000')
end
diff --git a/spec/requests/api/release/links_spec.rb b/spec/requests/api/release/links_spec.rb
index 6036960c43c..4a7821fcb0a 100644
--- a/spec/requests/api/release/links_spec.rb
+++ b/spec/requests/api/release/links_spec.rb
@@ -222,6 +222,24 @@ RSpec.describe API::Release::Links, feature_category: :release_orchestration do
expect(response).to match_response_schema('release/link')
end
+ context 'when using `direct_asset_path`' do
+ before do
+ params[:direct_asset_path] = params.delete(:filepath)
+ end
+
+ it 'creates a new release link successfully' do
+ expect do
+ post api("/projects/#{project.id}/releases/v0.1/assets/links", maintainer), params: params
+ end.to change { Releases::Link.count }.by(1)
+
+ release.reload
+
+ expect(last_release_link.name).to eq('awesome-app.dmg')
+ expect(last_release_link.filepath).to eq('/binaries/awesome-app.dmg')
+ expect(last_release_link.url).to eq('https://example.com/download/awesome-app.dmg')
+ end
+ end
+
context 'when using JOB-TOKEN auth' do
let(:job) { create(:ci_build, :running, user: maintainer) }
@@ -357,6 +375,15 @@ RSpec.describe API::Release::Links, feature_category: :release_orchestration do
expect(response).to match_response_schema('release/link')
end
+ context 'when using `direct_asset_path`' do
+ it 'updates the release link' do
+ put api("/projects/#{project.id}/releases/v0.1/assets/links/#{release_link.id}", maintainer),
+ params: params.merge(direct_asset_path: '/binaries/awesome-app.msi')
+
+ expect(json_response['direct_asset_url']).to eq("http://localhost/#{project.namespace.path}/#{project.name}/-/releases/#{release.tag}/downloads/binaries/awesome-app.msi")
+ end
+ end
+
context 'when using JOB-TOKEN auth' do
let(:job) { create(:ci_build, :running, user: maintainer) }
diff --git a/spec/requests/api/releases_spec.rb b/spec/requests/api/releases_spec.rb
index a1aff9a6b1c..e209ad2b2d5 100644
--- a/spec/requests/api/releases_spec.rb
+++ b/spec/requests/api/releases_spec.rb
@@ -573,7 +573,7 @@ RSpec.describe API::Releases, feature_category: :release_orchestration do
end
end
- describe 'GET /projects/:id/releases/:tag_name/downloads/*file_path' do
+ describe 'GET /projects/:id/releases/:tag_name/downloads/*direct_asset_path' do
let!(:release) { create(:release, project: project, tag: 'v0.1', author: maintainer) }
let!(:link) { create(:release_link, release: release, url: "#{url}#{filepath}", filepath: filepath) }
let(:filepath) { '/bin/bigfile.exe' }
@@ -637,6 +637,16 @@ RSpec.describe API::Releases, feature_category: :release_orchestration do
end
end
+ context 'when direct_asset_path is used' do
+ let(:direct_asset_path) { filepath }
+
+ it 'redirects to the file download URL successfully' do
+ get api("/projects/#{project.id}/releases/v0.1/downloads#{direct_asset_path}", maintainer)
+
+ expect(response).to redirect_to("#{url}#{direct_asset_path}")
+ end
+ end
+
context 'when filepath does not exist' do
it 'returns 404 for maintainer' do
get api("/projects/#{project.id}/releases/v0.1/downloads/bin/not_existing.exe", maintainer)
@@ -911,6 +921,22 @@ RSpec.describe API::Releases, feature_category: :release_orchestration do
end.not_to change { Project.find_by_id(project.id).repository.tag_count }
end
+ context 'when using `direct_asset_path` for the asset link' do
+ before do
+ params[:direct_asset_path] = params.delete(:filepath)
+ end
+
+ it 'creates a new release successfully' do
+ expect do
+ post api("/projects/#{project.id}/releases", maintainer), params: params
+ end.to change { Release.count }.by(1)
+
+ release = project.releases.last
+
+ expect(release.links.last.filepath).to eq('/permanent/path/to/runbook')
+ end
+ end
+
context 'with protected tag' do
context 'when user has access to the protected tag' do
let!(:protected_tag) { create(:protected_tag, :developers_can_create, name: '*', project: project) }
diff --git a/spec/requests/api/repositories_spec.rb b/spec/requests/api/repositories_spec.rb
index 393ada1da4f..555ba2bc978 100644
--- a/spec/requests/api/repositories_spec.rb
+++ b/spec/requests/api/repositories_spec.rb
@@ -353,15 +353,9 @@ RSpec.describe API::Repositories, feature_category: :source_code_management do
expect(response).to have_gitlab_http_status(:too_many_requests)
end
- context "when hotlinking detection is enabled" do
- before do
- stub_feature_flags(repository_archive_hotlinking_interception: true)
- end
-
- it_behaves_like "hotlink interceptor" do
- let(:http_request) do
- get api(route, current_user), headers: headers
- end
+ it_behaves_like "hotlink interceptor" do
+ let(:http_request) do
+ get api(route, current_user), headers: headers
end
end
end
diff --git a/spec/requests/api/rubygem_packages_spec.rb b/spec/requests/api/rubygem_packages_spec.rb
index 6f048fa57a8..34cf6033811 100644
--- a/spec/requests/api/rubygem_packages_spec.rb
+++ b/spec/requests/api/rubygem_packages_spec.rb
@@ -55,11 +55,11 @@ RSpec.describe API::RubygemPackages, feature_category: :package_registry do
end
where(:user_role, :token_type, :valid_token, :status) do
- :guest | :personal_access_token | true | :not_found
+ :guest | :personal_access_token | true | :forbidden
:guest | :personal_access_token | false | :unauthorized
:guest | :deploy_token | true | :not_found
:guest | :deploy_token | false | :unauthorized
- :guest | :job_token | true | :not_found
+ :guest | :job_token | true | :forbidden
:guest | :job_token | false | :unauthorized
:reporter | :personal_access_token | true | :not_found
:reporter | :personal_access_token | false | :unauthorized
@@ -174,6 +174,17 @@ RSpec.describe API::RubygemPackages, feature_category: :package_registry do
end
end
+ context 'with access to package registry for everyone' do
+ let(:snowplow_gitlab_standard_context) { { project: project, namespace: project.namespace, property: 'i_package_rubygems_user' } }
+
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ project.project_feature.update!(package_registry_access_level: ProjectFeature::PUBLIC)
+ end
+
+ it_behaves_like 'Rubygems gem download', :anonymous, :success
+ end
+
context 'with package files pending destruction' do
let_it_be(:package_file_pending_destruction) { create(:package_file, :pending_destruction, :xml, package: package, file_name: file_name) }
@@ -423,5 +434,16 @@ RSpec.describe API::RubygemPackages, feature_category: :package_registry do
it_behaves_like params[:shared_examples_name], params[:user_role], params[:expected_status], params[:member]
end
end
+
+ context 'with access to package registry for everyone' do
+ let(:params) { {} }
+
+ before do
+ project.update!(visibility_level: Gitlab::VisibilityLevel::PRIVATE)
+ project.project_feature.update!(package_registry_access_level: ProjectFeature::PUBLIC)
+ end
+
+ it_behaves_like 'dependency endpoint success', :anonymous, :success
+ end
end
end
diff --git a/spec/requests/api/search_spec.rb b/spec/requests/api/search_spec.rb
index 430d3b7d187..035f53db12e 100644
--- a/spec/requests/api/search_spec.rb
+++ b/spec/requests/api/search_spec.rb
@@ -377,6 +377,26 @@ RSpec.describe API::Search, feature_category: :global_search do
end
end
+ context 'global search is disabled for the given scope' do
+ it 'returns forbidden response' do
+ allow_next_instance_of(SearchService) do |instance|
+ allow(instance).to receive(:global_search_enabled_for_scope?).and_return false
+ end
+ get api(endpoint, user), params: { search: 'awesome', scope: 'issues' }
+ expect(response).to have_gitlab_http_status(:forbidden)
+ end
+ end
+
+ context 'global search is enabled for the given scope' do
+ it 'returns ok response' do
+ allow_next_instance_of(SearchService) do |instance|
+ allow(instance).to receive(:global_search_enabled_for_scope?).and_return true
+ end
+ get api(endpoint, user), params: { search: 'awesome', scope: 'issues' }
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+
it 'increments the custom search sli error rate with error false if no error occurred' do
expect(Gitlab::Metrics::GlobalSearchSlis).to receive(:record_error_rate).with(
error: false,
diff --git a/spec/requests/api/settings_spec.rb b/spec/requests/api/settings_spec.rb
index e93ef52ef03..4d85849cff3 100644
--- a/spec/requests/api/settings_spec.rb
+++ b/spec/requests/api/settings_spec.rb
@@ -65,6 +65,7 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['can_create_group']).to eq(true)
expect(json_response['jira_connect_application_key']).to eq(nil)
expect(json_response['jira_connect_proxy_url']).to eq(nil)
+ expect(json_response['user_defaults_to_private_profile']).to eq(false)
end
end
@@ -166,7 +167,9 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
can_create_group: false,
jira_connect_application_key: '123',
jira_connect_proxy_url: 'http://example.com',
- bulk_import_enabled: false
+ bulk_import_enabled: false,
+ allow_runner_registration_token: true,
+ user_defaults_to_private_profile: true
}
expect(response).to have_gitlab_http_status(:ok)
@@ -232,6 +235,8 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
expect(json_response['jira_connect_application_key']).to eq('123')
expect(json_response['jira_connect_proxy_url']).to eq('http://example.com')
expect(json_response['bulk_import_enabled']).to be(false)
+ expect(json_response['allow_runner_registration_token']).to be(true)
+ expect(json_response['user_defaults_to_private_profile']).to be(true)
end
end
@@ -801,5 +806,62 @@ RSpec.describe API::Settings, 'Settings', :do_not_mock_admin_mode_setting, featu
.to include(a_string_matching('is not a number'))
end
end
+
+ context 'with housekeeping enabled' do
+ it 'at least one of housekeeping_incremental_repack_period or housekeeping_optimize_repository_period is required' do
+ put api("/application/settings", admin), params: {
+ housekeeping_enabled: true
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq(
+ "housekeeping_incremental_repack_period, housekeeping_optimize_repository_period are missing, exactly one parameter must be provided"
+ )
+ end
+
+ context 'when housekeeping_incremental_repack_period is specified' do
+ it 'requires all three housekeeping settings' do
+ put api("/application/settings", admin), params: {
+ housekeeping_enabled: true,
+ housekeeping_incremental_repack_period: 10
+ }
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['error']).to eq(
+ "housekeeping_full_repack_period, housekeeping_gc_period, housekeeping_incremental_repack_period provide all or none of parameters"
+ )
+ end
+
+ it 'returns housekeeping_optimize_repository_period value for all housekeeping settings attributes' do
+ put api("/application/settings", admin), params: {
+ housekeeping_enabled: true,
+ housekeeping_gc_period: 150,
+ housekeeping_full_repack_period: 125,
+ housekeeping_incremental_repack_period: 100
+ }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['housekeeping_optimize_repository_period']).to eq(100)
+ expect(json_response['housekeeping_full_repack_period']).to eq(100)
+ expect(json_response['housekeeping_gc_period']).to eq(100)
+ expect(json_response['housekeeping_incremental_repack_period']).to eq(100)
+ end
+ end
+
+ context 'when housekeeping_optimize_repository_period is specified' do
+ it 'returns housekeeping_optimize_repository_period value for all housekeeping settings attributes' do
+ put api("/application/settings", admin), params: {
+ housekeeping_enabled: true,
+ housekeeping_optimize_repository_period: 100
+ }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(json_response['housekeeping_optimize_repository_period']).to eq(100)
+ expect(json_response['housekeeping_full_repack_period']).to eq(100)
+ expect(json_response['housekeeping_gc_period']).to eq(100)
+ expect(json_response['housekeeping_incremental_repack_period']).to eq(100)
+ end
+ end
+ end
end
end
diff --git a/spec/requests/api/snippet_repository_storage_moves_spec.rb b/spec/requests/api/snippet_repository_storage_moves_spec.rb
index 6081531aee9..9afd8147eb6 100644
--- a/spec/requests/api/snippet_repository_storage_moves_spec.rb
+++ b/spec/requests/api/snippet_repository_storage_moves_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::SnippetRepositoryStorageMoves, feature_category: :gitaly do
+RSpec.describe API::SnippetRepositoryStorageMoves, :with_license, feature_category: :gitaly do
it_behaves_like 'repository_storage_moves API', 'snippets' do
let_it_be(:container) { create(:snippet, :repository).tap { |snippet| snippet.create_repository } }
let_it_be(:storage_move) { create(:snippet_repository_storage_move, :scheduled, container: container) }
diff --git a/spec/requests/api/suggestions_spec.rb b/spec/requests/api/suggestions_spec.rb
index 93b2435c601..4a4692684e3 100644
--- a/spec/requests/api/suggestions_spec.rb
+++ b/spec/requests/api/suggestions_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe API::Suggestions, feature_category: :code_review do
+RSpec.describe API::Suggestions, feature_category: :code_review_workflow do
let(:project) { create(:project, :repository) }
let(:user) { create(:user) }
diff --git a/spec/requests/api/todos_spec.rb b/spec/requests/api/todos_spec.rb
index 5a342f79926..8c3bdd5a9f0 100644
--- a/spec/requests/api/todos_spec.rb
+++ b/spec/requests/api/todos_spec.rb
@@ -6,6 +6,7 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
include DesignManagementTestHelpers
let_it_be(:group) { create(:group) }
+ let_it_be(:group_2) { create(:group) }
let_it_be(:project_1) { create(:project, :repository, group: group) }
let_it_be(:project_2) { create(:project) }
let_it_be(:author_1) { create(:user) }
@@ -15,7 +16,8 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
let_it_be(:work_item) { create(:work_item, :task, project: project_1) }
let_it_be(:merge_request) { create(:merge_request, source_project: project_1) }
let_it_be(:alert) { create(:alert_management_alert, project: project_1) }
- let_it_be(:group_request_todo) { create(:todo, author: author_1, user: john_doe, target: group, action: Todo::MEMBER_ACCESS_REQUESTED) }
+ let_it_be(:project_request_todo) { create(:todo, author: author_1, user: john_doe, target: project_2, action: Todo::MEMBER_ACCESS_REQUESTED) }
+ let_it_be(:group_request_todo) { create(:todo, author: author_1, user: john_doe, target: group_2, action: Todo::MEMBER_ACCESS_REQUESTED) }
let_it_be(:alert_todo) { create(:todo, project: project_1, author: john_doe, user: john_doe, target: alert) }
let_it_be(:merge_request_todo) { create(:todo, project: project_1, author: author_2, user: john_doe, target: merge_request) }
let_it_be(:pending_1) { create(:todo, :mentioned, project: project_1, author: author_1, user: john_doe, target: issue) }
@@ -72,7 +74,7 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
expect(response).to have_gitlab_http_status(:ok)
expect(response).to include_pagination_headers
expect(json_response).to be_an Array
- expect(json_response.length).to eq(7)
+ expect(json_response.length).to eq(8)
expect(json_response[0]).to include(
'id' => pending_5.id,
@@ -133,11 +135,23 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
'target_type' => 'Namespace',
'action_name' => 'member_access_requested',
'target' => hash_including(
- 'id' => group.id,
- 'name' => group.name,
- 'full_path' => group.full_path
+ 'id' => group_2.id,
+ 'name' => group_2.name,
+ 'full_path' => group_2.full_path
),
- 'target_url' => Gitlab::Routing.url_helpers.group_group_members_url(group, tab: 'access_requests')
+ 'target_url' => Gitlab::Routing.url_helpers.group_group_members_url(group_2, tab: 'access_requests')
+ )
+
+ expect(json_response[7]).to include(
+ 'target_type' => 'Project',
+ 'action_name' => 'member_access_requested',
+ 'target' => hash_including(
+ 'id' => project_2.id,
+ 'name' => project_2.name,
+ 'path' => project_2.path
+ ),
+ 'target_url' => Gitlab::Routing.url_helpers.project_project_members_url(project_2, tab: 'access_requests'),
+ 'body' => project_2.full_path
)
end
@@ -149,7 +163,7 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
get api('/todos', john_doe)
- expect(json_response.count).to eq(7)
+ expect(json_response.count).to eq(8)
expect(json_response.map { |t| t['id'] }).not_to include(no_access_todo.id, pending_4.id)
end
end
@@ -242,8 +256,10 @@ RSpec.describe API::Todos, feature_category: :source_code_management do
merge_request_3 = create(:merge_request, :jira_branch, source_project: new_todo.project)
create(:on_commit_todo, project: new_todo.project, author: author_1, user: john_doe, target: merge_request_3)
create(:todo, project: new_todo.project, author: author_2, user: john_doe, target: merge_request_3)
+ create(:todo, author: author_2, user: john_doe, target: project_2, action: Todo::MEMBER_ACCESS_REQUESTED)
+ create(:todo, author: author_2, user: john_doe, target: group_2, action: Todo::MEMBER_ACCESS_REQUESTED)
- expect { get api('/todos', john_doe) }.not_to exceed_query_limit(control1).with_threshold(6)
+ expect { get api('/todos', john_doe) }.not_to exceed_query_limit(control1).with_threshold(5)
control2 = ActiveRecord::QueryRecorder.new { get api('/todos', john_doe) }
create_issue_todo_for(john_doe)
diff --git a/spec/requests/api/users_spec.rb b/spec/requests/api/users_spec.rb
index bfb71d95f5e..c063187fdf4 100644
--- a/spec/requests/api/users_spec.rb
+++ b/spec/requests/api/users_spec.rb
@@ -1454,6 +1454,46 @@ RSpec.describe API::Users, feature_category: :users do
include_examples 'does not allow the "read_user" scope'
end
+
+ context "`private_profile` attribute" do
+ context "based on the application setting" do
+ before do
+ stub_application_setting(user_defaults_to_private_profile: true)
+ end
+
+ let(:params) { attributes_for(:user) }
+
+ shared_examples_for 'creates the user with the value of `private_profile` based on the application setting' do
+ specify do
+ post api("/users", admin), params: params
+
+ expect(response).to have_gitlab_http_status(:created)
+ user = User.find_by(id: json_response['id'], private_profile: true)
+ expect(user).to be_present
+ end
+ end
+
+ context 'when the attribute is not overridden in params' do
+ it_behaves_like 'creates the user with the value of `private_profile` based on the application setting'
+ end
+
+ context 'when the attribute is overridden in params' do
+ it 'creates the user with the value of `private_profile` same as the value of the overridden param' do
+ post api("/users", admin), params: params.merge(private_profile: false)
+
+ expect(response).to have_gitlab_http_status(:created)
+ user = User.find_by(id: json_response['id'], private_profile: false)
+ expect(user).to be_present
+ end
+
+ context 'overridden as `nil`' do
+ let(:params) { attributes_for(:user, private_profile: nil) }
+
+ it_behaves_like 'creates the user with the value of `private_profile` based on the application setting'
+ end
+ end
+ end
+ end
end
describe "PUT /users/:id" do
@@ -1634,12 +1674,6 @@ RSpec.describe API::Users, feature_category: :users do
expect(user.reload.external?).to be_truthy
end
- it "private profile is false by default" do
- put api("/users/#{user.id}", admin), params: {}
-
- expect(user.reload.private_profile).to eq(false)
- end
-
it "does have default values for theme and color-scheme ID" do
put api("/users/#{user.id}", admin), params: {}
@@ -1647,13 +1681,6 @@ RSpec.describe API::Users, feature_category: :users do
expect(user.reload.color_scheme_id).to eq(Gitlab::ColorSchemes.default.id)
end
- it "updates private profile" do
- put api("/users/#{user.id}", admin), params: { private_profile: true }
-
- expect(response).to have_gitlab_http_status(:ok)
- expect(user.reload.private_profile).to eq(true)
- end
-
it "updates viewing diffs file by file" do
put api("/users/#{user.id}", admin), params: { view_diffs_file_by_file: true }
@@ -1661,22 +1688,40 @@ RSpec.describe API::Users, feature_category: :users do
expect(user.reload.user_preference.view_diffs_file_by_file?).to eq(true)
end
- it "updates private profile to false when nil is given" do
- user.update!(private_profile: true)
+ context 'updating `private_profile`' do
+ it "updates private profile" do
+ current_value = user.private_profile
+ new_value = !current_value
- put api("/users/#{user.id}", admin), params: { private_profile: nil }
+ put api("/users/#{user.id}", admin), params: { private_profile: new_value }
- expect(response).to have_gitlab_http_status(:ok)
- expect(user.reload.private_profile).to eq(false)
- end
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(user.reload.private_profile).to eq(new_value)
+ end
+
+ context 'when `private_profile` is set to `nil`' do
+ before do
+ stub_application_setting(user_defaults_to_private_profile: true)
+ end
- it "does not modify private profile when field is not provided" do
- user.update!(private_profile: true)
+ it "updates private_profile to value of the application setting" do
+ user.update!(private_profile: false)
- put api("/users/#{user.id}", admin), params: {}
+ put api("/users/#{user.id}", admin), params: { private_profile: nil }
- expect(response).to have_gitlab_http_status(:ok)
- expect(user.reload.private_profile).to eq(true)
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(user.reload.private_profile).to eq(true)
+ end
+ end
+
+ it "does not modify private profile when field is not provided" do
+ user.update!(private_profile: true)
+
+ put api("/users/#{user.id}", admin), params: {}
+
+ expect(response).to have_gitlab_http_status(:ok)
+ expect(user.reload.private_profile).to eq(true)
+ end
end
it "does not modify theme or color-scheme ID when field is not provided" do
@@ -3617,6 +3662,15 @@ RSpec.describe API::Users, feature_category: :users do
expect(response.body).to eq('true')
expect(user.reload.state).to eq('blocked')
end
+
+ it 'saves a custom attribute', :freeze_time, feature_category: :insider_threat do
+ block_user
+
+ custom_attribute = user.custom_attributes.last
+
+ expect(custom_attribute.key).to eq(UserCustomAttribute::BLOCKED_BY)
+ expect(custom_attribute.value).to eq("#{admin.username}/#{admin.id}+#{Time.current}")
+ end
end
context 'with an ldap blocked user' do
@@ -3708,6 +3762,15 @@ RSpec.describe API::Users, feature_category: :users do
expect(response).to have_gitlab_http_status(:created)
expect(blocked_user.reload.state).to eq('active')
end
+
+ it 'saves a custom attribute', :freeze_time, feature_category: :insider_threat do
+ unblock_user
+
+ custom_attribute = blocked_user.custom_attributes.last
+
+ expect(custom_attribute.key).to eq(UserCustomAttribute::UNBLOCKED_BY)
+ expect(custom_attribute.value).to eq("#{admin.username}/#{admin.id}+#{Time.current}")
+ end
end
context 'with a ldap blocked user' do
@@ -4045,60 +4108,164 @@ RSpec.describe API::Users, feature_category: :users do
end
end
- describe 'GET /user/status' do
- let(:path) { '/user/status' }
+ describe '/user/status' do
+ let(:user_status) { create(:user_status, clear_status_at: 8.hours.from_now) }
+ let(:user_with_status) { user_status.user }
+ let(:params) { {} }
+ let(:request_user) { user }
- it_behaves_like 'rendering user status'
- end
+ shared_examples '/user/status successful response' do
+ context 'when request is successful' do
+ let(:params) { { emoji: 'smirk', message: 'hello world' } }
- describe 'PUT /user/status' do
- it 'saves the status' do
- put api('/user/status', user), params: { emoji: 'smirk', message: 'hello world' }
+ it 'saves the status' do
+ set_user_status
- expect(response).to have_gitlab_http_status(:success)
- expect(json_response['emoji']).to eq('smirk')
+ expect(response).to have_gitlab_http_status(:success)
+ expect(json_response['emoji']).to eq('smirk')
+ expect(json_response['message']).to eq('hello world')
+ end
+ end
end
- it 'renders errors when the status was invalid' do
- put api('/user/status', user), params: { emoji: 'does not exist', message: 'hello world' }
+ shared_examples '/user/status unsuccessful response' do
+ context 'when request is unsuccessful' do
+ let(:params) { { emoji: 'does not exist', message: 'hello world' } }
- expect(response).to have_gitlab_http_status(:bad_request)
- expect(json_response['message']['emoji']).to be_present
+ it 'renders errors' do
+ set_user_status
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ expect(json_response['message']['emoji']).to be_present
+ end
+ end
end
- it 'deletes the status when passing empty values' do
- put api('/user/status', user)
+ shared_examples '/user/status passing nil for params' do
+ context 'when passing nil for params' do
+ let(:params) { { emoji: nil, message: nil, clear_status_after: nil } }
+ let(:request_user) { user_with_status }
- expect(response).to have_gitlab_http_status(:success)
- expect(user.reload.status).to be_nil
+ it 'deletes the status' do
+ set_user_status
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(user_with_status.status).to be_nil
+ end
+ end
end
- context 'when clear_status_after is given' do
- it 'sets the clear_status_at column' do
- freeze_time do
+ shared_examples '/user/status clear_status_after field' do
+ context 'when clear_status_after is valid', :freeze_time do
+ let(:params) { { emoji: 'smirk', message: 'hello world', clear_status_after: '3_hours' } }
+
+ it 'sets the clear_status_at column' do
expected_clear_status_at = 3.hours.from_now
- put api('/user/status', user), params: { emoji: 'smirk', message: 'hello world', clear_status_after: '3_hours' }
+ set_user_status
expect(response).to have_gitlab_http_status(:success)
- expect(user.status.reload.clear_status_at).to be_within(1.minute).of(expected_clear_status_at)
- expect(Time.parse(json_response["clear_status_at"])).to be_within(1.minute).of(expected_clear_status_at)
+ expect(user.status.clear_status_at).to be_like_time(expected_clear_status_at)
+ expect(Time.parse(json_response["clear_status_at"])).to be_like_time(expected_clear_status_at)
end
end
- it 'unsets the clear_status_at column' do
- user.create_status!(clear_status_at: 5.hours.ago)
+ context 'when clear_status_after is nil' do
+ let(:params) { { emoji: 'smirk', message: 'hello world', clear_status_after: nil } }
+ let(:request_user) { user_with_status }
- put api('/user/status', user), params: { emoji: 'smirk', message: 'hello world', clear_status_after: nil }
+ it 'unsets the clear_status_at column' do
+ set_user_status
- expect(response).to have_gitlab_http_status(:success)
- expect(user.status.reload.clear_status_at).to be_nil
+ expect(response).to have_gitlab_http_status(:success)
+ expect(user_with_status.status.clear_status_at).to be_nil
+ end
end
- it 'raises error when unknown status value is given' do
- put api('/user/status', user), params: { emoji: 'smirk', message: 'hello world', clear_status_after: 'wrong' }
+ context 'when clear_status_after is invalid' do
+ let(:params) { { emoji: 'smirk', message: 'hello world', clear_status_after: 'invalid' } }
- expect(response).to have_gitlab_http_status(:bad_request)
+ it 'raises error when unknown status value is given' do
+ set_user_status
+
+ expect(response).to have_gitlab_http_status(:bad_request)
+ end
+ end
+ end
+
+ describe 'GET' do
+ let(:path) { '/user/status' }
+
+ it_behaves_like 'rendering user status'
+ end
+
+ describe 'PUT' do
+ subject(:set_user_status) { put api('/user/status', request_user), params: params }
+
+ include_examples '/user/status successful response'
+
+ include_examples '/user/status unsuccessful response'
+
+ include_examples '/user/status passing nil for params'
+
+ include_examples '/user/status clear_status_after field'
+
+ context 'when passing empty params' do
+ let(:request_user) { user_with_status }
+
+ it 'deletes the status' do
+ set_user_status
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(user_with_status.status).to be_nil
+ end
+ end
+
+ context 'when clear_status_after is not given' do
+ let(:params) { { emoji: 'smirk', message: 'hello world' } }
+ let(:request_user) { user_with_status }
+
+ it 'unsets clear_status_at column' do
+ set_user_status
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(user_with_status.status.clear_status_at).to be_nil
+ end
+ end
+ end
+
+ describe 'PATCH' do
+ subject(:set_user_status) { patch api('/user/status', request_user), params: params }
+
+ include_examples '/user/status successful response'
+
+ include_examples '/user/status unsuccessful response'
+
+ include_examples '/user/status passing nil for params'
+
+ include_examples '/user/status clear_status_after field'
+
+ context 'when passing empty params' do
+ let(:request_user) { user_with_status }
+
+ it 'does not update the status' do
+ set_user_status
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(user_with_status.status).to eq(user_status)
+ end
+ end
+
+ context 'when clear_status_after is not given' do
+ let(:params) { { emoji: 'smirk', message: 'hello world' } }
+ let(:request_user) { user_with_status }
+
+ it 'does not unset clear_status_at column' do
+ set_user_status
+
+ expect(response).to have_gitlab_http_status(:success)
+ expect(user_with_status.status.clear_status_at).not_to be_nil
+ end
end
end
end
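The hunk above reworks the /user/status request specs so PUT and PATCH exercise the same assertions through shared examples driven by a `set_user_status` subject. As a hedged, illustrative sketch of that pattern (not part of the commit; it assumes GitLab's request-spec helpers such as api, json_response and have_gitlab_http_status, plus FactoryBot, and the example name 'sets the status' is invented):

RSpec.describe 'User status API', type: :request do
  let(:user) { create(:user) }
  let(:params) { { emoji: 'smirk', message: 'hello world' } }

  # One set of assertions, written against whatever `set_user_status` performs.
  shared_examples 'sets the status' do
    it 'persists emoji and message' do
      set_user_status

      expect(response).to have_gitlab_http_status(:success)
      expect(json_response['emoji']).to eq('smirk')
    end
  end

  describe 'PUT /user/status' do
    subject(:set_user_status) { put api('/user/status', user), params: params }

    include_examples 'sets the status'
  end

  describe 'PATCH /user/status' do
    subject(:set_user_status) { patch api('/user/status', user), params: params }

    include_examples 'sets the status'
  end
end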
diff --git a/spec/requests/dashboard_controller_spec.rb b/spec/requests/dashboard_controller_spec.rb
index 9edacb27c93..1c8ab843ebe 100644
--- a/spec/requests/dashboard_controller_spec.rb
+++ b/spec/requests/dashboard_controller_spec.rb
@@ -12,4 +12,32 @@ RSpec.describe DashboardController, feature_category: :authentication_and_author
let(:url) { issues_dashboard_url(:ics, assignee_username: user.username) }
end
end
+
+ context 'issues dashboard' do
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit do
+ let_it_be(:current_user) { create(:user) }
+
+ before do
+ sign_in current_user
+ end
+
+ def request
+ get issues_dashboard_path, params: { scope: 'all', search: 'test' }
+ end
+ end
+ end
+
+ context 'merge requests dashboard' do
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit do
+ let_it_be(:current_user) { create(:user) }
+
+ before do
+ sign_in current_user
+ end
+
+ def request
+ get merge_requests_dashboard_path, params: { scope: 'all', search: 'test' }
+ end
+ end
+ end
end
diff --git a/spec/requests/groups/observability_controller_spec.rb b/spec/requests/groups/observability_controller_spec.rb
index 46690d60539..471cad40c90 100644
--- a/spec/requests/groups/observability_controller_spec.rb
+++ b/spec/requests/groups/observability_controller_spec.rb
@@ -71,23 +71,16 @@ RSpec.describe Groups::ObservabilityController, feature_category: :tracing do
end
end
- describe 'GET #dashboards' do
- let(:path) { group_observability_dashboards_path(group) }
- let(:expected_observability_path) { "#{observability_url}/-/#{group.id}/" }
-
- it_behaves_like 'observability route request'
- end
-
- describe 'GET #manage' do
- let(:path) { group_observability_manage_path(group) }
- let(:expected_observability_path) { "#{observability_url}/-/#{group.id}/dashboards" }
+ describe 'GET #explore' do
+ let(:path) { group_observability_explore_path(group) }
+ let(:expected_observability_path) { "#{observability_url}/-/#{group.id}/explore" }
it_behaves_like 'observability route request'
end
- describe 'GET #explore' do
- let(:path) { group_observability_explore_path(group) }
- let(:expected_observability_path) { "#{observability_url}/-/#{group.id}/explore" }
+ describe 'GET #datasources' do
+ let(:path) { group_observability_datasources_path(group) }
+ let(:expected_observability_path) { "#{observability_url}/-/#{group.id}/datasources" }
it_behaves_like 'observability route request'
end
diff --git a/spec/requests/groups/usage_quotas_controller_spec.rb b/spec/requests/groups/usage_quotas_controller_spec.rb
index bddc95434ce..90fd08063f3 100644
--- a/spec/requests/groups/usage_quotas_controller_spec.rb
+++ b/spec/requests/groups/usage_quotas_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::UsageQuotasController, feature_category: :subscription_cost_management do
+RSpec.describe Groups::UsageQuotasController, :with_license, feature_category: :subscription_cost_management do
let_it_be(:group) { create(:group) }
let_it_be(:subgroup) { create(:group, parent: group) }
let_it_be(:user) { create(:user) }
diff --git a/spec/requests/openid_connect_spec.rb b/spec/requests/openid_connect_spec.rb
index b45f4f1e39f..49279024bd0 100644
--- a/spec/requests/openid_connect_spec.rb
+++ b/spec/requests/openid_connect_spec.rb
@@ -32,6 +32,7 @@ RSpec.describe 'OpenID Connect requests', feature_category: :authentication_and_
{
'name' => 'Alice',
'nickname' => 'alice',
+ 'preferred_username' => 'alice',
'email' => 'public@example.com',
'email_verified' => true,
'website' => 'https://example.com',
diff --git a/spec/requests/projects/issues_controller_spec.rb b/spec/requests/projects/issues_controller_spec.rb
index bbf200eaacd..67a73834f2d 100644
--- a/spec/requests/projects/issues_controller_spec.rb
+++ b/spec/requests/projects/issues_controller_spec.rb
@@ -8,10 +8,14 @@ RSpec.describe Projects::IssuesController, feature_category: :team_planning do
let_it_be(:project) { issue.project }
let_it_be(:user) { issue.author }
+ shared_context 'group project issue' do
+ let_it_be(:project) { create :project, group: group }
+ let_it_be(:issue) { create :issue, project: project }
+ let_it_be(:user) { create(:user) }
+ end
+
describe 'GET #new' do
- before do
- login_as(user)
- end
+ include_context 'group project issue'
it_behaves_like "observability csp policy", described_class do
let(:tested_path) do
@@ -21,9 +25,7 @@ RSpec.describe Projects::IssuesController, feature_category: :team_planning do
end
describe 'GET #show' do
- before do
- login_as(user)
- end
+ include_context 'group project issue'
it_behaves_like "observability csp policy", described_class do
let(:tested_path) do
@@ -32,13 +34,38 @@ RSpec.describe Projects::IssuesController, feature_category: :team_planning do
end
end
+ describe 'GET #index.json' do
+ let_it_be(:public_project) { create(:project, :public) }
+
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit do
+ let_it_be(:current_user) { create(:user) }
+
+ before do
+ sign_in current_user
+ end
+
+ def request
+ get project_issues_path(public_project, format: :json), params: { scope: 'all', search: 'test' }
+ end
+ end
+
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit_unauthenticated do
+ def request
+ get project_issues_path(public_project, format: :json), params: { scope: 'all', search: 'test' }
+ end
+ end
+ end
+
describe 'GET #discussions' do
before do
login_as(user)
end
let_it_be(:discussion) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
- let_it_be(:discussion_reply) { create(:discussion_note_on_issue, noteable: issue, project: issue.project, in_reply_to: discussion) }
+ let_it_be(:discussion_reply) do
+ create(:discussion_note_on_issue, noteable: issue, project: issue.project, in_reply_to: discussion)
+ end
+
let_it_be(:state_event) { create(:resource_state_event, issue: issue) }
let_it_be(:discussion_2) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
let_it_be(:discussion_3) { create(:discussion_note_on_issue, noteable: issue, project: issue.project) }
@@ -92,7 +119,8 @@ RSpec.describe Projects::IssuesController, feature_category: :team_planning do
context 'when private project' do
let_it_be(:private_project) { create(:project, :private) }
- it_behaves_like 'authenticates sessionless user for the request spec', 'index atom', public_resource: false, ignore_metrics: true do
+ it_behaves_like 'authenticates sessionless user for the request spec', 'index atom', public_resource: false,
+ignore_metrics: true do
let(:url) { project_issues_url(private_project, format: :atom) }
before do
@@ -100,7 +128,8 @@ RSpec.describe Projects::IssuesController, feature_category: :team_planning do
end
end
- it_behaves_like 'authenticates sessionless user for the request spec', 'calendar ics', public_resource: false, ignore_metrics: true do
+ it_behaves_like 'authenticates sessionless user for the request spec', 'calendar ics', public_resource: false,
+ignore_metrics: true do
let(:url) { project_issues_url(private_project, format: :ics) }
before do
diff --git a/spec/requests/projects/merge_requests/content_spec.rb b/spec/requests/projects/merge_requests/content_spec.rb
index 6c58dcb5722..54066756f3e 100644
--- a/spec/requests/projects/merge_requests/content_spec.rb
+++ b/spec/requests/projects/merge_requests/content_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'merge request content spec', feature_category: :code_review do
+RSpec.describe 'merge request content spec', feature_category: :code_review_workflow do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:merge_request) { create(:merge_request, :with_head_pipeline, target_project: project, source_project: project) }
diff --git a/spec/requests/projects/merge_requests/context_commit_diffs_spec.rb b/spec/requests/projects/merge_requests/context_commit_diffs_spec.rb
index 10e57970704..24e4dea5cdc 100644
--- a/spec/requests/projects/merge_requests/context_commit_diffs_spec.rb
+++ b/spec/requests/projects/merge_requests/context_commit_diffs_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge Requests Context Commit Diffs', feature_category: :code_review do
+RSpec.describe 'Merge Requests Context Commit Diffs', feature_category: :code_review_workflow do
let_it_be(:sha1) { "33f3729a45c02fc67d00adb1b8bca394b0e761d9" }
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
@@ -35,7 +35,6 @@ RSpec.describe 'Merge Requests Context Commit Diffs', feature_category: :code_re
commit: nil,
diff_view: :inline,
merge_ref_head_diff: nil,
- merge_conflicts_in_diff: true,
pagination_data: {
total_pages: nil
}.merge(pagination_data)
diff --git a/spec/requests/projects/merge_requests/creations_spec.rb b/spec/requests/projects/merge_requests/creations_spec.rb
index 59e2047e1c7..ace6ef0f7b8 100644
--- a/spec/requests/projects/merge_requests/creations_spec.rb
+++ b/spec/requests/projects/merge_requests/creations_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'merge requests creations', feature_category: :code_review do
+RSpec.describe 'merge requests creations', feature_category: :code_review_workflow do
describe 'GET /:namespace/:project/merge_requests/new' do
include ProjectForksHelper
@@ -26,14 +26,17 @@ RSpec.describe 'merge requests creations', feature_category: :code_review do
end
it_behaves_like "observability csp policy", Projects::MergeRequests::CreationsController do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, group: group) }
let(:tested_path) do
project_new_merge_request_path(project, merge_request: {
- title: 'Some feature',
- source_branch: 'fix',
- target_branch: 'feature',
- target_project: project,
- source_project: project
- })
+ title: 'Some feature',
+ source_branch: 'fix',
+ target_branch: 'feature',
+ target_project: project,
+ source_project: project
+ })
end
end
end
diff --git a/spec/requests/projects/merge_requests/diffs_spec.rb b/spec/requests/projects/merge_requests/diffs_spec.rb
index 858acac7f0d..f98688bf767 100644
--- a/spec/requests/projects/merge_requests/diffs_spec.rb
+++ b/spec/requests/projects/merge_requests/diffs_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'Merge Requests Diffs', feature_category: :code_review do
+RSpec.describe 'Merge Requests Diffs', feature_category: :code_review_workflow do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
@@ -33,7 +33,6 @@ RSpec.describe 'Merge Requests Diffs', feature_category: :code_review do
commit: nil,
diff_view: :inline,
merge_ref_head_diff: nil,
- merge_conflicts_in_diff: true,
pagination_data: {
total_pages: nil
}.merge(pagination_data)
@@ -112,17 +111,6 @@ RSpec.describe 'Merge Requests Diffs', feature_category: :code_review do
it_behaves_like 'serializes diffs with expected arguments'
end
- context 'with disabled display_merge_conflicts_in_diff feature' do
- let(:collection) { Gitlab::Diff::FileCollection::MergeRequestDiffBatch }
- let(:expected_options) { collection_arguments(total_pages: 20).merge(merge_conflicts_in_diff: false) }
-
- before do
- stub_feature_flags(display_merge_conflicts_in_diff: false)
- end
-
- it_behaves_like 'serializes diffs with expected arguments'
- end
-
context 'with diff_head option' do
subject { go(page: 0, per_page: 5, diff_head: true) }
diff --git a/spec/requests/projects/merge_requests_controller_spec.rb b/spec/requests/projects/merge_requests_controller_spec.rb
index f5f8b5c2d83..f441438a95a 100644
--- a/spec/requests/projects/merge_requests_controller_spec.rb
+++ b/spec/requests/projects/merge_requests_controller_spec.rb
@@ -8,20 +8,65 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :source_code
let_it_be(:user) { merge_request.author }
describe 'GET #show' do
- before do
- login_as(user)
+ let_it_be(:group) { create(:group) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project, :public, group: group) }
+
+ let(:merge_request) { create :merge_request, source_project: project, author: user }
+
+ context 'when logged in' do
+ before do
+ login_as(user)
+ end
+
+ it_behaves_like "observability csp policy", described_class do
+ let(:tested_path) do
+ project_merge_request_path(project, merge_request)
+ end
+ end
+ end
+
+ context 'when the author of the merge request is banned', feature_category: :insider_threat do
+ let_it_be(:user) { create(:user, :banned) }
+
+ subject { response }
+
+ before do
+ get project_merge_request_path(project, merge_request)
+ end
+
+ it { is_expected.to have_gitlab_http_status(:not_found) }
+ end
+ end
+
+ describe 'GET #index' do
+ let_it_be(:public_project) { create(:project, :public) }
+
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit do
+ let_it_be(:current_user) { user }
+
+ before do
+ sign_in current_user
+ end
+
+ def request
+ get project_merge_requests_path(public_project), params: { scope: 'all', search: 'test' }
+ end
end
- it_behaves_like "observability csp policy", described_class do
- let(:tested_path) do
- project_merge_request_path(project, merge_request)
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit_unauthenticated do
+ def request
+ get project_merge_requests_path(public_project), params: { scope: 'all', search: 'test' }
end
end
end
describe 'GET #discussions' do
let_it_be(:discussion) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
- let_it_be(:discussion_reply) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project, in_reply_to: discussion) }
+ let_it_be(:discussion_reply) do
+ create(:discussion_note_on_merge_request, noteable: merge_request, project: project, in_reply_to: discussion)
+ end
+
let_it_be(:state_event) { create(:resource_state_event, merge_request: merge_request) }
let_it_be(:discussion_2) { create(:discussion_note_on_merge_request, noteable: merge_request, project: project) }
let_it_be(:discussion_3) { create(:diff_note_on_merge_request, noteable: merge_request, project: project) }
@@ -60,22 +105,6 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :source_code
expect(discussions.count).to eq(1)
expect(notes).to match([a_hash_including('id' => discussion_2.id.to_s)])
end
-
- context 'when paginated_mr_discussions is disabled' do
- before do
- stub_feature_flags(paginated_mr_discussions: false)
- end
-
- it 'returns all discussions and ignores per_page param' do
- get_discussions(per_page: 2)
-
- discussions = Gitlab::Json.parse(response.body)
- notes = discussions.flat_map { |d| d['notes'] }
-
- expect(discussions.count).to eq(4)
- expect(notes.count).to eq(5)
- end
- end
end
end
@@ -91,7 +120,8 @@ RSpec.describe Projects::MergeRequestsController, feature_category: :source_code
context 'when private project' do
let_it_be(:private_project) { create(:project, :private) }
- it_behaves_like 'authenticates sessionless user for the request spec', 'index atom', public_resource: false, ignore_metrics: true do
+ it_behaves_like 'authenticates sessionless user for the request spec', 'index atom', public_resource: false,
+ ignore_metrics: true do
let(:url) { project_merge_requests_url(private_project, format: :atom) }
before do
diff --git a/spec/requests/projects/ml/candidates_controller_spec.rb b/spec/requests/projects/ml/candidates_controller_spec.rb
index 4a0fd1ce4f5..d3f9d92bc44 100644
--- a/spec/requests/projects/ml/candidates_controller_spec.rb
+++ b/spec/requests/projects/ml/candidates_controller_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe Projects::Ml::CandidatesController, feature_category: :mlops do
let_it_be(:candidate) { create(:ml_candidates, experiment: experiment, user: user) }
let(:ff_value) { true }
- let(:threshold) { 4 }
let(:candidate_iid) { candidate.iid }
before do
@@ -40,14 +39,13 @@ RSpec.describe Projects::Ml::CandidatesController, feature_category: :mlops do
expect(response).to render_template('projects/ml/candidates/show')
end
- # MR removing this xit https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104166
- xit 'does not perform N+1 sql queries' do
- control_count = ActiveRecord::QueryRecorder.new { show_candidate }
+ it 'does not perform N+1 sql queries' do
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { show_candidate }
create_list(:ml_candidate_params, 3, candidate: candidate)
create_list(:ml_candidate_metrics, 3, candidate: candidate)
- expect { show_candidate }.not_to exceed_all_query_limit(control_count).with_threshold(threshold)
+ expect { show_candidate }.not_to exceed_all_query_limit(control_count)
end
context 'when candidate does not exist' do
diff --git a/spec/requests/projects/ml/experiments_controller_spec.rb b/spec/requests/projects/ml/experiments_controller_spec.rb
index f35f93b1e6c..e8b6f806251 100644
--- a/spec/requests/projects/ml/experiments_controller_spec.rb
+++ b/spec/requests/projects/ml/experiments_controller_spec.rb
@@ -68,24 +68,59 @@ RSpec.describe Projects::Ml::ExperimentsController, feature_category: :mlops do
describe 'GET show' do
let(:params) { basic_params.merge(id: experiment.iid) }
- before do
+ it 'renders the template' do
show_experiment
- end
- it 'renders the template' do
expect(response).to render_template('projects/ml/experiments/show')
end
- # MR removing this xit https://gitlab.com/gitlab-org/gitlab/-/merge_requests/104166
- xit 'does not perform N+1 sql queries' do
- control_count = ActiveRecord::QueryRecorder.new { show_experiment }
+ describe 'pagination' do
+ let_it_be(:candidates) { create_list(:ml_candidates, 5, experiment: experiment) }
+
+ before do
+ stub_const("Projects::Ml::ExperimentsController::MAX_CANDIDATES_PER_PAGE", 2)
+ candidates
+
+ show_experiment
+ end
+
+ context 'when out of bounds' do
+ let(:params) { basic_params.merge(id: experiment.iid, page: 10000) }
+
+ it 'redirects to last page' do
+ last_page = (experiment.candidates.size + 1) / 2
+
+ expect(response).to redirect_to(project_ml_experiment_path(project, experiment.iid, page: last_page))
+ end
+ end
+
+ context 'when bad page' do
+ let(:params) { basic_params.merge(id: experiment.iid, page: 's') }
+
+ it 'uses first page' do
+ expect(assigns(:pagination)).to include(
+ page: 1,
+ is_last_page: false,
+ per_page: 2,
+ total_items: experiment.candidates&.size
+ )
+ end
+ end
+ end
+
+ it 'does not perform N+1 sql queries' do
+ control_count = ActiveRecord::QueryRecorder.new(skip_cached: false) { show_experiment }
create_list(:ml_candidates, 2, :with_metrics_and_params, experiment: experiment)
- expect { show_experiment }.not_to exceed_all_query_limit(control_count).with_threshold(threshold)
+ expect { show_experiment }.not_to exceed_all_query_limit(control_count)
end
- it_behaves_like '404 if feature flag disabled'
+ it_behaves_like '404 if feature flag disabled' do
+ before do
+ show_experiment
+ end
+ end
end
private
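Both ML controller specs above re-enable their previously skipped N+1 checks, now recording a baseline with ActiveRecord::QueryRecorder using skip_cached: false and asserting with GitLab's exceed_all_query_limit matcher. A minimal sketch of that guard pattern, assuming those helpers and a hypothetical get_the_page request method:

it 'does not perform N+1 sql queries' do
  # Record a baseline request, counting cached queries as well.
  control = ActiveRecord::QueryRecorder.new(skip_cached: false) { get_the_page }

  # Grow the data set; rendering should not add queries per extra record.
  create_list(:ml_candidates, 3, experiment: experiment)

  expect { get_the_page }.not_to exceed_all_query_limit(control)
end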
diff --git a/spec/requests/projects_controller_spec.rb b/spec/requests/projects_controller_spec.rb
index f08f3578dc0..613f528e8c2 100644
--- a/spec/requests/projects_controller_spec.rb
+++ b/spec/requests/projects_controller_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ProjectsController, feature_category: :projects do
+RSpec.describe ProjectsController, :with_license, feature_category: :projects do
context 'token authentication' do
context 'when public project' do
let_it_be(:public_project) { create(:project, :public) }
diff --git a/spec/requests/pwa_controller_spec.rb b/spec/requests/pwa_controller_spec.rb
index 3971790c094..a80d083c11f 100644
--- a/spec/requests/pwa_controller_spec.rb
+++ b/spec/requests/pwa_controller_spec.rb
@@ -8,12 +8,13 @@ RSpec.describe PwaController, feature_category: :navigation do
get manifest_path(format: :json)
expect(response.body).to include('The complete DevOps platform.')
+ expect(Gitlab::Json.parse(response.body)).to include({ 'short_name' => 'GitLab' })
expect(response).to have_gitlab_http_status(:success)
end
context 'with customized appearance' do
let_it_be(:appearance) do
- create(:appearance, title: 'Long name', short_title: 'Short name', description: 'This is a test')
+ create(:appearance, title: 'Long name', pwa_short_name: 'Short name', description: 'This is a test')
end
it 'uses custom values', :aggregate_failures do
@@ -27,6 +28,23 @@ RSpec.describe PwaController, feature_category: :navigation do
expect(response).to have_gitlab_http_status(:success)
end
end
+
+ context 'when user is signed in' do
+ before do
+ user = create(:user)
+ allow(user).to receive(:role_required?).and_return(true)
+
+ sign_in(user)
+ end
+
+ it 'skips the required signup info storing of user location' do
+ expect_next_instance_of(described_class) do |instance|
+ expect(instance).not_to receive(:store_location_for).with(:user, manifest_path(format: :json))
+ end
+
+ get manifest_path(format: :json)
+ end
+ end
end
describe 'GET #offline' do
diff --git a/spec/requests/rack_attack_global_spec.rb b/spec/requests/rack_attack_global_spec.rb
index 643a98da441..91595f7826a 100644
--- a/spec/requests/rack_attack_global_spec.rb
+++ b/spec/requests/rack_attack_global_spec.rb
@@ -143,11 +143,11 @@ feature_category: :authentication_and_authorization do
describe 'API requests authenticated with OAuth token', :api do
let(:user) { create(:user) }
let(:application) { Doorkeeper::Application.create!(name: "MyApp", redirect_uri: "https://app.com", owner: user) }
- let(:token) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: "api") }
+ let(:token) { create(:oauth_access_token, application_id: application.id, resource_owner_id: user.id, scopes: "api", expires_in: period_in_seconds + 1) }
let(:other_user) { create(:user) }
let(:other_user_application) { Doorkeeper::Application.create!(name: "MyApp", redirect_uri: "https://app.com", owner: other_user) }
- let(:other_user_token) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: other_user.id, scopes: "api") }
+ let(:other_user_token) { create(:oauth_access_token, application_id: application.id, resource_owner_id: other_user.id, scopes: "api") }
let(:throttle_setting_prefix) { 'throttle_authenticated_api' }
let(:api_partial_url) { '/todos' }
@@ -167,8 +167,8 @@ feature_category: :authentication_and_authorization do
end
context 'with a read_api scope' do
- let(:read_token) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: "read_api") }
- let(:other_user_read_token) { Doorkeeper::AccessToken.create!(application_id: other_user_application.id, resource_owner_id: other_user.id, scopes: "read_api") }
+ let(:read_token) { create(:oauth_access_token, application_id: application.id, resource_owner_id: user.id, scopes: "read_api", expires_in: period_in_seconds + 1) }
+ let(:other_user_read_token) { create(:oauth_access_token, application_id: other_user_application.id, resource_owner_id: other_user.id, scopes: "read_api") }
let(:request_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(read_token)) }
let(:other_user_request_args) { api_get_args_with_token_headers(api_partial_url, oauth_token_headers(other_user_read_token)) }
@@ -1202,7 +1202,7 @@ feature_category: :authentication_and_authorization do
context 'authenticated with OAuth token' do
let(:application) { Doorkeeper::Application.create!(name: "MyApp", redirect_uri: "https://app.com", owner: user) }
- let(:oauth_token) { Doorkeeper::AccessToken.create!(application_id: application.id, resource_owner_id: user.id, scopes: "api") }
+ let(:oauth_token) { create(:oauth_access_token, application_id: application.id, resource_owner_id: user.id, scopes: "api", expires_in: period_in_seconds + 1) }
it 'request is authenticated by token in query string' do
expect_authenticated_request
diff --git a/spec/requests/users_controller_spec.rb b/spec/requests/users_controller_spec.rb
index 608284c05f3..11d8be24e06 100644
--- a/spec/requests/users_controller_spec.rb
+++ b/spec/requests/users_controller_spec.rb
@@ -464,6 +464,18 @@ RSpec.describe UsersController, feature_category: :user_management do
expect(response.body).not_to be_empty
end
+ it 'renders the correct url for issues and work items' do
+ work_item = create(:work_item, :task, project: project)
+ issue = create(:issue, project: project)
+ EventCreateService.new.open_issue(work_item, public_user)
+ EventCreateService.new.open_issue(issue, public_user)
+
+ get user_calendar_activities_url public_user.username
+
+ expect(response.body).to include(project_work_items_path(project, work_item.iid, iid_path: true))
+ expect(response.body).to include(project_issue_path(project, issue))
+ end
+
it 'avoids N+1 queries', :request_store do
get user_calendar_activities_url public_user.username
diff --git a/spec/routing/group_routing_spec.rb b/spec/routing/group_routing_spec.rb
index 54fbe9e962d..3ba7d5ad871 100644
--- a/spec/routing/group_routing_spec.rb
+++ b/spec/routing/group_routing_spec.rb
@@ -72,16 +72,12 @@ RSpec.shared_examples 'groups routing' do
expect(get("groups/#{group_path}/-/harbor/repositories/test/artifacts/test/tags")).to route_to('groups/harbor/tags#index', group_id: group_path, repository_id: 'test', artifact_id: 'test')
end
- it 'routes to the observability controller dashboards method' do
- expect(get("groups/#{group_path}/-/observability/dashboards")).to route_to('groups/observability#dashboards', group_id: group_path)
- end
-
it 'routes to the observability controller explore method' do
expect(get("groups/#{group_path}/-/observability/explore")).to route_to('groups/observability#explore', group_id: group_path)
end
- it 'routes to the observability controller manage method' do
- expect(get("groups/#{group_path}/-/observability/manage")).to route_to('groups/observability#manage', group_id: group_path)
+ it 'routes to the observability controller datasources method' do
+ expect(get("groups/#{group_path}/-/observability/datasources")).to route_to('groups/observability#datasources', group_id: group_path)
end
it 'routes to the usage quotas controller' do
diff --git a/spec/rubocop/check_graceful_task_spec.rb b/spec/rubocop/check_graceful_task_spec.rb
index c39a00470fd..38c2d68a593 100644
--- a/spec/rubocop/check_graceful_task_spec.rb
+++ b/spec/rubocop/check_graceful_task_spec.rb
@@ -68,7 +68,7 @@ RSpec.describe RuboCop::CheckGracefulTask do
let(:user_name) { 'GitLab Bot' }
let(:job_name) { 'some job name' }
let(:job_url) { 'some job url' }
- let(:docs_link) { 'https://docs.gitlab.com/ee/development/contributing/style_guides.html#silenced-offenses' }
+ let(:docs_link) { 'https://docs.gitlab.com/ee/development/rubocop_development_guide.html#silenced-offenses' }
before do
env = {
diff --git a/spec/rubocop/cop/background_migration/feature_category_spec.rb b/spec/rubocop/cop/background_migration/feature_category_spec.rb
new file mode 100644
index 00000000000..359520b1d9f
--- /dev/null
+++ b/spec/rubocop/cop/background_migration/feature_category_spec.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+require 'rubocop_spec_helper'
+require_relative '../../../../rubocop/cop/background_migration/feature_category'
+
+RSpec.describe RuboCop::Cop::BackgroundMigration::FeatureCategory, feature_category: :database do
+ let(:cop) { described_class.new }
+
+ context 'for non background migrations' do
+ before do
+ allow(cop).to receive(:in_background_migration?).and_return(false)
+ end
+
+ it 'does not throw any offense' do
+ expect_no_offenses(<<~RUBY)
+ module Gitlab
+ module BackgroundMigration
+ class MyJob < Gitlab::BackgroundMigration::BatchedMigrationJob
+ def perform; end
+ end
+ end
+ end
+ RUBY
+ end
+ end
+
+ context 'for background migrations' do
+ before do
+ allow(cop).to receive(:in_background_migration?).and_return(true)
+ end
+
+ it 'throws offense on not defining the feature_category' do
+ expect_offense(<<~RUBY)
+ module Gitlab
+ module BackgroundMigration
+ class MyJob1 < Gitlab::BackgroundMigration::BatchedMigrationJob
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::MSG}
+ end
+ end
+ end
+ RUBY
+ end
+
+ it 'throws offense on not defining a valid feature_category' do
+ expect_offense(<<~RUBY)
+ module Gitlab
+ module BackgroundMigration
+ class MyJob1 < Gitlab::BackgroundMigration::BatchedMigrationJob
+ feature_category :invalid_random
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ #{described_class::INVALID_FEATURE_CATEGORY_MSG}
+ end
+ end
+ end
+ RUBY
+ end
+
+ it 'will not throw offense on defining a valid feature_category' do
+ expect_no_offenses(<<~RUBY)
+ module Gitlab
+ module BackgroundMigration
+ class MyJob < Gitlab::BackgroundMigration::BatchedMigrationJob
+ feature_category :database
+
+ def perform; end
+ end
+ end
+ end
+ RUBY
+ end
+ end
+end
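The new cop spec above checks that batched background migration classes declare a feature_category and that the value is a recognised category. Per the spec's valid example, a compliant job is shaped like the following sketch (the class name is invented for illustration):

module Gitlab
  module BackgroundMigration
    class MyBackfillJob < Gitlab::BackgroundMigration::BatchedMigrationJob
      # Required by the cop; the value must be a known feature category.
      feature_category :database

      def perform; end
    end
  end
end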
diff --git a/spec/rubocop/cop/gitlab/strong_memoize_attr_spec.rb b/spec/rubocop/cop/gitlab/strong_memoize_attr_spec.rb
index 0ed699f4e8c..fde53f8f98c 100644
--- a/spec/rubocop/cop/gitlab/strong_memoize_attr_spec.rb
+++ b/spec/rubocop/cop/gitlab/strong_memoize_attr_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe RuboCop::Cop::Gitlab::StrongMemoizeAttr do
class Foo
def memoized_method
strong_memoize(:memoized_method) do
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use `strong_memoize_attr`, instead of using `strong_memoize` directly
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use `strong_memoize_attr`, instead of using `strong_memoize` directly.
'This is a memoized method'
end
end
@@ -35,7 +35,7 @@ RSpec.describe RuboCop::Cop::Gitlab::StrongMemoizeAttr do
class Foo
def enabled?
strong_memoize(:enabled) do
- ^^^^^^^^^^^^^^^^^^^^^^^^ Use `strong_memoize_attr`, instead of using `strong_memoize` directly
+ ^^^^^^^^^^^^^^^^^^^^^^^^ Use `strong_memoize_attr`, instead of using `strong_memoize` directly.
true
end
end
@@ -47,7 +47,7 @@ RSpec.describe RuboCop::Cop::Gitlab::StrongMemoizeAttr do
def enabled?
true
end
- strong_memoize_attr :enabled?, :enabled
+ strong_memoize_attr :enabled?
end
RUBY
end
@@ -62,7 +62,7 @@ RSpec.describe RuboCop::Cop::Gitlab::StrongMemoizeAttr do
msg = 'This is a memoized method'
strong_memoize(:memoized_method) do
- ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use `strong_memoize_attr`, instead of using `strong_memoize` directly
+ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ Use `strong_memoize_attr`, instead of using `strong_memoize` directly.
msg
end
end
@@ -72,4 +72,32 @@ RSpec.describe RuboCop::Cop::Gitlab::StrongMemoizeAttr do
expect_no_corrections
end
end
+
+ context 'when strong_memoize() is used in a method with parameters' do
+ it 'does not register an offense' do
+ expect_no_offenses(<<~RUBY)
+ class Foo
+ def memoized_method(param)
+ strong_memoize(:memoized_method) do
+ param.to_s
+ end
+ end
+ end
+ RUBY
+ end
+ end
+
+ context 'when strong_memoize() is used in a singleton method' do
+ it 'does not register an offense' do
+ expect_no_offenses(<<~RUBY)
+ class Foo
+ def self.memoized_method
+ strong_memoize(:memoized_method) do
+ 'this is a memoized method'
+ end
+ end
+ end
+ RUBY
+ end
+ end
end
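The changes above adjust the cop's offense message to end with a period and update the expected autocorrection so it no longer passes a separate memoization key for predicate methods. As a hedged before/after sketch of the refactor the cop drives, assuming GitLab's Gitlab::Utils::StrongMemoize concern and a hypothetical expensive_check? helper:

class Foo
  include Gitlab::Utils::StrongMemoize

  # Before: a manual strong_memoize block, which the cop flags.
  def enabled_old?
    strong_memoize(:enabled_old) do
      expensive_check?
    end
  end

  # After: define the method normally, then memoize it by name.
  def enabled?
    expensive_check?
  end
  strong_memoize_attr :enabled?
end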
diff --git a/spec/rubocop/cop/lint/last_keyword_argument_spec.rb b/spec/rubocop/cop/lint/last_keyword_argument_spec.rb
index b0551a79c50..53f19cd01ee 100644
--- a/spec/rubocop/cop/lint/last_keyword_argument_spec.rb
+++ b/spec/rubocop/cop/lint/last_keyword_argument_spec.rb
@@ -3,7 +3,7 @@
require 'rubocop_spec_helper'
require_relative '../../../../rubocop/cop/lint/last_keyword_argument'
-RSpec.describe RuboCop::Cop::Lint::LastKeywordArgument do
+RSpec.describe RuboCop::Cop::Lint::LastKeywordArgument, :ruby27, feature_category: :not_owned do
before do
described_class.instance_variable_set(:@keyword_warnings, nil)
allow(Dir).to receive(:glob).and_call_original
@@ -156,5 +156,13 @@ RSpec.describe RuboCop::Cop::Lint::LastKeywordArgument do
users.call(params)
SOURCE
end
+
+ context 'with Ruby 3.0', :ruby30 do
+ it 'does not register an offense with known warning' do
+ expect_no_offenses(<<~SOURCE, 'create_service.rb')
+ users.call(params)
+ SOURCE
+ end
+ end
end
end
diff --git a/spec/scripts/trigger-build_spec.rb b/spec/scripts/trigger-build_spec.rb
index 52682387e20..760b9bda541 100644
--- a/spec/scripts/trigger-build_spec.rb
+++ b/spec/scripts/trigger-build_spec.rb
@@ -6,7 +6,7 @@ require 'rspec-parameterized'
require_relative '../../scripts/trigger-build'
-RSpec.describe Trigger do
+RSpec.describe Trigger, feature_category: :tooling do
let(:env) do
{
'CI_JOB_URL' => 'ci_job_url',
@@ -362,6 +362,28 @@ RSpec.describe Trigger do
end
end
+ describe "GITLAB_REF_SLUG" do
+ context 'when CI_COMMIT_TAG is set' do
+ before do
+ stub_env('CI_COMMIT_TAG', 'true')
+ end
+
+ it 'sets GITLAB_REF_SLUG to CI_COMMIT_REF_NAME' do
+ expect(subject.variables['GITLAB_REF_SLUG']).to eq(env['CI_COMMIT_REF_NAME'])
+ end
+ end
+
+ context 'when CI_COMMIT_TAG is nil' do
+ before do
+ stub_env('CI_COMMIT_TAG', nil)
+ end
+
+ it 'sets GITLAB_REF_SLUG to CI_COMMIT_SHA' do
+ expect(subject.variables['GITLAB_REF_SLUG']).to eq(env['CI_COMMIT_SHA'])
+ end
+ end
+ end
+
describe "#version_param_value" do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/serializers/ci/downloadable_artifact_entity_spec.rb b/spec/serializers/ci/downloadable_artifact_entity_spec.rb
index 34a271e7422..3142b03581d 100644
--- a/spec/serializers/ci/downloadable_artifact_entity_spec.rb
+++ b/spec/serializers/ci/downloadable_artifact_entity_spec.rb
@@ -17,7 +17,10 @@ RSpec.describe Ci::DownloadableArtifactEntity do
end
context 'when user cannot read job artifact' do
- let!(:build) { create(:ci_build, :success, :artifacts, :non_public_artifacts, pipeline: pipeline) }
+ let!(:build) do
+ create(:ci_build, :success, :private_artifacts,
+ pipeline: pipeline)
+ end
it 'returns only artifacts readable by user', :aggregate_failures do
expect(subject[:artifacts].size).to eq(1)
diff --git a/spec/serializers/ci/pipeline_entity_spec.rb b/spec/serializers/ci/pipeline_entity_spec.rb
index ff364918b4f..ae992e478a6 100644
--- a/spec/serializers/ci/pipeline_entity_spec.rb
+++ b/spec/serializers/ci/pipeline_entity_spec.rb
@@ -49,16 +49,6 @@ RSpec.describe Ci::PipelineEntity do
.to include :stuck, :auto_devops, :yaml_errors,
:retryable, :cancelable, :merge_request
end
-
- context 'when pipeline_name feature flag is disabled' do
- before do
- stub_feature_flags(pipeline_name: false)
- end
-
- it 'does not return name' do
- is_expected.not_to include(:name)
- end
- end
end
context 'when default branch not protected' do
diff --git a/spec/serializers/diffs_entity_spec.rb b/spec/serializers/diffs_entity_spec.rb
index ba40d538ccb..aa8e7275870 100644
--- a/spec/serializers/diffs_entity_spec.rb
+++ b/spec/serializers/diffs_entity_spec.rb
@@ -9,13 +9,11 @@ RSpec.describe DiffsEntity do
let(:request) { EntityRequest.new(project: project, current_user: user) }
let(:merge_request_diffs) { merge_request.merge_request_diffs }
- let(:merge_conflicts_in_diff) { false }
let(:options) do
{
request: request,
merge_request: merge_request,
- merge_request_diffs: merge_request_diffs,
- merge_conflicts_in_diff: merge_conflicts_in_diff
+ merge_request_diffs: merge_request_diffs
}
end
@@ -101,10 +99,9 @@ RSpec.describe DiffsEntity do
subject[:diff_files]
end
- context 'when merge_conflicts_in_diff is true' do
+ context 'when there are conflicts' do
let(:conflict_file) { double(path: diff_files.first.new_path, conflict_type: :both_modified) }
let(:conflicts) { double(conflicts: double(files: [conflict_file]), can_be_resolved_in_ui?: false) }
- let(:merge_conflicts_in_diff) { true }
before do
allow(merge_request).to receive(:cannot_be_merged?).and_return(true)
diff --git a/spec/serializers/diffs_metadata_entity_spec.rb b/spec/serializers/diffs_metadata_entity_spec.rb
index 04db576ffb5..415a0d8e450 100644
--- a/spec/serializers/diffs_metadata_entity_spec.rb
+++ b/spec/serializers/diffs_metadata_entity_spec.rb
@@ -9,7 +9,6 @@ RSpec.describe DiffsMetadataEntity do
let(:merge_request) { create(:merge_request_with_diffs, target_project: project, source_project: project) }
let(:merge_request_diffs) { merge_request.merge_request_diffs }
let(:merge_request_diff) { merge_request_diffs.last }
- let(:merge_conflicts_in_diff) { false }
let(:options) { {} }
let(:entity) do
@@ -18,8 +17,7 @@ RSpec.describe DiffsMetadataEntity do
options.merge(
request: request,
merge_request: merge_request,
- merge_request_diffs: merge_request_diffs,
- merge_conflicts_in_diff: merge_conflicts_in_diff
+ merge_request_diffs: merge_request_diffs
)
)
end
@@ -67,10 +65,9 @@ RSpec.describe DiffsMetadataEntity do
subject[:diff_files]
end
- context 'when merge_conflicts_in_diff is true' do
+ context 'when there are conflicts' do
let(:conflict_file) { double(path: raw_diff_files.first.new_path, conflict_type: :both_modified) }
let(:conflicts) { double(conflicts: double(files: [conflict_file]), can_be_resolved_in_ui?: false) }
- let(:merge_conflicts_in_diff) { true }
before do
allow(merge_request).to receive(:cannot_be_merged?).and_return(true)
diff --git a/spec/serializers/group_link/project_group_link_entity_spec.rb b/spec/serializers/group_link/project_group_link_entity_spec.rb
index f2a9f3a107a..1a8fcb2cfd3 100644
--- a/spec/serializers/group_link/project_group_link_entity_spec.rb
+++ b/spec/serializers/group_link/project_group_link_entity_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe GroupLink::ProjectGroupLinkEntity do
context 'when current user has `admin_project_member` permissions' do
before do
+ allow(entity).to receive(:can?).with(current_user, :admin_project_group_link, project_group_link).and_return(false)
allow(entity).to receive(:can?).with(current_user, :admin_project_member, project_group_link.project).and_return(true)
end
@@ -25,7 +26,33 @@ RSpec.describe GroupLink::ProjectGroupLinkEntity do
json = entity.as_json
expect(json[:can_update]).to be true
+ expect(json[:can_remove]).to be false
+ end
+ end
+
+ context 'when current user is a group owner' do
+ before do
+ allow(entity).to receive(:can?).with(current_user, :admin_project_group_link, project_group_link).and_return(true)
+ allow(entity).to receive(:can?).with(current_user, :admin_project_member, project_group_link.project).and_return(false)
+ end
+
+ it 'exposes `can_remove` as true' do
+ json = entity.as_json
+
expect(json[:can_remove]).to be true
end
end
+
+ context 'when current user is not a group owner' do
+ before do
+ allow(entity).to receive(:can?).with(current_user, :admin_project_group_link, project_group_link).and_return(false)
+ allow(entity).to receive(:can?).with(current_user, :admin_project_member, project_group_link.project).and_return(false)
+ end
+
+ it 'exposes `can_remove` as false' do
+ json = entity.as_json
+
+ expect(json[:can_remove]).to be false
+ end
+ end
end
diff --git a/spec/serializers/merge_requests/pipeline_entity_spec.rb b/spec/serializers/merge_requests/pipeline_entity_spec.rb
index a8f4fc44f10..414ce6653bc 100644
--- a/spec/serializers/merge_requests/pipeline_entity_spec.rb
+++ b/spec/serializers/merge_requests/pipeline_entity_spec.rb
@@ -51,15 +51,5 @@ RSpec.describe MergeRequests::PipelineEntity do
expect(entity.as_json).not_to include(:coverage)
end
-
- context 'when pipeline_name feature flag is disabled' do
- before do
- stub_feature_flags(pipeline_name: false)
- end
-
- it 'does not return name' do
- is_expected.not_to include(:name)
- end
- end
end
end
diff --git a/spec/serializers/paginated_diff_entity_spec.rb b/spec/serializers/paginated_diff_entity_spec.rb
index 3d77beb9abc..29484d170f8 100644
--- a/spec/serializers/paginated_diff_entity_spec.rb
+++ b/spec/serializers/paginated_diff_entity_spec.rb
@@ -7,13 +7,11 @@ RSpec.describe PaginatedDiffEntity do
let(:request) { double('request', current_user: user) }
let(:merge_request) { create(:merge_request) }
let(:diff_batch) { merge_request.merge_request_diff.diffs_in_batch(2, 3, diff_options: nil) }
- let(:merge_conflicts_in_diff) { false }
let(:options) do
{
request: request,
merge_request: merge_request,
- pagination_data: diff_batch.pagination_data,
- merge_conflicts_in_diff: merge_conflicts_in_diff
+ pagination_data: diff_batch.pagination_data
}
end
@@ -43,10 +41,9 @@ RSpec.describe PaginatedDiffEntity do
subject[:diff_files]
end
- context 'when merge_conflicts_in_diff is true' do
+ context 'when there are conflicts' do
let(:conflict_file) { double(path: diff_files.first.new_path, conflict_type: :both_modified) }
let(:conflicts) { double(conflicts: double(files: [conflict_file]), can_be_resolved_in_ui?: false) }
- let(:merge_conflicts_in_diff) { true }
before do
allow(merge_request).to receive(:cannot_be_merged?).and_return(true)
diff --git a/spec/serializers/project_mirror_entity_spec.rb b/spec/serializers/project_mirror_entity_spec.rb
index 7ed530ed9e8..88531b3c3d3 100644
--- a/spec/serializers/project_mirror_entity_spec.rb
+++ b/spec/serializers/project_mirror_entity_spec.rb
@@ -2,8 +2,8 @@
require 'spec_helper'
-RSpec.describe ProjectMirrorEntity do
- let(:project) { create(:project, :repository, :remote_mirror) }
+RSpec.describe ProjectMirrorEntity, feature_category: :source_code_management do
+ let(:project) { build(:project, :repository, :remote_mirror) }
let(:entity) { described_class.new(project) }
subject { entity.as_json }
diff --git a/spec/serializers/stage_entity_spec.rb b/spec/serializers/stage_entity_spec.rb
index 95d3fd254d4..5cb5724ebdc 100644
--- a/spec/serializers/stage_entity_spec.rb
+++ b/spec/serializers/stage_entity_spec.rb
@@ -63,7 +63,7 @@ RSpec.describe StageEntity do
context 'and contains commit status' do
before do
- create(:generic_commit_status, pipeline: pipeline, stage: 'test')
+ create(:generic_commit_status, pipeline: pipeline, ci_stage: stage)
end
it 'contains commit status' do
diff --git a/spec/services/achievements/create_service_spec.rb b/spec/services/achievements/create_service_spec.rb
new file mode 100644
index 00000000000..f62a45deb50
--- /dev/null
+++ b/spec/services/achievements/create_service_spec.rb
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Achievements::CreateService, feature_category: :users do
+ describe '#execute' do
+ let_it_be(:user) { create(:user) }
+
+ let(:params) { attributes_for(:achievement, namespace: group) }
+
+ subject(:response) { described_class.new(namespace: group, current_user: user, params: params).execute }
+
+ context 'when user does not have permission' do
+ let_it_be(:group) { create(:group) }
+
+ before_all do
+ group.add_developer(user)
+ end
+
+ it 'returns an error' do
+ expect(response).to be_error
+ expect(response.message).to match_array(
+ ['You have insufficient permissions to create achievements for this namespace'])
+ end
+ end
+
+ context 'when user has permission' do
+ let_it_be(:group) { create(:group) }
+
+ before_all do
+ group.add_maintainer(user)
+ end
+
+ it 'creates an achievement' do
+ expect(response).to be_success
+ end
+
+ it 'returns an error when the achievement is not persisted' do
+ params[:name] = nil
+
+ expect(response).to be_error
+ expect(response.message).to match_array(["Name can't be blank"])
+ end
+ end
+ end
+end
diff --git a/spec/services/audit_event_service_spec.rb b/spec/services/audit_event_service_spec.rb
index 063d250f22b..1d079adc0be 100644
--- a/spec/services/audit_event_service_spec.rb
+++ b/spec/services/audit_event_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe AuditEventService do
+RSpec.describe AuditEventService, :with_license do
let_it_be(:project) { create(:project) }
let_it_be(:user) { create(:user, :with_sign_ins) }
let_it_be(:project_member) { create(:project_member, user: user) }
diff --git a/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb b/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb
index 5a7852fc32f..9a74f5ca07a 100644
--- a/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb
+++ b/spec/services/bulk_imports/create_pipeline_trackers_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::CreatePipelineTrackersService do
+RSpec.describe BulkImports::CreatePipelineTrackersService, feature_category: :importers do
describe '#execute!' do
context 'when entity is group' do
it 'creates trackers for group entity' do
diff --git a/spec/services/bulk_imports/create_service_spec.rb b/spec/services/bulk_imports/create_service_spec.rb
index f1e5533139e..75f88e3989c 100644
--- a/spec/services/bulk_imports/create_service_spec.rb
+++ b/spec/services/bulk_imports/create_service_spec.rb
@@ -2,10 +2,11 @@
require 'spec_helper'
-RSpec.describe BulkImports::CreateService do
+RSpec.describe BulkImports::CreateService, feature_category: :importers do
let(:user) { create(:user) }
let(:credentials) { { url: 'http://gitlab.example', access_token: 'token' } }
let(:destination_group) { create(:group, path: 'destination1') }
+ let(:migrate_projects) { true }
let_it_be(:parent_group) { create(:group, path: 'parent-group') }
let(:params) do
[
@@ -13,19 +14,23 @@ RSpec.describe BulkImports::CreateService do
source_type: 'group_entity',
source_full_path: 'full/path/to/group1',
destination_slug: 'destination group 1',
- destination_namespace: 'full/path/to/destination1'
+ destination_namespace: 'parent-group',
+ migrate_projects: migrate_projects
+
},
{
source_type: 'group_entity',
source_full_path: 'full/path/to/group2',
destination_slug: 'destination group 2',
- destination_namespace: 'full/path/to/destination2'
+ destination_namespace: 'parent-group',
+ migrate_projects: migrate_projects
},
{
source_type: 'project_entity',
source_full_path: 'full/path/to/project1',
destination_slug: 'destination project 1',
- destination_namespace: 'full/path/to/destination1'
+ destination_namespace: 'parent-group',
+ migrate_projects: migrate_projects
}
]
end
@@ -33,113 +38,223 @@ RSpec.describe BulkImports::CreateService do
subject { described_class.new(user, params, credentials) }
describe '#execute' do
- let_it_be(:source_version) do
- Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION,
- ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)
- end
-
- before do
- allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
- allow(instance).to receive(:instance_version).and_return(source_version)
- allow(instance).to receive(:instance_enterprise).and_return(false)
- end
- end
+ context 'when gitlab version is 15.5 or higher' do
+ let(:source_version) { { version: "15.6.0", enterprise: false } }
- it 'creates bulk import' do
- parent_group.add_owner(user)
- expect { subject.execute }.to change { BulkImport.count }.by(1)
-
- last_bulk_import = BulkImport.last
-
- expect(last_bulk_import.user).to eq(user)
- expect(last_bulk_import.source_version).to eq(source_version.to_s)
- expect(last_bulk_import.user).to eq(user)
- expect(last_bulk_import.source_enterprise).to eq(false)
-
- expect_snowplow_event(
- category: 'BulkImports::CreateService',
- action: 'create',
- label: 'bulk_import_group'
- )
-
- expect_snowplow_event(
- category: 'BulkImports::CreateService',
- action: 'create',
- label: 'import_access_level',
- user: user,
- extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
- )
- end
-
- it 'creates bulk import entities' do
- expect { subject.execute }.to change { BulkImports::Entity.count }.by(3)
- end
+ context 'when a BulkImports::Error is raised while validating the instance version' do
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client)
+ .to receive(:validate_instance_version!)
+ .and_raise(BulkImports::Error, "This is a BulkImports error.")
+ end
+ end
- it 'creates bulk import configuration' do
- expect { subject.execute }.to change { BulkImports::Configuration.count }.by(1)
- end
+ it 'rescues the error and raises a ServiceResponse::Error' do
+ result = subject.execute
- it 'enqueues BulkImportWorker' do
- expect(BulkImportWorker).to receive(:perform_async)
+ expect(result).to be_a(ServiceResponse)
+ expect(result).to be_error
+ expect(result.message).to eq("This is a BulkImports error.")
+ end
+ end
- subject.execute
- end
+ context 'when required scopes are not present' do
+ it 'returns ServiceResponse with error if token does not have api scope' do
+ stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404)
+ stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
+ .to_return(
+ status: 200,
+ body: source_version.to_json,
+ headers: { 'Content-Type' => 'application/json' }
+ )
- it 'returns success ServiceResponse' do
- result = subject.execute
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |client|
+ allow(client).to receive(:validate_instance_version!).and_raise(BulkImports::Error.scope_validation_failure)
+ end
- expect(result).to be_a(ServiceResponse)
- expect(result).to be_success
- end
+ result = subject.execute
- it 'returns ServiceResponse with error if validation fails' do
- params[0][:source_full_path] = nil
+ expect(result).to be_a(ServiceResponse)
+ expect(result).to be_error
+ expect(result.message)
+ .to eq(
+ "Import aborted as the provided personal access token does not have the required 'api' scope or is " \
+ "no longer valid."
+ )
+ end
+ end
- result = subject.execute
+ context 'when token validation succeeds' do
+ before do
+ stub_request(:get, 'http://gitlab.example/api/v4/version?private_token=token').to_return(status: 404)
+ stub_request(:get, 'http://gitlab.example/api/v4/metadata?private_token=token')
+ .to_return(status: 200, body: source_version.to_json, headers: { 'Content-Type' => 'application/json' })
+ stub_request(:get, 'http://gitlab.example/api/v4/personal_access_tokens/self?private_token=token')
+ .to_return(
+ status: 200,
+ body: { 'scopes' => ['api'] }.to_json,
+ headers: { 'Content-Type' => 'application/json' }
+ )
+ end
- expect(result).to be_a(ServiceResponse)
- expect(result).to be_error
- expect(result.message).to eq("Validation failed: Source full path can't be blank")
- end
+ it 'creates bulk import' do
+ parent_group.add_owner(user)
+ expect { subject.execute }.to change { BulkImport.count }.by(1)
- describe '#user-role' do
- context 'when there is a parent_namespace and the user is a member' do
- let(:group2) { create(:group, path: 'destination200', source_id: parent_group.id ) }
- let(:params) do
- [
- {
- source_type: 'group_entity',
- source_full_path: 'full/path/to/group1',
- destination_slug: 'destination200',
- destination_namespace: 'parent-group'
- }
- ]
- end
+ last_bulk_import = BulkImport.last
+ expect(last_bulk_import.user).to eq(user)
+ expect(last_bulk_import.source_version).to eq(source_version[:version])
+ expect(last_bulk_import.source_enterprise).to eq(false)
- it 'defines access_level from parent namespace membership' do
- parent_group.add_guest(user)
- subject.execute
+ expect_snowplow_event(
+ category: 'BulkImports::CreateService',
+ action: 'create',
+ label: 'bulk_import_group'
+ )
expect_snowplow_event(
category: 'BulkImports::CreateService',
action: 'create',
label: 'import_access_level',
user: user,
- extra: { user_role: 'Guest', import_type: 'bulk_import_group' }
+ extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
)
end
+
+ describe 'projects migration flag' do
+ let(:import) { BulkImport.last }
+
+ context 'when false' do
+ let(:migrate_projects) { false }
+
+ it 'sets false' do
+ subject.execute
+
+ expect(import.entities.pluck(:migrate_projects)).to contain_exactly(false, false, false)
+ end
+ end
+
+ context 'when true' do
+ let(:migrate_projects) { true }
+
+ it 'sets true' do
+ subject.execute
+
+ expect(import.entities.pluck(:migrate_projects)).to contain_exactly(true, true, true)
+ end
+ end
+
+ context 'when nil' do
+ let(:migrate_projects) { nil }
+
+ it 'sets true' do
+ subject.execute
+
+ expect(import.entities.pluck(:migrate_projects)).to contain_exactly(true, true, true)
+ end
+ end
+ end
end
+ end
- context 'when there is a parent_namespace and the user is not a member' do
- let(:params) do
- [
- {
- source_type: 'group_entity',
- source_full_path: 'full/path/to/group1',
- destination_slug: 'destination-group-1',
- destination_namespace: 'parent-group'
- }
- ]
+ context 'when gitlab version is lower than 15.5' do
+ let(:source_version) do
+ Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION,
+ ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)
+ end
+
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
+ allow(instance).to receive(:instance_version).and_return(source_version)
+ allow(instance).to receive(:instance_enterprise).and_return(false)
+ end
+ end
+
+ it 'creates bulk import' do
+ parent_group.add_owner(user)
+ expect { subject.execute }.to change { BulkImport.count }.by(1)
+
+ last_bulk_import = BulkImport.last
+
+ expect(last_bulk_import.user).to eq(user)
+ expect(last_bulk_import.source_version).to eq(source_version.to_s)
+ expect(last_bulk_import.source_enterprise).to eq(false)
+
+ expect_snowplow_event(
+ category: 'BulkImports::CreateService',
+ action: 'create',
+ label: 'bulk_import_group'
+ )
+
+ expect_snowplow_event(
+ category: 'BulkImports::CreateService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Owner', import_type: 'bulk_import_group' }
+ )
+ end
+
+ it 'creates bulk import entities' do
+ expect { subject.execute }.to change { BulkImports::Entity.count }.by(3)
+ end
+
+ it 'creates bulk import configuration' do
+ expect { subject.execute }.to change { BulkImports::Configuration.count }.by(1)
+ end
+
+ it 'enqueues BulkImportWorker' do
+ expect(BulkImportWorker).to receive(:perform_async)
+
+ subject.execute
+ end
+
+ it 'returns success ServiceResponse' do
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result).to be_success
+ end
+
+ it 'returns ServiceResponse with error if validation fails' do
+ params[0][:source_full_path] = nil
+
+ result = subject.execute
+
+ expect(result).to be_a(ServiceResponse)
+ expect(result).to be_error
+ expect(result.message).to eq("Validation failed: Source full path can't be blank")
+ end
+
+ describe '#user-role' do
+ context 'when there is a parent_namespace and the user is a member' do
+ let(:group2) { create(:group, path: 'destination200', source_id: parent_group.id) }
+ let(:params) do
+ [
+ {
+ source_type: 'group_entity',
+ source_full_path: 'full/path/to/group1',
+ destination_slug: 'destination200',
+ destination_namespace: 'parent-group'
+ }
+ ]
+ end
+
+ it 'defines access_level from parent namespace membership' do
+ parent_group.add_guest(user)
+ subject.execute
+
+ expect_snowplow_event(
+ category: 'BulkImports::CreateService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Guest', import_type: 'bulk_import_group' }
+ )
+ end
end
it 'defines access_level as not a member' do
diff --git a/spec/services/bulk_imports/get_importable_data_service_spec.rb b/spec/services/bulk_imports/get_importable_data_service_spec.rb
index eccd3e5f49d..570f5199f01 100644
--- a/spec/services/bulk_imports/get_importable_data_service_spec.rb
+++ b/spec/services/bulk_imports/get_importable_data_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe BulkImports::GetImportableDataService do
+RSpec.describe BulkImports::GetImportableDataService, feature_category: :importers do
describe '#execute' do
include_context 'bulk imports requests context', 'https://gitlab.example.com'
@@ -34,6 +34,18 @@ RSpec.describe BulkImports::GetImportableDataService do
]
end
+ let(:source_version) do
+ Gitlab::VersionInfo.new(::BulkImport::MIN_MAJOR_VERSION,
+ ::BulkImport::MIN_MINOR_VERSION_FOR_PROJECT)
+ end
+
+ before do
+ allow_next_instance_of(BulkImports::Clients::HTTP) do |instance|
+ allow(instance).to receive(:instance_version).and_return(source_version)
+ allow(instance).to receive(:instance_enterprise).and_return(false)
+ end
+ end
+
subject do
described_class.new(params, query_params, credentials).execute
end
diff --git a/spec/services/chat_names/authorize_user_service_spec.rb b/spec/services/chat_names/authorize_user_service_spec.rb
index 53d90c7f100..4c261ece504 100644
--- a/spec/services/chat_names/authorize_user_service_spec.rb
+++ b/spec/services/chat_names/authorize_user_service_spec.rb
@@ -2,12 +2,11 @@
require 'spec_helper'
-RSpec.describe ChatNames::AuthorizeUserService do
+RSpec.describe ChatNames::AuthorizeUserService, feature_category: :users do
describe '#execute' do
- let(:integration) { create(:integration) }
let(:result) { subject.execute }
- subject { described_class.new(integration, params) }
+ subject { described_class.new(params) }
context 'when all parameters are valid' do
let(:params) { { team_id: 'T0001', team_domain: 'myteam', user_id: 'U0001', user_name: 'user' } }
diff --git a/spec/services/ci/create_downstream_pipeline_service_spec.rb b/spec/services/ci/create_downstream_pipeline_service_spec.rb
index bcdb2b4f796..fd978bffacb 100644
--- a/spec/services/ci/create_downstream_pipeline_service_spec.rb
+++ b/spec/services/ci/create_downstream_pipeline_service_spec.rb
@@ -41,12 +41,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute', feature_category
subject { service.execute(bridge) }
- shared_context 'when ci_bridge_remove_sourced_pipelines is disabled' do
- before do
- stub_feature_flags(ci_bridge_remove_sourced_pipelines: false)
- end
- end
-
context 'when downstream project has not been found' do
let(:trigger) do
{ trigger: { project: 'unknown/project' } }
@@ -128,19 +122,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute', feature_category
expect(pipeline.source_bridge).to be_a ::Ci::Bridge
end
- context 'when ci_bridge_remove_sourced_pipelines is disabled' do
- include_context 'when ci_bridge_remove_sourced_pipelines is disabled'
-
- it 'creates a new pipeline in a downstream project' do
- expect(pipeline.user).to eq bridge.user
- expect(pipeline.project).to eq downstream_project
- expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
- expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
- expect(pipeline.source_bridge).to eq bridge
- expect(pipeline.source_bridge).to be_a ::Ci::Bridge
- end
- end
-
it_behaves_like 'logs downstream pipeline creation' do
let(:downstream_pipeline) { pipeline }
let(:expected_root_pipeline) { upstream_pipeline }
@@ -179,31 +160,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute', feature_category
expect(subject).to be_error
expect(subject.message).to eq("Already has a downstream pipeline")
end
-
- context 'when ci_bridge_remove_sourced_pipelines is disabled' do
- include_context 'when ci_bridge_remove_sourced_pipelines is disabled'
-
- before do
- bridge.sourced_pipelines.create!(
- source_pipeline: bridge.pipeline,
- source_project: bridge.project,
- project: bridge.project,
- pipeline: create(:ci_pipeline, project: bridge.project)
- )
- end
-
- it 'logs an error and exits' do
- expect(Gitlab::ErrorTracking)
- .to receive(:track_exception)
- .with(
- instance_of(described_class::DuplicateDownstreamPipelineError),
- bridge_id: bridge.id, project_id: bridge.project.id)
- .and_call_original
- expect(Ci::CreatePipelineService).not_to receive(:new)
- expect(subject).to be_error
- expect(subject.message).to eq("Already has a downstream pipeline")
- end
- end
end
context 'when target ref is not specified' do
@@ -237,19 +193,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute', feature_category
expect(pipeline.source_bridge).to be_a ::Ci::Bridge
end
- context 'when ci_bridge_remove_sourced_pipelines is disabled' do
- include_context 'when ci_bridge_remove_sourced_pipelines is disabled'
-
- it 'creates a new pipeline in a downstream project' do
- expect(pipeline.user).to eq bridge.user
- expect(pipeline.project).to eq downstream_project
- expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
- expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
- expect(pipeline.source_bridge).to eq bridge
- expect(pipeline.source_bridge).to be_a ::Ci::Bridge
- end
- end
-
it 'updates the bridge status when downstream pipeline gets processed' do
expect(pipeline.reload).to be_failed
expect(bridge.reload).to be_failed
@@ -301,20 +244,6 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute', feature_category
expect(pipeline.source_bridge).to be_a ::Ci::Bridge
end
- context 'when ci_bridge_remove_sourced_pipelines is disabled' do
- include_context 'when ci_bridge_remove_sourced_pipelines is disabled'
-
- it 'creates a child pipeline in the same project' do
- expect(pipeline.builds.map(&:name)).to match_array(%w[rspec echo])
- expect(pipeline.user).to eq bridge.user
- expect(pipeline.project).to eq bridge.project
- expect(bridge.sourced_pipelines.first.pipeline).to eq pipeline
- expect(pipeline.triggered_by_pipeline).to eq upstream_pipeline
- expect(pipeline.source_bridge).to eq bridge
- expect(pipeline.source_bridge).to be_a ::Ci::Bridge
- end
- end
-
it 'updates bridge status when downstream pipeline gets processed' do
expect(pipeline.reload).to be_created
expect(bridge.reload).to be_success
@@ -825,11 +754,13 @@ RSpec.describe Ci::CreateDownstreamPipelineService, '#execute', feature_category
it 'does not create a pipeline and drops the bridge' do
expect { subject }.not_to change(downstream_project.ci_pipelines, :count)
expect(subject).to be_error
- expect(subject.message).to match_array(["No stages / jobs for this pipeline."])
+ expect(subject.message).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
expect(bridge.reload).to be_failed
expect(bridge.failure_reason).to eq('downstream_pipeline_creation_failed')
- expect(bridge.options[:downstream_errors]).to eq(['No stages / jobs for this pipeline.'])
+ expect(bridge.options[:downstream_errors]).to match_array(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
diff --git a/spec/services/ci/create_pipeline_service/cache_spec.rb b/spec/services/ci/create_pipeline_service/cache_spec.rb
index 82c3d374636..f9640f99031 100644
--- a/spec/services/ci/create_pipeline_service/cache_spec.rb
+++ b/spec/services/ci/create_pipeline_service/cache_spec.rb
@@ -37,6 +37,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
paths: ['logs/', 'binaries/'],
policy: 'pull-push',
untracked: true,
+ unprotect: false,
when: 'on_success'
}
@@ -69,7 +70,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
key: /[a-f0-9]{40}/,
paths: ['logs/'],
policy: 'pull-push',
- when: 'on_success'
+ when: 'on_success',
+ unprotect: false
}
expect(pipeline).to be_persisted
@@ -85,7 +87,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
key: /default/,
paths: ['logs/'],
policy: 'pull-push',
- when: 'on_success'
+ when: 'on_success',
+ unprotect: false
}
expect(pipeline).to be_persisted
@@ -118,7 +121,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
key: /\$ENV_VAR-[a-f0-9]{40}/,
paths: ['logs/'],
policy: 'pull-push',
- when: 'on_success'
+ when: 'on_success',
+ unprotect: false
}
expect(pipeline).to be_persisted
@@ -134,7 +138,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
key: /\$ENV_VAR-default/,
paths: ['logs/'],
policy: 'pull-push',
- when: 'on_success'
+ when: 'on_success',
+ unprotect: false
}
expect(pipeline).to be_persisted
diff --git a/spec/services/ci/create_pipeline_service/include_spec.rb b/spec/services/ci/create_pipeline_service/include_spec.rb
index 3764663fd74..f18b4883aaf 100644
--- a/spec/services/ci/create_pipeline_service/include_spec.rb
+++ b/spec/services/ci/create_pipeline_service/include_spec.rb
@@ -2,7 +2,10 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
+RSpec.describe Ci::CreatePipelineService,
+:yaml_processor_feature_flag_corectness, feature_category: :pipeline_authoring do
+ include RepoHelpers
+
context 'include:' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
@@ -16,14 +19,17 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
let(:file_location) { 'spec/fixtures/gitlab/ci/external_files/.gitlab-ci-template-1.yml' }
- before do
- allow(project.repository)
- .to receive(:blob_data_at).with(project.commit.id, '.gitlab-ci.yml')
- .and_return(config)
+ let(:project_files) do
+ {
+ '.gitlab-ci.yml' => config,
+ file_location => File.read(Rails.root.join(file_location))
+ }
+ end
- allow(project.repository)
- .to receive(:blob_data_at).with(project.commit.id, file_location)
- .and_return(File.read(Rails.root.join(file_location)))
+ around do |example|
+ create_and_delete_files(project, project_files) do
+ example.run
+ end
end
shared_examples 'not including the file' do
diff --git a/spec/services/ci/create_pipeline_service/logger_spec.rb b/spec/services/ci/create_pipeline_service/logger_spec.rb
index ccb15bfa684..ecb24a61075 100644
--- a/spec/services/ci/create_pipeline_service/logger_spec.rb
+++ b/spec/services/ci/create_pipeline_service/logger_spec.rb
@@ -139,5 +139,74 @@ RSpec.describe Ci::CreatePipelineService, # rubocop: disable RSpec/FilePath
expect(pipeline).to be_created_successfully
end
end
+
+ describe 'pipeline includes count' do
+ before do
+ stub_const('Gitlab::Ci::Config::External::Context::MAX_INCLUDES', 2)
+ end
+
+ context 'when the includes count exceeds the maximum' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:config_metadata)
+ .and_return({ includes: [{ file: 1 }, { file: 2 }, { file: 3 }] })
+ end
+ end
+
+ it 'creates a log entry' do
+ expect(Gitlab::AppJsonLogger)
+ .to receive(:info)
+ .with(a_hash_including({ 'pipeline_includes_count' => 3 }))
+ .and_call_original
+
+ expect(pipeline).to be_created_successfully
+ end
+ end
+
+ context 'when the includes count does not exceed the maximum' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:config_metadata)
+ .and_return({ includes: [{ file: 1 }, { file: 2 }] })
+ end
+ end
+
+ it 'does not create a log entry but it collects the data' do
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+ expect(pipeline).to be_created_successfully
+
+ expect(service.logger.observations_hash)
+ .to match(a_hash_including({ 'pipeline_includes_count' => 2 }))
+ end
+ end
+
+ context 'when the includes data is nil' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:config_metadata)
+ .and_return({})
+ end
+ end
+
+ it 'does not create a log entry' do
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+ expect(pipeline).to be_created_successfully
+ end
+ end
+
+ context 'when the pipeline config_metadata is nil' do
+ before do
+ allow_next_instance_of(Ci::Pipeline) do |pipeline|
+ allow(pipeline).to receive(:config_metadata)
+ .and_return(nil)
+ end
+ end
+
+ it 'does not create a log entry but it collects the data' do
+ expect(Gitlab::AppJsonLogger).not_to receive(:info)
+ expect(pipeline).to be_created_successfully
+ end
+ end
+ end
end
end
diff --git a/spec/services/ci/create_pipeline_service/rules_spec.rb b/spec/services/ci/create_pipeline_service/rules_spec.rb
index b866293393b..26bb8b7d006 100644
--- a/spec/services/ci/create_pipeline_service/rules_spec.rb
+++ b/spec/services/ci/create_pipeline_service/rules_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness, feature_category: :pipeline_authoring do
let(:project) { create(:project, :repository) }
let(:user) { project.first_owner }
let(:ref) { 'refs/heads/master' }
@@ -1166,7 +1166,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
let(:ref) { 'refs/heads/master' }
it 'invalidates the pipeline with an empty jobs error' do
- expect(pipeline.errors[:base]).to include('No stages / jobs for this pipeline.')
+ expect(pipeline.errors[:base]).to include('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
diff --git a/spec/services/ci/create_pipeline_service/variables_spec.rb b/spec/services/ci/create_pipeline_service/variables_spec.rb
index e9e0cf2c6e0..fd138bde656 100644
--- a/spec/services/ci/create_pipeline_service/variables_spec.rb
+++ b/spec/services/ci/create_pipeline_service/variables_spec.rb
@@ -60,27 +60,6 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
{ key: 'VAR8', value: "value 8 $CI_PIPELINE_ID", public: true, masked: false, raw: true }
)
end
-
- context 'when the FF ci_raw_variables_in_yaml_config is disabled' do
- before do
- stub_feature_flags(ci_raw_variables_in_yaml_config: false)
- end
-
- it 'creates the pipeline with a job that has all variables expanded' do
- expect(pipeline).to be_created_successfully
-
- expect(Ci::BuildRunnerPresenter.new(rspec).runner_variables).to include(
- { key: 'VAR1', value: "JOBID-#{rspec.id}", public: true, masked: false },
- { key: 'VAR2', value: "PIPELINEID-#{pipeline.id} and JOBID-#{rspec.id}", public: true, masked: false },
- { key: 'VAR3', value: "PIPELINEID-#{pipeline.id} and JOBID-#{rspec.id}", public: true, masked: false },
- { key: 'VAR4', value: "JOBID-#{rspec.id}", public: true, masked: false },
- { key: 'VAR5', value: "PIPELINEID-#{pipeline.id} and JOBID-#{rspec.id}", public: true, masked: false },
- { key: 'VAR6', value: "PIPELINEID-#{pipeline.id} and JOBID-#{rspec.id}", public: true, masked: false },
- { key: 'VAR7', value: "overridden value 7 #{pipeline.id}", public: true, masked: false },
- { key: 'VAR8', value: "value 8 #{pipeline.id}", public: true, masked: false }
- )
- end
- end
end
context 'when trigger variables have expand: true/false' do
@@ -109,22 +88,6 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
{ key: 'VAR3', value: "PIPELINEID-$CI_PIPELINE_ID and $VAR1", raw: true }
)
end
-
- context 'when the FF ci_raw_variables_in_yaml_config is disabled' do
- before do
- stub_feature_flags(ci_raw_variables_in_yaml_config: false)
- end
-
- it 'creates the pipeline with a job that has all variables expanded' do
- expect(pipeline).to be_created_successfully
-
- expect(child.downstream_variables).to include(
- { key: 'VAR1', value: "PROJECTID-#{project.id}" },
- { key: 'VAR2', value: "PIPELINEID-#{pipeline.id} and PROJECTID-$CI_PROJECT_ID" },
- { key: 'VAR3', value: "PIPELINEID-#{pipeline.id} and PROJECTID-$CI_PROJECT_ID" }
- )
- end
- end
end
end
diff --git a/spec/services/ci/create_pipeline_service_spec.rb b/spec/services/ci/create_pipeline_service_spec.rb
index 8628e95ba80..b0ba07ea295 100644
--- a/spec/services/ci/create_pipeline_service_spec.rb
+++ b/spec/services/ci/create_pipeline_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness, :clean_gitlab_redis_cache do
+RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectness, :clean_gitlab_redis_cache, feature_category: :continuous_integration do
include ProjectForksHelper
let_it_be_with_refind(:project) { create(:project, :repository) }
@@ -684,7 +684,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
result = execute_service
expect(result).to be_error
- expect(result.message).to eq('No stages / jobs for this pipeline.')
+ expect(result.message).to eq('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(result.payload).not_to be_persisted
expect(Ci::Build.all).to be_empty
expect(Ci::Pipeline.count).to eq(0)
@@ -759,7 +760,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
stub_ci_pipeline_yaml_file(config)
end
- it 'creates the environment with tags' do
+ it 'creates the environment with tags', :sidekiq_inline do
result = execute_service.payload
expect(result).to be_persisted
@@ -862,7 +863,7 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
stub_ci_pipeline_yaml_file(YAML.dump(ci_yaml))
end
- it 'creates a pipeline with the environment' do
+ it 'creates a pipeline with the environment', :sidekiq_inline do
result = execute_service.payload
expect(result).to be_persisted
@@ -1311,9 +1312,10 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
}
end
- it 'has a job with environment' do
+ it 'has a job with environment', :sidekiq_inline do
expect(pipeline.builds.count).to eq(1)
expect(pipeline.builds.first.persisted_environment.name).to eq('review/master')
+ expect(pipeline.builds.first.persisted_environment.name).to eq('review/master')
expect(pipeline.builds.first.deployment).to be_created
end
end
@@ -1423,9 +1425,11 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
- expect(response.message).to eq('No stages / jobs for this pipeline.')
+ expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
- expect(pipeline.errors[:base]).to eq(['No stages / jobs for this pipeline.'])
+ expect(pipeline.errors[:base]).to eq(['Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.'])
end
end
end
@@ -1633,7 +1637,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
- expect(response.message).to eq('No stages / jobs for this pipeline.')
+ expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
@@ -1669,7 +1674,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
- expect(response.message).to eq('No stages / jobs for this pipeline.')
+ expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
@@ -1697,7 +1703,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
- expect(response.message).to eq('No stages / jobs for this pipeline.')
+ expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
@@ -1727,7 +1734,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
- expect(response.message).to eq('No stages / jobs for this pipeline.')
+ expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
@@ -1755,7 +1763,8 @@ RSpec.describe Ci::CreatePipelineService, :yaml_processor_feature_flag_corectnes
it 'does not create a detached merge request pipeline', :aggregate_failures do
expect(response).to be_error
- expect(response.message).to eq('No stages / jobs for this pipeline.')
+ expect(response.message).to eq('Pipeline will not run for the selected trigger. ' \
+ 'The rules configuration prevented any jobs from being added to the pipeline.')
expect(pipeline).not_to be_persisted
end
end
diff --git a/spec/services/ci/job_artifacts/create_service_spec.rb b/spec/services/ci/job_artifacts/create_service_spec.rb
index 5df590a1b78..711002e28af 100644
--- a/spec/services/ci/job_artifacts/create_service_spec.rb
+++ b/spec/services/ci/job_artifacts/create_service_spec.rb
@@ -61,6 +61,49 @@ RSpec.describe Ci::JobArtifacts::CreateService do
expect(new_artifact.locked).to eq(job.pipeline.locked)
end
+ it 'sets accessibility level by default to public' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(1)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact).to be_public_accessibility
+ end
+
+ context 'when accessibility level passed as private' do
+ before do
+ params.merge!('accessibility' => 'private')
+ end
+
+ it 'sets accessibility level to private' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(1)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact).to be_private_accessibility
+ end
+ end
+
+ context 'when accessibility passed as public' do
+ before do
+ params.merge!('accessibility' => 'public')
+ end
+
+ it 'sets accessibility to public level' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(1)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact).to be_public_accessibility
+ end
+ end
+
+ context 'when accessibility passed as invalid value' do
+ before do
+ params.merge!('accessibility' => 'invalid_value')
+ end
+
+ it 'fails with argument error' do
+ expect { subject }.to raise_error(ArgumentError)
+ end
+ end
+
context 'when metadata file is also uploaded' do
let(:metadata_file) do
file_to_upload('spec/fixtures/ci_build_artifacts_metadata.gz', sha256: artifacts_sha256)
@@ -82,6 +125,39 @@ RSpec.describe Ci::JobArtifacts::CreateService do
expect(new_artifact.locked).to eq(job.pipeline.locked)
end
+ it 'sets accessibility by default to public' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(2)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact).to be_public_accessibility
+ end
+
+ context 'when accessibility level passed as private' do
+ before do
+ params.merge!('accessibility' => 'private')
+ end
+
+ it 'sets accessibility to private level' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(2)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact).to be_private_accessibility
+ end
+ end
+
+ context 'when accessibility passed as public' do
+ before do
+ params.merge!('accessibility' => 'public')
+ end
+
+ it 'sets accessibility level to public' do
+ expect { subject }.to change { Ci::JobArtifact.count }.by(2)
+
+ new_artifact = job.job_artifacts.last
+ expect(new_artifact).to be_public_accessibility
+ end
+ end
+
it 'sets expiration date according to application settings' do
expected_expire_at = 1.day.from_now
diff --git a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
index 4f7663d7996..dd10c0df374 100644
--- a/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_all_expired_service_spec.rb
@@ -87,12 +87,9 @@ RSpec.describe Ci::JobArtifacts::DestroyAllExpiredService, :clean_gitlab_redis_s
expect { subject }.to change { Ci::DeletedObject.count }.by(1)
end
- it 'resets project statistics' do
- expect(ProjectStatistics).to receive(:increment_statistic).once
- .with(artifact.project, :build_artifacts_size, -artifact.file.size)
- .and_call_original
-
- subject
+ it 'resets project statistics', :sidekiq_inline do
+ expect { subject }
+ .to change { artifact.project.statistics.reload.build_artifacts_size }.by(-artifact.file.size)
end
it 'does not remove the files' do
diff --git a/spec/services/ci/job_artifacts/destroy_associations_service_spec.rb b/spec/services/ci/job_artifacts/destroy_associations_service_spec.rb
index b1a4741851b..ca36c923dcf 100644
--- a/spec/services/ci/job_artifacts/destroy_associations_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_associations_service_spec.rb
@@ -3,23 +3,23 @@
require 'spec_helper'
RSpec.describe Ci::JobArtifacts::DestroyAssociationsService do
- let(:artifacts) { Ci::JobArtifact.all }
- let(:service) { described_class.new(artifacts) }
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
- let_it_be(:artifact, refind: true) do
- create(:ci_job_artifact)
- end
+ let_it_be(:artifact_1, refind: true) { create(:ci_job_artifact, :zip, project: project_1) }
+ let_it_be(:artifact_2, refind: true) { create(:ci_job_artifact, :zip, project: project_2) }
+ let_it_be(:artifact_3, refind: true) { create(:ci_job_artifact, :zip, project: project_1) }
- before do
- artifact.file = fixture_file_upload(Rails.root.join('spec/fixtures/ci_build_artifacts.zip'), 'application/zip')
- artifact.save!
- end
+ let(:artifacts) { Ci::JobArtifact.where(id: [artifact_1.id, artifact_2.id, artifact_3.id]) }
+ let(:service) { described_class.new(artifacts) }
describe '#destroy_records' do
it 'removes artifacts without updating statistics' do
- expect(ProjectStatistics).not_to receive(:increment_statistic)
+ expect_next_instance_of(Ci::JobArtifacts::DestroyBatchService) do |service|
+ expect(service).to receive(:execute).with(update_stats: false).and_call_original
+ end
- expect { service.destroy_records }.to change { Ci::JobArtifact.count }
+ expect { service.destroy_records }.to change { Ci::JobArtifact.count }.by(-3)
end
context 'when there are no artifacts' do
@@ -33,12 +33,21 @@ RSpec.describe Ci::JobArtifacts::DestroyAssociationsService do
describe '#update_statistics' do
before do
+ stub_const("#{described_class}::BATCH_SIZE", 2)
service.destroy_records
end
it 'updates project statistics' do
- expect(ProjectStatistics).to receive(:increment_statistic).once
- .with(artifact.project, :build_artifacts_size, -artifact.file.size)
+ project1_increments = [
+ have_attributes(amount: -artifact_1.size, ref: artifact_1.id),
+ have_attributes(amount: -artifact_3.size, ref: artifact_3.id)
+ ]
+ project2_increments = [have_attributes(amount: -artifact_2.size, ref: artifact_2.id)]
+
+ expect(ProjectStatistics).to receive(:bulk_increment_statistic).once
+ .with(project_1, :build_artifacts_size, match_array(project1_increments))
+ expect(ProjectStatistics).to receive(:bulk_increment_statistic).once
+ .with(project_2, :build_artifacts_size, match_array(project2_increments))
service.update_statistics
end
diff --git a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
index 79920dcb2c7..cde42783d8c 100644
--- a/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
+++ b/spec/services/ci/job_artifacts/destroy_batch_service_spec.rb
@@ -29,7 +29,7 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
create(:ci_job_artifact, :trace, :expired)
end
- describe '.execute' do
+ describe '#execute' do
subject(:execute) { service.execute }
it 'creates a deleted object for artifact with attached file' do
@@ -207,44 +207,58 @@ RSpec.describe Ci::JobArtifacts::DestroyBatchService do
end
end
- context 'ProjectStatistics' do
- it 'resets project statistics' do
- expect(ProjectStatistics).to receive(:increment_statistic).once
- .with(artifact_with_file.project, :build_artifacts_size, -artifact_with_file.file.size)
- .and_call_original
- expect(ProjectStatistics).to receive(:increment_statistic).once
- .with(artifact_without_file.project, :build_artifacts_size, 0)
- .and_call_original
+ context 'ProjectStatistics', :sidekiq_inline do
+ let_it_be(:project_1) { create(:project) }
+ let_it_be(:project_2) { create(:project) }
+
+ let(:artifact_with_file) { create(:ci_job_artifact, :zip, project: project_1) }
+ let(:artifact_with_file_2) { create(:ci_job_artifact, :zip, project: project_1) }
+ let(:artifact_without_file) { create(:ci_job_artifact, project: project_2) }
+ let!(:artifacts) { Ci::JobArtifact.where(id: [artifact_with_file.id, artifact_without_file.id, artifact_with_file_2.id]) }
+
+ it 'updates project statistics by the relevant amount' do
+ expected_amount = -(artifact_with_file.size + artifact_with_file_2.size)
+
+ expect { execute }
+ .to change { project_1.statistics.reload.build_artifacts_size }.by(expected_amount)
+ .and change { project_2.statistics.reload.build_artifacts_size }.by(0)
+ end
+
+ it 'increments project statistics with artifact size as amount and job artifact id as ref' do
+ project_1_increments = [
+ have_attributes(amount: -artifact_with_file.size, ref: artifact_with_file.id),
+ have_attributes(amount: -artifact_with_file_2.file.size, ref: artifact_with_file_2.id)
+ ]
+ project_2_increments = [have_attributes(amount: 0, ref: artifact_without_file.id)]
+
+ expect(ProjectStatistics).to receive(:bulk_increment_statistic).with(project_1, :build_artifacts_size, match_array(project_1_increments))
+ expect(ProjectStatistics).to receive(:bulk_increment_statistic).with(project_2, :build_artifacts_size, match_array(project_2_increments))
execute
end
context 'with update_stats: false' do
- let_it_be(:extra_artifact_with_file) do
- create(:ci_job_artifact, :zip, project: artifact_with_file.project)
- end
-
- let(:artifacts) do
- Ci::JobArtifact.where(id: [artifact_with_file.id, extra_artifact_with_file.id,
- artifact_without_file.id, trace_artifact.id])
- end
+ subject(:execute) { service.execute(update_stats: false) }
it 'does not update project statistics' do
- expect(ProjectStatistics).not_to receive(:increment_statistic)
-
- service.execute(update_stats: false)
+ expect { execute }.not_to change { [project_1.statistics.reload.build_artifacts_size, project_2.statistics.reload.build_artifacts_size] }
end
- it 'returns size statistics' do
+ it 'returns statistic updates per project' do
+ project_1_updates = [
+ have_attributes(amount: -artifact_with_file.size, ref: artifact_with_file.id),
+ have_attributes(amount: -artifact_with_file_2.file.size, ref: artifact_with_file_2.id)
+ ]
+ project_2_updates = [have_attributes(amount: 0, ref: artifact_without_file.id)]
+
expected_updates = {
statistics_updates: {
- artifact_with_file.project => -(artifact_with_file.file.size + extra_artifact_with_file.file.size),
- artifact_without_file.project => 0
+ project_1 => match_array(project_1_updates),
+ project_2 => project_2_updates
}
}
- expect(service.execute(update_stats: false)).to match(
- a_hash_including(expected_updates))
+ expect(execute).to match(a_hash_including(expected_updates))
end
end
end
diff --git a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
index 2f2af9f6c85..c1669e0424a 100644
--- a/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
+++ b/spec/services/ci/pipeline_processing/atomic_processing_service_spec.rb
@@ -3,6 +3,8 @@
require 'spec_helper'
RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category: :continuous_integration do
+ include RepoHelpers
+
describe 'Pipeline Processing Service Tests With Yaml' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { project.first_owner }
@@ -956,17 +958,16 @@ RSpec.describe Ci::PipelineProcessing::AtomicProcessingService, feature_category
Ci::CreatePipelineService.new(project, user, { ref: 'master' }).execute(:push).payload
end
- before do
- allow_next_instance_of(Repository) do |repository|
- allow(repository)
- .to receive(:blob_data_at)
- .with(an_instance_of(String), '.gitlab-ci.yml')
- .and_return(parent_config)
-
- allow(repository)
- .to receive(:blob_data_at)
- .with(an_instance_of(String), '.child.yml')
- .and_return(child_config)
+ let(:project_files) do
+ {
+ '.gitlab-ci.yml' => parent_config,
+ '.child.yml' => child_config
+ }
+ end
+
+ around do |example|
+ create_and_delete_files(project, project_files) do
+ example.run
end
end
diff --git a/spec/services/clusters/aws/authorize_role_service_spec.rb b/spec/services/clusters/aws/authorize_role_service_spec.rb
deleted file mode 100644
index 17bbc372675..00000000000
--- a/spec/services/clusters/aws/authorize_role_service_spec.rb
+++ /dev/null
@@ -1,102 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Aws::AuthorizeRoleService do
- subject { described_class.new(user, params: params).execute }
-
- let(:role) { create(:aws_role) }
- let(:user) { role.user }
- let(:credentials) { instance_double(Aws::Credentials) }
- let(:credentials_service) { instance_double(Clusters::Aws::FetchCredentialsService, execute: credentials) }
-
- let(:role_arn) { 'arn:my-role' }
- let(:region) { 'region' }
- let(:params) do
- params = ActionController::Parameters.new({
- cluster: {
- role_arn: role_arn,
- region: region
- }
- })
-
- params.require(:cluster).permit(:role_arn, :region)
- end
-
- before do
- allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
- .with(instance_of(Aws::Role)).and_return(credentials_service)
- end
-
- context 'role exists' do
- it 'updates the existing Aws::Role record and returns a set of credentials' do
- expect(subject.status).to eq(:ok)
- expect(subject.body).to eq(credentials)
- expect(role.reload.role_arn).to eq(role_arn)
- end
- end
-
- context 'errors' do
- shared_examples 'bad request' do
- it 'returns an empty hash' do
- expect(subject.status).to eq(:unprocessable_entity)
- expect(subject.body).to eq({ message: message })
- end
-
- it 'logs the error' do
- expect(::Gitlab::ErrorTracking).to receive(:track_exception)
-
- subject
- end
- end
-
- context 'role does not exist' do
- let(:user) { create(:user) }
- let(:message) { 'Error: Unable to find AWS role for current user' }
-
- include_examples 'bad request'
- end
-
- context 'supplied ARN is invalid' do
- let(:role_arn) { 'invalid' }
- let(:message) { 'Validation failed: Role arn must be a valid Amazon Resource Name' }
-
- include_examples 'bad request'
- end
-
- context 'client errors' do
- before do
- allow(credentials_service).to receive(:execute).and_raise(error)
- end
-
- context 'error fetching credentials' do
- let(:error) { Aws::STS::Errors::ServiceError.new(nil, 'error message') }
- let(:message) { 'AWS service error: error message' }
-
- include_examples 'bad request'
- end
-
- context 'error in assuming role' do
- let(:raw_message) { "User foo is not authorized to perform: sts:AssumeRole on resource bar" }
- let(:error) { Aws::STS::Errors::AccessDenied.new(nil, raw_message) }
- let(:message) { "Access denied: #{raw_message}" }
-
- include_examples 'bad request'
- end
-
- context 'credentials not configured' do
- let(:error) { Aws::Errors::MissingCredentialsError.new('error message') }
- let(:message) { "Error: No AWS credentials were supplied" }
-
- include_examples 'bad request'
- end
-
- context 'role not configured' do
- let(:error) { Clusters::Aws::FetchCredentialsService::MissingRoleError.new('error message') }
- let(:message) { "Error: No AWS provision role found for user" }
-
- include_examples 'bad request'
- end
- end
- end
-end
diff --git a/spec/services/clusters/aws/fetch_credentials_service_spec.rb b/spec/services/clusters/aws/fetch_credentials_service_spec.rb
deleted file mode 100644
index 0358ca1f535..00000000000
--- a/spec/services/clusters/aws/fetch_credentials_service_spec.rb
+++ /dev/null
@@ -1,139 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Aws::FetchCredentialsService do
- describe '#execute' do
- let(:user) { create(:user) }
- let(:provider) { create(:cluster_provider_aws, region: 'ap-southeast-2') }
-
- let(:gitlab_access_key_id) { 'gitlab-access-key-id' }
- let(:gitlab_secret_access_key) { 'gitlab-secret-access-key' }
-
- let(:gitlab_credentials) { Aws::Credentials.new(gitlab_access_key_id, gitlab_secret_access_key) }
- let(:sts_client) { Aws::STS::Client.new(credentials: gitlab_credentials, region: region) }
- let(:assumed_role) { instance_double(Aws::AssumeRoleCredentials, credentials: assumed_role_credentials) }
-
- let(:assumed_role_credentials) { double }
-
- subject { described_class.new(provision_role, provider: provider).execute }
-
- context 'provision role is configured' do
- let(:provision_role) { create(:aws_role, user: user, region: 'custom-region') }
-
- before do
- stub_application_setting(eks_access_key_id: gitlab_access_key_id)
- stub_application_setting(eks_secret_access_key: gitlab_secret_access_key)
-
- expect(Aws::Credentials).to receive(:new)
- .with(gitlab_access_key_id, gitlab_secret_access_key)
- .and_return(gitlab_credentials)
-
- expect(Aws::STS::Client).to receive(:new)
- .with(credentials: gitlab_credentials, region: region)
- .and_return(sts_client)
-
- expect(Aws::AssumeRoleCredentials).to receive(:new)
- .with(
- client: sts_client,
- role_arn: provision_role.role_arn,
- role_session_name: session_name,
- external_id: provision_role.role_external_id,
- policy: session_policy
- ).and_return(assumed_role)
- end
-
- context 'provider is specified' do
- let(:region) { provider.region }
- let(:session_name) { "gitlab-eks-cluster-#{provider.cluster_id}-user-#{user.id}" }
- let(:session_policy) { nil }
-
- it { is_expected.to eq assumed_role_credentials }
- end
-
- context 'provider is not specifed' do
- let(:provider) { nil }
- let(:region) { provision_role.region }
- let(:session_name) { "gitlab-eks-autofill-user-#{user.id}" }
- let(:session_policy) { 'policy-document' }
-
- subject { described_class.new(provision_role, provider: provider).execute }
-
- before do
- stub_file_read(Rails.root.join('vendor', 'aws', 'iam', 'eks_cluster_read_only_policy.json'), content: session_policy)
- end
-
- it { is_expected.to eq assumed_role_credentials }
-
- context 'region is not specifed' do
- let(:region) { Clusters::Providers::Aws::DEFAULT_REGION }
- let(:provision_role) { create(:aws_role, user: user, region: nil) }
-
- it { is_expected.to eq assumed_role_credentials }
- end
- end
- end
-
- context 'provision role is not configured' do
- let(:provision_role) { nil }
-
- it 'raises an error' do
- expect { subject }.to raise_error(described_class::MissingRoleError, 'AWS provisioning role not configured')
- end
- end
-
- context 'with an instance profile attached to an IAM role' do
- let(:sts_client) { Aws::STS::Client.new(region: region, stub_responses: true) }
- let(:provision_role) { create(:aws_role, user: user, region: 'custom-region') }
-
- before do
- stub_application_setting(eks_access_key_id: nil)
- stub_application_setting(eks_secret_access_key: nil)
-
- expect(Aws::STS::Client).to receive(:new)
- .with(region: region)
- .and_return(sts_client)
-
- expect(Aws::AssumeRoleCredentials).to receive(:new)
- .with(
- client: sts_client,
- role_arn: provision_role.role_arn,
- role_session_name: session_name,
- external_id: provision_role.role_external_id,
- policy: session_policy
- ).and_call_original
- end
-
- context 'provider is specified' do
- let(:region) { provider.region }
- let(:session_name) { "gitlab-eks-cluster-#{provider.cluster_id}-user-#{user.id}" }
- let(:session_policy) { nil }
-
- it 'returns credentials', :aggregate_failures do
- expect(subject.access_key_id).to be_present
- expect(subject.secret_access_key).to be_present
- expect(subject.session_token).to be_present
- end
- end
-
- context 'provider is not specifed' do
- let(:provider) { nil }
- let(:region) { provision_role.region }
- let(:session_name) { "gitlab-eks-autofill-user-#{user.id}" }
- let(:session_policy) { 'policy-document' }
-
- before do
- stub_file_read(Rails.root.join('vendor', 'aws', 'iam', 'eks_cluster_read_only_policy.json'), content: session_policy)
- end
-
- subject { described_class.new(provision_role, provider: provider).execute }
-
- it 'returns credentials', :aggregate_failures do
- expect(subject.access_key_id).to be_present
- expect(subject.secret_access_key).to be_present
- expect(subject.session_token).to be_present
- end
- end
- end
- end
-end
diff --git a/spec/services/clusters/aws/finalize_creation_service_spec.rb b/spec/services/clusters/aws/finalize_creation_service_spec.rb
deleted file mode 100644
index 6b0cb86eff0..00000000000
--- a/spec/services/clusters/aws/finalize_creation_service_spec.rb
+++ /dev/null
@@ -1,124 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Aws::FinalizeCreationService do
- describe '#execute' do
- let(:provider) { create(:cluster_provider_aws, :creating) }
- let(:platform) { provider.cluster.platform_kubernetes }
-
- let(:create_service_account_service) { double(execute: true) }
- let(:fetch_token_service) { double(execute: gitlab_token) }
- let(:kube_client) { double(create_config_map: true) }
- let(:cluster_stack) { double(outputs: [endpoint_output, cert_output, node_role_output]) }
- let(:node_auth_config_map) { double }
-
- let(:endpoint_output) { double(output_key: 'ClusterEndpoint', output_value: api_url) }
- let(:cert_output) { double(output_key: 'ClusterCertificate', output_value: Base64.encode64(ca_pem)) }
- let(:node_role_output) { double(output_key: 'NodeInstanceRole', output_value: node_role) }
-
- let(:api_url) { 'https://kubernetes.example.com' }
- let(:ca_pem) { File.read(Rails.root.join('spec/fixtures/clusters/sample_cert.pem')) }
- let(:gitlab_token) { 'gitlab-token' }
- let(:iam_token) { 'iam-token' }
- let(:node_role) { 'arn::aws::iam::123456789012:role/node-role' }
-
- subject { described_class.new.execute(provider) }
-
- before do
- allow(Clusters::Kubernetes::CreateOrUpdateServiceAccountService).to receive(:gitlab_creator)
- .with(kube_client, rbac: true)
- .and_return(create_service_account_service)
-
- allow(Clusters::Kubernetes::FetchKubernetesTokenService).to receive(:new)
- .with(
- kube_client,
- Clusters::Kubernetes::GITLAB_ADMIN_TOKEN_NAME,
- Clusters::Kubernetes::GITLAB_SERVICE_ACCOUNT_NAMESPACE)
- .and_return(fetch_token_service)
-
- allow(Gitlab::Kubernetes::KubeClient).to receive(:new)
- .with(
- api_url,
- auth_options: { bearer_token: iam_token },
- ssl_options: {
- verify_ssl: OpenSSL::SSL::VERIFY_PEER,
- cert_store: instance_of(OpenSSL::X509::Store)
- },
- http_proxy_uri: nil
- )
- .and_return(kube_client)
-
- allow(provider.api_client).to receive(:describe_stacks)
- .with(stack_name: provider.cluster.name)
- .and_return(double(stacks: [cluster_stack]))
-
- allow(Kubeclient::AmazonEksCredentials).to receive(:token)
- .with(provider.credentials, provider.cluster.name)
- .and_return(iam_token)
-
- allow(Gitlab::Kubernetes::ConfigMaps::AwsNodeAuth).to receive(:new)
- .with(node_role).and_return(double(generate: node_auth_config_map))
- end
-
- it 'configures the provider and platform' do
- subject
-
- expect(provider).to be_created
- expect(platform.api_url).to eq(api_url)
- expect(platform.ca_pem).to eq(ca_pem)
- expect(platform.token).to eq(gitlab_token)
- expect(platform).to be_rbac
- end
-
- it 'calls the create_service_account_service' do
- expect(create_service_account_service).to receive(:execute).once
-
- subject
- end
-
- it 'configures cluster node authentication' do
- expect(kube_client).to receive(:create_config_map).with(node_auth_config_map).once
-
- subject
- end
-
- describe 'error handling' do
- shared_examples 'provision error' do |message|
- it "sets the status to :errored with an appropriate error message" do
- subject
-
- expect(provider).to be_errored
- expect(provider.status_reason).to include(message)
- end
- end
-
- context 'failed to request stack details from AWS' do
- before do
- allow(provider.api_client).to receive(:describe_stacks)
- .and_raise(Aws::CloudFormation::Errors::ServiceError.new(double, "Error message"))
- end
-
- include_examples 'provision error', 'Failed to fetch CloudFormation stack'
- end
-
- context 'failed to create auth config map' do
- before do
- allow(kube_client).to receive(:create_config_map)
- .and_raise(Kubeclient::HttpError.new(500, 'Error', nil))
- end
-
- include_examples 'provision error', 'Failed to run Kubeclient'
- end
-
- context 'failed to save records' do
- before do
- allow(provider.cluster).to receive(:save!)
- .and_raise(ActiveRecord::RecordInvalid)
- end
-
- include_examples 'provision error', 'Failed to configure EKS provider'
- end
- end
- end
-end
diff --git a/spec/services/clusters/aws/provision_service_spec.rb b/spec/services/clusters/aws/provision_service_spec.rb
deleted file mode 100644
index 5efac29ec1e..00000000000
--- a/spec/services/clusters/aws/provision_service_spec.rb
+++ /dev/null
@@ -1,130 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Aws::ProvisionService do
- describe '#execute' do
- let(:provider) { create(:cluster_provider_aws) }
-
- let(:provision_role) { create(:aws_role, user: provider.created_by_user) }
- let(:client) { instance_double(Aws::CloudFormation::Client, create_stack: true) }
- let(:cloudformation_template) { double }
- let(:credentials) do
- instance_double(
- Aws::Credentials,
- access_key_id: 'key',
- secret_access_key: 'secret',
- session_token: 'token'
- )
- end
-
- let(:parameters) do
- [
- { parameter_key: 'ClusterName', parameter_value: provider.cluster.name },
- { parameter_key: 'ClusterRole', parameter_value: provider.role_arn },
- { parameter_key: 'KubernetesVersion', parameter_value: provider.kubernetes_version },
- { parameter_key: 'ClusterControlPlaneSecurityGroup', parameter_value: provider.security_group_id },
- { parameter_key: 'VpcId', parameter_value: provider.vpc_id },
- { parameter_key: 'Subnets', parameter_value: provider.subnet_ids.join(',') },
- { parameter_key: 'NodeAutoScalingGroupDesiredCapacity', parameter_value: provider.num_nodes.to_s },
- { parameter_key: 'NodeInstanceType', parameter_value: provider.instance_type },
- { parameter_key: 'KeyName', parameter_value: provider.key_name }
- ]
- end
-
- subject { described_class.new.execute(provider) }
-
- before do
- allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
- .with(provision_role, provider: provider)
- .and_return(double(execute: credentials))
-
- allow(provider).to receive(:api_client)
- .and_return(client)
-
- stub_file_read(Rails.root.join('vendor', 'aws', 'cloudformation', 'eks_cluster.yaml'), content: cloudformation_template)
- end
-
- it 'updates the provider status to :creating and configures the provider with credentials' do
- subject
-
- expect(provider).to be_creating
- expect(provider.access_key_id).to eq 'key'
- expect(provider.secret_access_key).to eq 'secret'
- expect(provider.session_token).to eq 'token'
- end
-
- it 'creates a CloudFormation stack' do
- expect(client).to receive(:create_stack).with(
- stack_name: provider.cluster.name,
- template_body: cloudformation_template,
- parameters: parameters,
- capabilities: ["CAPABILITY_IAM"]
- )
-
- subject
- end
-
- it 'schedules a worker to monitor creation status' do
- expect(WaitForClusterCreationWorker).to receive(:perform_in)
- .with(Clusters::Aws::VerifyProvisionStatusService::INITIAL_INTERVAL, provider.cluster_id)
-
- subject
- end
-
- describe 'error handling' do
- shared_examples 'provision error' do |message|
- it "sets the status to :errored with an appropriate error message" do
- subject
-
- expect(provider).to be_errored
- expect(provider.status_reason).to include(message)
- end
- end
-
- context 'invalid state transition' do
- before do
- allow(provider).to receive(:make_creating).and_return(false)
- end
-
- include_examples 'provision error', 'Failed to update provider record'
- end
-
- context 'AWS role is not configured' do
- before do
- allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
- .and_raise(Clusters::Aws::FetchCredentialsService::MissingRoleError)
- end
-
- include_examples 'provision error', 'Amazon role is not configured'
- end
-
- context 'AWS credentials are not configured' do
- before do
- allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
- .and_raise(Aws::Errors::MissingCredentialsError)
- end
-
- include_examples 'provision error', 'Amazon credentials are not configured'
- end
-
- context 'Authentication failure' do
- before do
- allow(Clusters::Aws::FetchCredentialsService).to receive(:new)
- .and_raise(Aws::STS::Errors::ServiceError.new(double, 'Error message'))
- end
-
- include_examples 'provision error', 'Amazon authentication failed'
- end
-
- context 'CloudFormation failure' do
- before do
- allow(client).to receive(:create_stack)
- .and_raise(Aws::CloudFormation::Errors::ServiceError.new(double, 'Error message'))
- end
-
- include_examples 'provision error', 'Amazon CloudFormation request failed'
- end
- end
- end
-end
diff --git a/spec/services/clusters/aws/verify_provision_status_service_spec.rb b/spec/services/clusters/aws/verify_provision_status_service_spec.rb
deleted file mode 100644
index b9a58b97842..00000000000
--- a/spec/services/clusters/aws/verify_provision_status_service_spec.rb
+++ /dev/null
@@ -1,76 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Aws::VerifyProvisionStatusService do
- describe '#execute' do
- let(:provider) { create(:cluster_provider_aws) }
-
- let(:stack) { double(stack_status: stack_status, creation_time: creation_time) }
- let(:creation_time) { 1.minute.ago }
-
- subject { described_class.new.execute(provider) }
-
- before do
- allow(provider.api_client).to receive(:describe_stacks)
- .with(stack_name: provider.cluster.name)
- .and_return(double(stacks: [stack]))
- end
-
- shared_examples 'provision error' do |message|
- it "sets the status to :errored with an appropriate error message" do
- subject
-
- expect(provider).to be_errored
- expect(provider.status_reason).to include(message)
- end
- end
-
- context 'stack creation is still in progress' do
- let(:stack_status) { 'CREATE_IN_PROGRESS' }
- let(:verify_service) { double(execute: true) }
-
- it 'schedules a worker to check again later' do
- expect(WaitForClusterCreationWorker).to receive(:perform_in)
- .with(described_class::POLL_INTERVAL, provider.cluster_id)
-
- subject
- end
-
- context 'stack creation is taking too long' do
- let(:creation_time) { 1.hour.ago }
-
- include_examples 'provision error', 'Kubernetes cluster creation time exceeds timeout'
- end
- end
-
- context 'stack creation is complete' do
- let(:stack_status) { 'CREATE_COMPLETE' }
- let(:finalize_service) { double(execute: true) }
-
- it 'finalizes creation' do
- expect(Clusters::Aws::FinalizeCreationService).to receive(:new).and_return(finalize_service)
- expect(finalize_service).to receive(:execute).with(provider).once
-
- subject
- end
- end
-
- context 'stack creation failed' do
- let(:stack_status) { 'CREATE_FAILED' }
-
- include_examples 'provision error', 'Unexpected status'
- end
-
- context 'error communicating with CloudFormation API' do
- let(:stack_status) { 'CREATE_IN_PROGRESS' }
-
- before do
- allow(provider.api_client).to receive(:describe_stacks)
- .and_raise(Aws::CloudFormation::Errors::ServiceError.new(double, 'Error message'))
- end
-
- include_examples 'provision error', 'Amazon CloudFormation request failed'
- end
- end
-end
diff --git a/spec/services/clusters/create_service_spec.rb b/spec/services/clusters/create_service_spec.rb
index 6e252bee7c0..95f10cdbd80 100644
--- a/spec/services/clusters/create_service_spec.rb
+++ b/spec/services/clusters/create_service_spec.rb
@@ -54,7 +54,6 @@ RSpec.describe Clusters::CreateService do
let!(:cluster) { create(:cluster, :provided_by_gcp, :production_environment, projects: [project]) }
it 'creates another cluster' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
expect { subject }.to change { Clusters::Cluster.count }.by(1)
end
end
diff --git a/spec/services/clusters/gcp/fetch_operation_service_spec.rb b/spec/services/clusters/gcp/fetch_operation_service_spec.rb
deleted file mode 100644
index 990cc745382..00000000000
--- a/spec/services/clusters/gcp/fetch_operation_service_spec.rb
+++ /dev/null
@@ -1,45 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Gcp::FetchOperationService do
- include GoogleApi::CloudPlatformHelpers
-
- describe '#execute' do
- let(:provider) { create(:cluster_provider_gcp, :creating) }
- let(:gcp_project_id) { provider.gcp_project_id }
- let(:zone) { provider.zone }
- let(:operation_id) { provider.operation_id }
-
- shared_examples 'success' do
- it 'yields' do
- expect { |b| described_class.new.execute(provider, &b) }
- .to yield_with_args
- end
- end
-
- shared_examples 'error' do
- it 'sets an error to provider object' do
- expect { |b| described_class.new.execute(provider, &b) }
- .not_to yield_with_args
- expect(provider.reload).to be_errored
- end
- end
-
- context 'when succeeded to fetch operation' do
- before do
- stub_cloud_platform_get_zone_operation(gcp_project_id, zone, operation_id)
- end
-
- it_behaves_like 'success'
- end
-
- context 'when Internal Server Error happened' do
- before do
- stub_cloud_platform_get_zone_operation_error(gcp_project_id, zone, operation_id)
- end
-
- it_behaves_like 'error'
- end
- end
-end
diff --git a/spec/services/clusters/gcp/finalize_creation_service_spec.rb b/spec/services/clusters/gcp/finalize_creation_service_spec.rb
deleted file mode 100644
index 9c553d0eec2..00000000000
--- a/spec/services/clusters/gcp/finalize_creation_service_spec.rb
+++ /dev/null
@@ -1,161 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Gcp::FinalizeCreationService, '#execute' do
- include GoogleApi::CloudPlatformHelpers
- include KubernetesHelpers
-
- let(:cluster) { create(:cluster, :project, :providing_by_gcp) }
- let(:provider) { cluster.provider }
- let(:platform) { cluster.platform }
- let(:endpoint) { '111.111.111.111' }
- let(:api_url) { 'https://' + endpoint }
- let(:secret_name) { 'gitlab-token' }
- let(:token) { 'sample-token' }
- let(:namespace) { "#{cluster.project.path}-#{cluster.project.id}" }
-
- subject { described_class.new.execute(provider) }
-
- shared_examples 'success' do
- it 'configures provider and kubernetes' do
- subject
-
- expect(provider).to be_created
- end
-
- it 'properly configures database models' do
- subject
-
- cluster.reload
-
- expect(provider.endpoint).to eq(endpoint)
- expect(platform.api_url).to eq(api_url)
- expect(platform.ca_cert).to eq(Base64.decode64(load_sample_cert).strip)
- expect(platform.token).to eq(token)
- end
- end
-
- shared_examples 'error' do
- it 'sets an error to provider object' do
- subject
-
- expect(provider.reload).to be_errored
- end
- end
-
- shared_examples 'kubernetes information not successfully fetched' do
- context 'when failed to fetch gke cluster info' do
- before do
- stub_cloud_platform_get_zone_cluster_error(provider.gcp_project_id, provider.zone, cluster.name)
- end
-
- it_behaves_like 'error'
- end
-
- context 'when token is empty' do
- let(:token) { '' }
-
- it_behaves_like 'error'
- end
-
- context 'when failed to fetch kubernetes token' do
- before do
- stub_kubeclient_get_secret_error(api_url, secret_name, namespace: 'default')
- end
-
- it_behaves_like 'error'
- end
-
- context 'when service account fails to create' do
- before do
- stub_kubeclient_create_service_account_error(api_url, namespace: 'default')
- end
-
- it_behaves_like 'error'
- end
- end
-
- shared_context 'kubernetes information successfully fetched' do
- before do
- stub_cloud_platform_get_zone_cluster(
- provider.gcp_project_id, provider.zone, cluster.name, { endpoint: endpoint }
- )
-
- stub_kubeclient_discover(api_url)
- stub_kubeclient_get_namespace(api_url)
- stub_kubeclient_create_namespace(api_url)
- stub_kubeclient_get_service_account_error(api_url, 'gitlab')
- stub_kubeclient_create_service_account(api_url)
- stub_kubeclient_create_secret(api_url)
- stub_kubeclient_put_secret(api_url, 'gitlab-token')
-
- stub_kubeclient_get_secret(
- api_url,
- metadata_name: secret_name,
- token: Base64.encode64(token),
- namespace: 'default'
- )
-
- stub_kubeclient_put_cluster_role_binding(api_url, 'gitlab-admin')
- end
- end
-
- context 'With a legacy ABAC cluster' do
- before do
- provider.legacy_abac = true
- end
-
- include_context 'kubernetes information successfully fetched'
-
- it_behaves_like 'success'
-
- it 'uses ABAC authorization type' do
- subject
- cluster.reload
-
- expect(platform).to be_abac
- expect(platform.authorization_type).to eq('abac')
- end
-
- it_behaves_like 'kubernetes information not successfully fetched'
- end
-
- context 'With an RBAC cluster' do
- before do
- provider.legacy_abac = false
- end
-
- include_context 'kubernetes information successfully fetched'
-
- it_behaves_like 'success'
-
- it 'uses RBAC authorization type' do
- subject
- cluster.reload
-
- expect(platform).to be_rbac
- expect(platform.authorization_type).to eq('rbac')
- end
-
- it_behaves_like 'kubernetes information not successfully fetched'
- end
-
- context 'With a Cloud Run cluster' do
- before do
- provider.cloud_run = true
- end
-
- include_context 'kubernetes information successfully fetched'
-
- it_behaves_like 'success'
-
- it 'has knative pre-installed' do
- subject
- cluster.reload
-
- expect(cluster.application_knative).to be_present
- expect(cluster.application_knative).to be_pre_installed
- end
- end
-end
diff --git a/spec/services/clusters/gcp/provision_service_spec.rb b/spec/services/clusters/gcp/provision_service_spec.rb
deleted file mode 100644
index c8b7f628e5b..00000000000
--- a/spec/services/clusters/gcp/provision_service_spec.rb
+++ /dev/null
@@ -1,71 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Gcp::ProvisionService do
- include GoogleApi::CloudPlatformHelpers
-
- describe '#execute' do
- let(:provider) { create(:cluster_provider_gcp, :scheduled) }
- let(:gcp_project_id) { provider.gcp_project_id }
- let(:zone) { provider.zone }
-
- shared_examples 'success' do
- it 'schedules a worker for status monitoring' do
- expect(WaitForClusterCreationWorker).to receive(:perform_in)
-
- described_class.new.execute(provider)
-
- expect(provider.reload).to be_creating
- end
- end
-
- shared_examples 'error' do
- it 'sets an error to provider object' do
- described_class.new.execute(provider)
-
- expect(provider.reload).to be_errored
- end
- end
-
- context 'when succeeded to request provision' do
- before do
- stub_cloud_platform_create_cluster(gcp_project_id, zone)
- end
-
- it_behaves_like 'success'
- end
-
- context 'when operation status is unexpected' do
- before do
- stub_cloud_platform_create_cluster(
- gcp_project_id, zone,
- {
- "status": 'unexpected'
- })
- end
-
- it_behaves_like 'error'
- end
-
- context 'when selfLink is unexpected' do
- before do
- stub_cloud_platform_create_cluster(
- gcp_project_id, zone,
- {
- "selfLink": 'unexpected'
- })
- end
-
- it_behaves_like 'error'
- end
-
- context 'when Internal Server Error happened' do
- before do
- stub_cloud_platform_create_cluster_error(gcp_project_id, zone)
- end
-
- it_behaves_like 'error'
- end
- end
-end
diff --git a/spec/services/clusters/gcp/verify_provision_status_service_spec.rb b/spec/services/clusters/gcp/verify_provision_status_service_spec.rb
deleted file mode 100644
index ffe4516c02b..00000000000
--- a/spec/services/clusters/gcp/verify_provision_status_service_spec.rb
+++ /dev/null
@@ -1,111 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe Clusters::Gcp::VerifyProvisionStatusService do
- include GoogleApi::CloudPlatformHelpers
-
- describe '#execute' do
- let(:provider) { create(:cluster_provider_gcp, :creating) }
- let(:gcp_project_id) { provider.gcp_project_id }
- let(:zone) { provider.zone }
- let(:operation_id) { provider.operation_id }
-
- shared_examples 'continue_creation' do
- it 'schedules a worker for status monitoring' do
- expect(WaitForClusterCreationWorker).to receive(:perform_in)
-
- described_class.new.execute(provider)
- end
- end
-
- shared_examples 'finalize_creation' do
- it 'schedules a worker for status monitoring' do
- expect_next_instance_of(Clusters::Gcp::FinalizeCreationService) do |instance|
- expect(instance).to receive(:execute)
- end
-
- described_class.new.execute(provider)
- end
- end
-
- shared_examples 'error' do
- it 'sets an error to provider object' do
- described_class.new.execute(provider)
-
- expect(provider.reload).to be_errored
- end
- end
-
- context 'when operation status is RUNNING' do
- before do
- stub_cloud_platform_get_zone_operation(
- gcp_project_id, zone, operation_id,
- {
- "status": 'RUNNING',
- "startTime": 1.minute.ago.strftime("%FT%TZ")
- })
- end
-
- it_behaves_like 'continue_creation'
-
- context 'when cluster creation time exceeds timeout' do
- before do
- stub_cloud_platform_get_zone_operation(
- gcp_project_id, zone, operation_id,
- {
- "status": 'RUNNING',
- "startTime": 30.minutes.ago.strftime("%FT%TZ")
- })
- end
-
- it_behaves_like 'error'
- end
- end
-
- context 'when operation status is PENDING' do
- before do
- stub_cloud_platform_get_zone_operation(
- gcp_project_id, zone, operation_id,
- {
- "status": 'PENDING',
- "startTime": 1.minute.ago.strftime("%FT%TZ")
- })
- end
-
- it_behaves_like 'continue_creation'
- end
-
- context 'when operation status is DONE' do
- before do
- stub_cloud_platform_get_zone_operation(
- gcp_project_id, zone, operation_id,
- {
- "status": 'DONE'
- })
- end
-
- it_behaves_like 'finalize_creation'
- end
-
- context 'when operation status is unexpected' do
- before do
- stub_cloud_platform_get_zone_operation(
- gcp_project_id, zone, operation_id,
- {
- "status": 'unexpected'
- })
- end
-
- it_behaves_like 'error'
- end
-
- context 'when failed to get operation status' do
- before do
- stub_cloud_platform_get_zone_operation_error(gcp_project_id, zone, operation_id)
- end
-
- it_behaves_like 'error'
- end
- end
-end
diff --git a/spec/services/database/consistency_check_service_spec.rb b/spec/services/database/consistency_check_service_spec.rb
index d7dee50f7c2..6288fedfb59 100644
--- a/spec/services/database/consistency_check_service_spec.rb
+++ b/spec/services/database/consistency_check_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Database::ConsistencyCheckService, feature_category: :database do
+RSpec.describe Database::ConsistencyCheckService, feature_category: :pods do
let(:batch_size) { 5 }
let(:max_batches) { 2 }
diff --git a/spec/services/design_management/save_designs_service_spec.rb b/spec/services/design_management/save_designs_service_spec.rb
index c69df5f2eb9..a87494d87f7 100644
--- a/spec/services/design_management/save_designs_service_spec.rb
+++ b/spec/services/design_management/save_designs_service_spec.rb
@@ -1,7 +1,7 @@
# frozen_string_literal: true
require 'spec_helper'
-RSpec.describe DesignManagement::SaveDesignsService do
+RSpec.describe DesignManagement::SaveDesignsService, feature_category: :design_management do
include DesignManagementTestHelpers
include ConcurrentHelpers
@@ -242,6 +242,27 @@ RSpec.describe DesignManagement::SaveDesignsService do
expect(updated_designs.first.versions.size).to eq(1)
end
end
+
+ context 'when detecting content type' do
+ it 'detects content type when feature flag is enabled' do
+ expect_next_instance_of(::Lfs::FileTransformer) do |file_transformer|
+ expect(file_transformer).to receive(:new_file)
+ .with(anything, anything, hash_including(detect_content_type: true)).and_call_original
+ end
+
+ run_service
+ end
+
+ it 'skips content type detection when feature flag is disabled' do
+ stub_feature_flags(design_management_allow_dangerous_images: false)
+ expect_next_instance_of(::Lfs::FileTransformer) do |file_transformer|
+ expect(file_transformer).to receive(:new_file)
+ .with(anything, anything, hash_including(detect_content_type: false)).and_call_original
+ end
+
+ run_service
+ end
+ end
end
context 'when a design has not changed since its previous version' do
diff --git a/spec/services/discussions/resolve_service_spec.rb b/spec/services/discussions/resolve_service_spec.rb
index 9cc27973bcb..a6e1bad30ce 100644
--- a/spec/services/discussions/resolve_service_spec.rb
+++ b/spec/services/discussions/resolve_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Discussions::ResolveService do
+RSpec.describe Discussions::ResolveService, feature_category: :code_review_workflow do
describe '#execute' do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user, developer_projects: [project]) }
@@ -46,6 +46,12 @@ RSpec.describe Discussions::ResolveService do
service.execute
end
+ it 'sends GraphQL triggers' do
+ expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(discussion.noteable)
+
+ service.execute
+ end
+
context 'with a project that requires all discussion to be resolved' do
before do
project.update!(only_allow_merge_if_all_discussions_are_resolved: true)
@@ -122,6 +128,12 @@ RSpec.describe Discussions::ResolveService do
service.execute
end
+
+ it 'does not send GraphQL triggers' do
+ expect(GraphqlTriggers).not_to receive(:merge_request_merge_status_updated).with(discussion.noteable)
+
+ service.execute
+ end
end
context 'when resolving a discussion' do
diff --git a/spec/services/discussions/unresolve_service_spec.rb b/spec/services/discussions/unresolve_service_spec.rb
index 0009239232c..e9f58e4e10e 100644
--- a/spec/services/discussions/unresolve_service_spec.rb
+++ b/spec/services/discussions/unresolve_service_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Discussions::UnresolveService do
+RSpec.describe Discussions::UnresolveService, feature_category: :code_review_workflow do
describe "#execute" do
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user, developer_projects: [project]) }
@@ -29,5 +29,32 @@ RSpec.describe Discussions::UnresolveService do
service.execute
end
+
+ it "sends GraphQL triggers" do
+ expect(GraphqlTriggers).to receive(:merge_request_merge_status_updated).with(discussion.noteable)
+
+ service.execute
+ end
+
+ context "when there are existing unresolved discussions" do
+ before do
+ create(:diff_note_on_merge_request, noteable: merge_request, project: project).to_discussion
+ end
+
+ it "does not send a GraphQL triggers" do
+ expect(GraphqlTriggers).not_to receive(:merge_request_merge_status_updated)
+
+ service.execute
+ end
+ end
+
+ context "when the noteable is not a merge request" do
+ it "does not send a GraphQL triggers" do
+ expect(discussion).to receive(:for_merge_request?).and_return(false)
+ expect(GraphqlTriggers).not_to receive(:merge_request_merge_status_updated)
+
+ service.execute
+ end
+ end
end
end
diff --git a/spec/services/draft_notes/publish_service_spec.rb b/spec/services/draft_notes/publish_service_spec.rb
index 81443eed7d3..44fe9063ac9 100644
--- a/spec/services/draft_notes/publish_service_spec.rb
+++ b/spec/services/draft_notes/publish_service_spec.rb
@@ -78,6 +78,10 @@ RSpec.describe DraftNotes::PublishService do
end
end
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { publish }
+ end
+
it 'does not publish any draft note' do
expect { publish }.not_to change { DraftNote.count }
end
@@ -97,6 +101,10 @@ RSpec.describe DraftNotes::PublishService do
end
end
+ it_behaves_like 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { publish }
+ end
+
it 'returns success' do
result = publish
diff --git a/spec/services/environments/stop_stale_service_spec.rb b/spec/services/environments/stop_stale_service_spec.rb
new file mode 100644
index 00000000000..46d770c30cc
--- /dev/null
+++ b/spec/services/environments/stop_stale_service_spec.rb
@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Environments::StopStaleService,
+ :clean_gitlab_redis_shared_state,
+ :sidekiq_inline,
+ feature_category: :continuous_delivery do
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+
+ let(:params) { { after: nil } }
+ let(:service) { described_class.new(project, user, params) }
+
+ describe '#execute' do
+ subject { service.execute }
+
+ let_it_be(:project) { create(:project, :repository) }
+ let_it_be(:user) { create(:user) }
+ let_it_be(:stale_environment) { create(:environment, project: project, updated_at: 2.weeks.ago) }
+ let_it_be(:stale_environment2) { create(:environment, project: project, updated_at: 2.weeks.ago) }
+ let_it_be(:recent_environment) { create(:environment, project: project, updated_at: Date.today) }
+
+ let_it_be(:params) { { before: 1.week.ago } }
+
+ before do
+ allow(service).to receive(:can?).with(user, :stop_environment, project).and_return(true)
+ end
+
+ it 'only stops stale environments' do
+ spy_service = Environments::AutoStopWorker.new
+
+ allow(Environments::AutoStopWorker).to receive(:new) { spy_service }
+
+ expect(spy_service).to receive(:perform).with(stale_environment.id).and_call_original
+ expect(spy_service).to receive(:perform).with(stale_environment2.id).and_call_original
+ expect(spy_service).not_to receive(:perform).with(recent_environment.id)
+
+ expect(Environment).to receive(:deployed_and_updated_before).with(project.id, params[:before]).and_call_original
+ expect(Environment).to receive(:without_protected).with(project).and_call_original
+
+ expect(subject.success?).to be_truthy
+
+ expect(stale_environment.reload).to be_stopped
+ expect(stale_environment2.reload).to be_stopped
+ expect(recent_environment.reload).to be_available
+ end
+ end
+end
diff --git a/spec/services/feature_flags/create_service_spec.rb b/spec/services/feature_flags/create_service_spec.rb
index 1c9bde70af3..1a32faad948 100644
--- a/spec/services/feature_flags/create_service_spec.rb
+++ b/spec/services/feature_flags/create_service_spec.rb
@@ -86,7 +86,7 @@ RSpec.describe FeatureFlags::CreateService do
end
end
- it 'creates audit event' do
+ it 'creates audit event', :with_license do
expect { subject }.to change { AuditEvent.count }.by(1)
expect(AuditEvent.last.details[:custom_message]).to start_with('Created feature flag feature_flag with description "description".')
expect(AuditEvent.last.details[:custom_message]).to include('Created strategy "default" with scopes "*".')
diff --git a/spec/services/feature_flags/destroy_service_spec.rb b/spec/services/feature_flags/destroy_service_spec.rb
index 740923db9b6..b2793dc0560 100644
--- a/spec/services/feature_flags/destroy_service_spec.rb
+++ b/spec/services/feature_flags/destroy_service_spec.rb
@@ -31,7 +31,7 @@ RSpec.describe FeatureFlags::DestroyService do
expect { subject }.to change { Operations::FeatureFlag.count }.by(-1)
end
- it 'creates audit log' do
+ it 'creates audit log', :with_license do
expect { subject }.to change { AuditEvent.count }.by(1)
expect(audit_event_message).to eq("Deleted feature flag #{feature_flag.name}.")
end
diff --git a/spec/services/feature_flags/update_service_spec.rb b/spec/services/feature_flags/update_service_spec.rb
index 8f985d34961..1c5af71a50a 100644
--- a/spec/services/feature_flags/update_service_spec.rb
+++ b/spec/services/feature_flags/update_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe FeatureFlags::UpdateService do
+RSpec.describe FeatureFlags::UpdateService, :with_license do
let_it_be(:project) { create(:project) }
let_it_be(:developer) { create(:user) }
let_it_be(:reporter) { create(:user) }
diff --git a/spec/services/files/base_service_spec.rb b/spec/services/files/base_service_spec.rb
new file mode 100644
index 00000000000..57fb378f1a0
--- /dev/null
+++ b/spec/services/files/base_service_spec.rb
@@ -0,0 +1,59 @@
+# frozen_string_literal: true
+
+require "spec_helper"
+
+RSpec.describe Files::BaseService, feature_category: :source_code_management do
+ let_it_be(:group) { create(:group) }
+ let_it_be(:project) { create(:project, group: group) }
+ let_it_be(:user) { create(:user) }
+ let(:params) { {} }
+
+ subject(:author_email) { described_class.new(project, user, params).instance_variable_get(:@author_email) }
+
+ before do
+ group.add_developer(user)
+ end
+
+ context 'with no namespace_commit_emails' do
+ it 'sets @author_email to user default email' do
+ expect(author_email).to eq(user.email)
+ end
+ end
+
+ context 'with an author_email in params and namespace_commit_email' do
+ let(:params) { { author_email: 'email_from_params@example.com' } }
+
+ before do
+ create(:namespace_commit_email, user: user, namespace: group)
+ end
+
+ it 'gives precedence to the parameter value for @author_email' do
+ expect(author_email).to eq('email_from_params@example.com')
+ end
+ end
+
+ context 'with a project namespace_commit_email' do
+ it 'sets @author_email to the project namespace_commit_email' do
+ namespace_commit_email = create(:namespace_commit_email, user: user, namespace: project.project_namespace)
+
+ expect(author_email).to eq(namespace_commit_email.email.email)
+ end
+ end
+
+ context 'with a group namespace_commit_email' do
+ it 'sets @author_email to the group namespace_commit_email' do
+ namespace_commit_email = create(:namespace_commit_email, user: user, namespace: group)
+
+ expect(author_email).to eq(namespace_commit_email.email.email)
+ end
+ end
+
+ context 'with a project and group namespace_commit_email' do
+ it 'sets @author_email to the project namespace_commit_email' do
+ namespace_commit_email = create(:namespace_commit_email, user: user, namespace: project.project_namespace)
+ create(:namespace_commit_email, user: user, namespace: group)
+
+ expect(author_email).to eq(namespace_commit_email.email.email)
+ end
+ end
+end
diff --git a/spec/services/groups/import_export/export_service_spec.rb b/spec/services/groups/import_export/export_service_spec.rb
index d6ce40f413b..ec42a728409 100644
--- a/spec/services/groups/import_export/export_service_spec.rb
+++ b/spec/services/groups/import_export/export_service_spec.rb
@@ -56,21 +56,11 @@ RSpec.describe Groups::ImportExport::ExportService do
end
it 'saves the models using ndjson tree saver' do
- stub_feature_flags(group_export_ndjson: true)
-
expect(Gitlab::ImportExport::Group::TreeSaver).to receive(:new).and_call_original
service.execute
end
- it 'saves the models using legacy tree saver' do
- stub_feature_flags(group_export_ndjson: false)
-
- expect(Gitlab::ImportExport::Group::LegacyTreeSaver).to receive(:new).and_call_original
-
- service.execute
- end
-
it 'compresses and removes tmp files' do
expect(group.import_export_upload).to be_nil
expect(Gitlab::ImportExport::Saver).to receive(:new).and_call_original
diff --git a/spec/services/groups/import_export/import_service_spec.rb b/spec/services/groups/import_export/import_service_spec.rb
index d41acbcc2de..972b12d7ee5 100644
--- a/spec/services/groups/import_export/import_service_spec.rb
+++ b/spec/services/groups/import_export/import_service_spec.rb
@@ -59,32 +59,32 @@ RSpec.describe Groups::ImportExport::ImportService do
end
end
- context 'with group_import_ndjson feature flag disabled' do
+ context 'when importing a ndjson export' do
let(:user) { create(:user) }
let(:group) { create(:group) }
+ let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
+
let(:import_logger) { instance_double(Gitlab::Import::Logger) }
subject(:service) { described_class.new(group: group, user: user) }
before do
- stub_feature_flags(group_import_ndjson: false)
-
- group.add_owner(user)
-
ImportExportUpload.create!(group: group, import_file: import_file)
allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
allow(import_logger).to receive(:error)
allow(import_logger).to receive(:info)
+ allow(import_logger).to receive(:warn)
+ allow(FileUtils).to receive(:rm_rf).and_call_original
end
- context 'with a json file' do
- let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export.tar.gz') }
-
- it 'uses LegacyTreeRestorer to import the file' do
- expect(Gitlab::ImportExport::Group::LegacyTreeRestorer).to receive(:new).and_call_original
+ context 'when user has correct permissions' do
+ before do
+ group.add_owner(user)
+ end
- service.execute
+ it 'imports group structure successfully' do
+ expect(service.execute).to be_truthy
end
it 'tracks the event' do
@@ -95,317 +95,151 @@ RSpec.describe Groups::ImportExport::ImportService do
action: 'create',
label: 'import_group_from_file'
)
- end
- end
-
- context 'with a ndjson file' do
- let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
- it 'fails to import' do
- expect { service.execute }.to raise_error(Gitlab::ImportExport::Error, 'Incorrect JSON format')
+ expect_snowplow_event(
+ category: 'Groups::ImportExport::ImportService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
+ )
end
- end
- end
-
- context 'with group_import_ndjson feature flag enabled' do
- before do
- stub_feature_flags(group_import_ndjson: true)
- end
-
- context 'when importing a ndjson export' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:import_file) { fixture_file_upload('spec/fixtures/group_export.tar.gz') }
- let(:import_logger) { instance_double(Gitlab::Import::Logger) }
-
- subject(:service) { described_class.new(group: group, user: user) }
-
- before do
- ImportExportUpload.create!(group: group, import_file: import_file)
+ it 'removes import file' do
+ service.execute
- allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
- allow(import_logger).to receive(:error)
- allow(import_logger).to receive(:info)
- allow(import_logger).to receive(:warn)
- allow(FileUtils).to receive(:rm_rf).and_call_original
+ expect(group.import_export_upload.import_file.file).to be_nil
end
- context 'when user has correct permissions' do
- before do
- group.add_owner(user)
- end
+ it 'removes tmp files' do
+ shared = Gitlab::ImportExport::Shared.new(group)
+ allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
- it 'imports group structure successfully' do
- expect(service.execute).to be_truthy
- end
-
- it 'tracks the event' do
- service.execute
-
- expect_snowplow_event(
- category: 'Groups::ImportExport::ImportService',
- action: 'create',
- label: 'import_group_from_file'
- )
-
- expect_snowplow_event(
- category: 'Groups::ImportExport::ImportService',
- action: 'create',
- label: 'import_access_level',
- user: user,
- extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
- )
- end
-
- it 'removes import file' do
- service.execute
-
- expect(group.import_export_upload.import_file.file).to be_nil
- end
-
- it 'removes tmp files' do
- shared = Gitlab::ImportExport::Shared.new(group)
- allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
-
- service.execute
-
- expect(FileUtils).to have_received(:rm_rf).with(shared.base_path)
- expect(Dir.exist?(shared.base_path)).to eq(false)
- end
-
- it 'logs the import success' do
- expect(import_logger).to receive(:info).with(
- group_id: group.id,
- group_name: group.name,
- message: 'Group Import/Export: Import succeeded'
- ).once
+ service.execute
- service.execute
- end
+ expect(FileUtils).to have_received(:rm_rf).with(shared.base_path)
+ expect(Dir.exist?(shared.base_path)).to eq(false)
end
- context 'when user does not have correct permissions' do
- it 'logs the error and raises an exception' do
- expect(import_logger).to receive(:error).with(
- group_id: group.id,
- group_name: group.name,
- message: a_string_including('Errors occurred')
- )
+ it 'logs the import success' do
+ expect(import_logger).to receive(:info).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: 'Group Import/Export: Import succeeded'
+ ).once
- expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
- end
-
- it 'tracks the error' do
- shared = Gitlab::ImportExport::Shared.new(group)
- allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
-
- expect(shared).to receive(:error) do |param|
- expect(param.message).to include 'does not have required permissions for'
- end
-
- expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
- end
+ service.execute
end
+ end
- context 'when there are errors with the import file' do
- let(:import_file) { fixture_file_upload('spec/fixtures/symlink_export.tar.gz') }
-
- it 'logs the error and raises an exception' do
- expect(import_logger).to receive(:error).with(
- group_id: group.id,
- group_name: group.name,
- message: a_string_including('Errors occurred')
- ).once
+ context 'when user does not have correct permissions' do
+ it 'logs the error and raises an exception' do
+ expect(import_logger).to receive(:error).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: a_string_including('Errors occurred')
+ )
- expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
- end
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
- context 'when there are errors with the sub-relations' do
- let(:import_file) { fixture_file_upload('spec/fixtures/group_export_invalid_subrelations.tar.gz') }
+ it 'tracks the error' do
+ shared = Gitlab::ImportExport::Shared.new(group)
+ allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
- before do
- group.add_owner(user)
+ expect(shared).to receive(:error) do |param|
+ expect(param.message).to include 'does not have required permissions for'
end
- it 'successfully imports the group' do
- expect(service.execute).to be_truthy
- end
-
- it 'logs the import success' do
- allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
+ end
+ end
- expect(import_logger).to receive(:info).with(
- group_id: group.id,
- group_name: group.name,
- message: 'Group Import/Export: Import succeeded'
- )
+ context 'when there are errors with the import file' do
+ let(:import_file) { fixture_file_upload('spec/fixtures/symlink_export.tar.gz') }
- service.execute
+ it 'logs the error and raises an exception' do
+ expect(import_logger).to receive(:error).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: a_string_including('Errors occurred')
+ ).once
- expect_snowplow_event(
- category: 'Groups::ImportExport::ImportService',
- action: 'create',
- label: 'import_access_level',
- user: user,
- extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
- )
- end
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
end
- context 'when importing a json export' do
- let(:user) { create(:user) }
- let(:group) { create(:group) }
- let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export.tar.gz') }
-
- let(:import_logger) { instance_double(Gitlab::Import::Logger) }
-
- subject(:service) { described_class.new(group: group, user: user) }
+ context 'when there are errors with the sub-relations' do
+ let(:import_file) { fixture_file_upload('spec/fixtures/group_export_invalid_subrelations.tar.gz') }
before do
- ImportExportUpload.create!(group: group, import_file: import_file)
-
- allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
- allow(import_logger).to receive(:error)
- allow(import_logger).to receive(:warn)
- allow(import_logger).to receive(:info)
- allow(FileUtils).to receive(:rm_rf).and_call_original
+ group.add_owner(user)
end
- context 'when user has correct permissions' do
- before do
- group.add_owner(user)
- end
-
- it 'imports group structure successfully' do
- expect(service.execute).to be_truthy
- end
-
- it 'tracks the event' do
- service.execute
-
- expect_snowplow_event(
- category: 'Groups::ImportExport::ImportService',
- action: 'create',
- label: 'import_group_from_file'
- )
-
- expect_snowplow_event(
- category: 'Groups::ImportExport::ImportService',
- action: 'create',
- label: 'import_access_level',
- user: user,
- extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
- )
- end
-
- it 'removes import file' do
- service.execute
-
- expect(group.import_export_upload.import_file.file).to be_nil
- end
-
- it 'removes tmp files' do
- shared = Gitlab::ImportExport::Shared.new(group)
- allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
-
- service.execute
-
- expect(FileUtils).to have_received(:rm_rf).with(shared.base_path)
- expect(Dir.exist?(shared.base_path)).to eq(false)
- end
-
- it 'logs the import success' do
- expect(import_logger).to receive(:info).with(
- group_id: group.id,
- group_name: group.name,
- message: 'Group Import/Export: Import succeeded'
- ).once
-
- service.execute
- end
+ it 'successfully imports the group' do
+ expect(service.execute).to be_truthy
end
- context 'when user does not have correct permissions' do
- it 'logs the error and raises an exception' do
- expect(import_logger).to receive(:error).with(
- group_id: group.id,
- group_name: group.name,
- message: a_string_including('Errors occurred')
- )
-
- expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
- end
+ it 'logs the import success' do
+ allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
- it 'tracks the error' do
- shared = Gitlab::ImportExport::Shared.new(group)
- allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
+ expect(import_logger).to receive(:info).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: 'Group Import/Export: Import succeeded'
+ )
- expect(shared).to receive(:error) do |param|
- expect(param.message).to include 'does not have required permissions for'
- end
+ service.execute
- expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
- end
+ expect_snowplow_event(
+ category: 'Groups::ImportExport::ImportService',
+ action: 'create',
+ label: 'import_access_level',
+ user: user,
+ extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
+ )
end
+ end
+ end
- context 'when there are errors with the import file' do
- let(:import_file) { fixture_file_upload('spec/fixtures/legacy_symlink_export.tar.gz') }
-
- it 'logs the error and raises an exception' do
- expect(import_logger).to receive(:error).with(
- group_id: group.id,
- group_name: group.name,
- message: a_string_including('Errors occurred')
- ).once
+ context 'when importing a json export' do
+ let(:user) { create(:user) }
+ let(:group) { create(:group) }
+ let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export.tar.gz') }
- expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
- end
- end
+ let(:import_logger) { instance_double(Gitlab::Import::Logger) }
- context 'when there are errors with the sub-relations' do
- let(:import_file) { fixture_file_upload('spec/fixtures/legacy_group_export_invalid_subrelations.tar.gz') }
+ subject(:service) { described_class.new(group: group, user: user) }
- before do
- group.add_owner(user)
- end
+ before do
+ group.add_owner(user)
+ ImportExportUpload.create!(group: group, import_file: import_file)
- it 'successfully imports the group' do
- expect(service.execute).to be_truthy
- end
+ allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+ allow(import_logger).to receive(:error)
+ allow(import_logger).to receive(:warn)
+ allow(import_logger).to receive(:info)
+ end
- it 'tracks the event' do
- service.execute
-
- expect_snowplow_event(
- category: 'Groups::ImportExport::ImportService',
- action: 'create',
- label: 'import_group_from_file'
- )
-
- expect_snowplow_event(
- category: 'Groups::ImportExport::ImportService',
- action: 'create',
- label: 'import_access_level',
- user: user,
- extra: { user_role: 'Owner', import_type: 'import_group_from_file' }
- )
- end
+ it 'logs the error and raises an exception' do
+ expect(import_logger).to receive(:error).with(
+ group_id: group.id,
+ group_name: group.name,
+ message: a_string_including('Errors occurred')
+ ).once
- it 'logs the import success' do
- allow(Gitlab::Import::Logger).to receive(:build).and_return(import_logger)
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
+ end
- expect(import_logger).to receive(:info).with(
- group_id: group.id,
- group_name: group.name,
- message: 'Group Import/Export: Import succeeded'
- )
+ it 'tracks the error' do
+ shared = Gitlab::ImportExport::Shared.new(group)
+ allow(Gitlab::ImportExport::Shared).to receive(:new).and_return(shared)
- service.execute
- end
+ expect(shared).to receive(:error) do |param|
+ expect(param.message).to include 'The import file is incompatible'
end
+
+ expect { service.execute }.to raise_error(Gitlab::ImportExport::Error)
end
end
end
diff --git a/spec/services/groups/transfer_service_spec.rb b/spec/services/groups/transfer_service_spec.rb
index 3cf2c875341..10399bed655 100644
--- a/spec/services/groups/transfer_service_spec.rb
+++ b/spec/services/groups/transfer_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Groups::TransferService, :sidekiq_inline do
+RSpec.describe Groups::TransferService, :sidekiq_inline, feature_category: :subgroups do
shared_examples 'project namespace path is in sync with project path' do
it 'keeps project and project namespace attributes in sync' do
projects_with_project_namespace.each do |project|
@@ -364,7 +364,7 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
let(:new_parent_group) { create(:group, shared_runners_enabled: false, allow_descendants_override_disabled_shared_runners: true) }
it 'calls update service' do
- expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: Namespace::SR_DISABLED_WITH_OVERRIDE }).and_call_original
+ expect(Groups::UpdateSharedRunnersService).to receive(:new).with(group, user, { shared_runners_setting: Namespace::SR_DISABLED_AND_OVERRIDABLE }).and_call_original
transfer_service.execute(new_parent_group)
end
@@ -1005,5 +1005,38 @@ RSpec.describe Groups::TransferService, :sidekiq_inline do
end
end
end
+
+ context 'with namespace_commit_emails concerns' do
+ let_it_be(:group, reload: true) { create(:group) }
+ let_it_be(:target) { create(:group) }
+
+ before do
+ group.add_owner(user)
+ target.add_owner(user)
+ end
+
+ context 'when origin is a root group' do
+ before do
+ create_list(:namespace_commit_email, 2, namespace: group)
+ end
+
+ it 'deletes all namespace_commit_emails' do
+ expect { transfer_service.execute(target) }
+ .to change { group.namespace_commit_emails.count }.by(-2)
+ end
+
+ it_behaves_like 'publishes a GroupTransferedEvent'
+ end
+
+ context 'when origin is not a root group' do
+ let(:group) { create(:group, parent: create(:group)) }
+
+ it 'does not attempt to delete namespace_commit_emails' do
+ expect(Users::NamespaceCommitEmail).not_to receive(:delete_for_namespace)
+
+ transfer_service.execute(target)
+ end
+ end
+ end
end
end
diff --git a/spec/services/groups/update_shared_runners_service_spec.rb b/spec/services/groups/update_shared_runners_service_spec.rb
index 98eccedeace..a29f73a71c2 100644
--- a/spec/services/groups/update_shared_runners_service_spec.rb
+++ b/spec/services/groups/update_shared_runners_service_spec.rb
@@ -114,13 +114,13 @@ RSpec.describe Groups::UpdateSharedRunnersService do
end
context 'allow descendants to override' do
- let(:params) { { shared_runners_setting: Namespace::SR_DISABLED_WITH_OVERRIDE } }
+ let(:params) { { shared_runners_setting: Namespace::SR_DISABLED_AND_OVERRIDABLE } }
context 'top level group' do
let_it_be(:group) { create(:group, :shared_runners_disabled) }
it 'receives correct method and succeeds' do
- expect(group).to receive(:update_shared_runners_setting!).with(Namespace::SR_DISABLED_WITH_OVERRIDE)
+ expect(group).to receive(:update_shared_runners_setting!).with(Namespace::SR_DISABLED_AND_OVERRIDABLE)
expect(subject[:status]).to eq(:success)
end
@@ -135,6 +135,30 @@ RSpec.describe Groups::UpdateSharedRunnersService do
expect(subject[:message]).to eq('Validation failed: Allow descendants override disabled shared runners cannot be enabled because parent group does not allow it')
end
end
+
+ context 'when using DISABLED_WITH_OVERRIDE (deprecated)' do
+ let(:params) { { shared_runners_setting: Namespace::SR_DISABLED_WITH_OVERRIDE } }
+
+ context 'top level group' do
+ let_it_be(:group) { create(:group, :shared_runners_disabled) }
+
+ it 'receives correct method and succeeds' do
+ expect(group).to receive(:update_shared_runners_setting!).with(Namespace::SR_DISABLED_WITH_OVERRIDE)
+
+ expect(subject[:status]).to eq(:success)
+ end
+ end
+
+ context 'when parent does not allow' do
+ let_it_be(:parent) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false) }
+ let_it_be(:group) { create(:group, :shared_runners_disabled, allow_descendants_override_disabled_shared_runners: false, parent: parent) }
+
+ it 'returns an error' do
+ expect(subject[:status]).to eq(:error)
+ expect(subject[:message]).to eq('Validation failed: Allow descendants override disabled shared runners cannot be enabled because parent group does not allow it')
+ end
+ end
+ end
end
end
end
diff --git a/spec/services/ide/schemas_config_service_spec.rb b/spec/services/ide/schemas_config_service_spec.rb
index 69ad9b5cbea..f277b8e9954 100644
--- a/spec/services/ide/schemas_config_service_spec.rb
+++ b/spec/services/ide/schemas_config_service_spec.rb
@@ -20,35 +20,21 @@ RSpec.describe Ide::SchemasConfigService do
subject { described_class.new(project, user, filename: filename).execute }
- context 'feature flag schema_linting is enabled', unless: Gitlab.ee? do
- before do
- stub_feature_flags(schema_linting: true)
- end
-
- context 'when no predefined schema exists for the given filename' do
- it 'returns an empty object' do
- is_expected.to include(
- status: :success,
- schema: {})
- end
- end
-
- context 'when a predefined schema exists for the given filename' do
- let(:filename) { '.gitlab-ci.yml' }
-
- it 'uses predefined schema matches' do
- expect(Gitlab::HTTP).to receive(:get).with('https://json.schemastore.org/gitlab-ci')
- expect(subject[:schema]['title']).to eq "Sample schema"
- end
- end
- end
-
- context 'feature flag schema_linting is disabled', unless: Gitlab.ee? do
+ context 'when no predefined schema exists for the given filename', unless: Gitlab.ee? do
it 'returns an empty object' do
is_expected.to include(
status: :success,
schema: {})
end
end
+
+ context 'when a predefined schema exists for the given filename' do
+ let(:filename) { '.gitlab-ci.yml' }
+
+ it 'uses predefined schema matches' do
+ expect(Gitlab::HTTP).to receive(:get).with('https://json.schemastore.org/gitlab-ci')
+ expect(subject[:schema]['title']).to eq "Sample schema"
+ end
+ end
end
end
diff --git a/spec/services/import/github/gists_import_service_spec.rb b/spec/services/import/github/gists_import_service_spec.rb
index c5d73e6479d..32d04a580da 100644
--- a/spec/services/import/github/gists_import_service_spec.rb
+++ b/spec/services/import/github/gists_import_service_spec.rb
@@ -2,16 +2,19 @@
require 'spec_helper'
-RSpec.describe Import::Github::GistsImportService, feature_category: :importer do
- subject(:import) { described_class.new(user, params) }
+RSpec.describe Import::Github::GistsImportService, feature_category: :importers do
+ subject(:import) { described_class.new(user, client, params) }
let_it_be(:user) { create(:user) }
let(:params) { { github_access_token: 'token' } }
let(:import_status) { instance_double('Gitlab::GithubGistsImport::Status') }
+ let(:client) { Gitlab::GithubImport::Client.new(params[:github_access_token]) }
+ let(:octokit_user) { { login: 'user_login' } }
describe '#execute', :aggregate_failures do
before do
allow(Gitlab::GithubGistsImport::Status).to receive(:new).and_return(import_status)
+ allow(client.octokit).to receive(:user).and_return(octokit_user)
end
context 'when import in progress' do
@@ -43,5 +46,24 @@ RSpec.describe Import::Github::GistsImportService, feature_category: :importer d
expect(import.execute).to eq({ status: :success })
end
end
+
+ context 'when user token is invalid' do
+ before do
+ allow(client.octokit).to receive(:user).and_raise(Octokit::Unauthorized)
+ allow(import_status).to receive(:started?).and_return(false)
+ end
+
+ let(:expected_result) do
+ {
+ http_status: 401,
+ message: 'Access denied to the GitHub account.',
+ status: :error
+ }
+ end
+
+ it 'returns 401 error' do
+ expect(import.execute).to eq(expected_result)
+ end
+ end
end
end
diff --git a/spec/services/import/github_service_spec.rb b/spec/services/import/github_service_spec.rb
index d1b372c5e87..293e247c140 100644
--- a/spec/services/import/github_service_spec.rb
+++ b/spec/services/import/github_service_spec.rb
@@ -7,22 +7,19 @@ RSpec.describe Import::GithubService do
let_it_be(:token) { 'complex-token' }
let_it_be(:access_params) { { github_access_token: 'github-complex-token' } }
let(:settings) { instance_double(Gitlab::GithubImport::Settings) }
+ let(:user_namespace_path) { user.namespace_path }
let(:optional_stages) { nil }
let(:params) do
{
repo_id: 123,
new_name: 'new_repo',
- target_namespace: 'root',
+ target_namespace: user_namespace_path,
optional_stages: optional_stages
}
end
subject(:github_importer) { described_class.new(client, user, params) }
- before do
- allow(subject).to receive(:authorized?).and_return(true)
- end
-
shared_examples 'handles errors' do |klass|
let(:client) { klass.new(token) }
let(:project_double) { instance_double(Project, persisted?: true) }
@@ -74,6 +71,7 @@ RSpec.describe Import::GithubService do
let(:repository_double) { { name: 'repository', size: 99 } }
before do
+ allow(subject).to receive(:authorized?).and_return(true)
expect(client).to receive(:repository).and_return(repository_double)
allow_next_instance_of(Gitlab::LegacyGithubImport::ProjectCreator) do |creator|
@@ -215,6 +213,38 @@ RSpec.describe Import::GithubService do
end
end
end
+
+ context 'when target_namespace is blank' do
+ before do
+ params[:target_namespace] = ''
+ end
+
+ it 'raises an exception' do
+ expect { subject.execute(access_params, :github) }.to raise_error(ArgumentError, 'Target namespace is required')
+ end
+ end
+
+ context 'when namespace to import repository into does not exist' do
+ before do
+ params[:target_namespace] = 'unknown_path'
+ end
+
+ it 'returns an error' do
+ expect(github_importer.execute(access_params, :github)).to include(not_existed_namespace_error)
+ end
+ end
+
+ context 'when user has no permissions to import repository into the specified namespace' do
+ let_it_be(:group) { create(:group) }
+
+ before do
+ params[:target_namespace] = group.full_path
+ end
+
+ it 'returns an error' do
+ expect(github_importer.execute(access_params, :github)).to include(taken_namespace_error)
+ end
+ end
end
context 'when remove_legacy_github_client feature flag is enabled' do
@@ -248,4 +278,20 @@ RSpec.describe Import::GithubService do
message: "Invalid URL: #{url}"
}
end
+
+ def not_existed_namespace_error
+ {
+ status: :error,
+ http_status: :unprocessable_entity,
+ message: 'Namespace or group to import repository into does not exist.'
+ }
+ end
+
+ def taken_namespace_error
+ {
+ status: :error,
+ http_status: :unprocessable_entity,
+ message: 'This namespace has already been taken. Choose a different one.'
+ }
+ end
end
diff --git a/spec/services/issue_links/create_service_spec.rb b/spec/services/issue_links/create_service_spec.rb
index 88e8470658d..0629b8b091b 100644
--- a/spec/services/issue_links/create_service_spec.rb
+++ b/spec/services/issue_links/create_service_spec.rb
@@ -9,7 +9,7 @@ RSpec.describe IssueLinks::CreateService do
let_it_be(:project) { create :project, namespace: namespace }
let_it_be(:issuable) { create :issue, project: project }
let_it_be(:issuable2) { create :issue, project: project }
- let_it_be(:guest_issuable) { create :issue }
+ let_it_be(:restricted_issuable) { create :issue }
let_it_be(:another_project) { create :project, namespace: project.namespace }
let_it_be(:issuable3) { create :issue, project: another_project }
let_it_be(:issuable_a) { create :issue, project: project }
@@ -23,7 +23,7 @@ RSpec.describe IssueLinks::CreateService do
before do
project.add_developer(user)
- guest_issuable.project.add_guest(user)
+ restricted_issuable.project.add_guest(user)
another_project.add_developer(user)
end
diff --git a/spec/services/issues/close_service_spec.rb b/spec/services/issues/close_service_spec.rb
index e6ad755f911..ef24d1e940e 100644
--- a/spec/services/issues/close_service_spec.rb
+++ b/spec/services/issues/close_service_spec.rb
@@ -140,7 +140,7 @@ RSpec.describe Issues::CloseService do
end
context 'when the escalation status did not change to resolved' do
- let(:escalation_status) { instance_double('IncidentManagement::IssuableEscalationStatus', resolve: false) }
+ let(:escalation_status) { instance_double('IncidentManagement::IssuableEscalationStatus', resolve: false, status_name: 'acknowledged') }
before do
allow(issue).to receive(:incident_management_issuable_escalation_status).and_return(escalation_status)
diff --git a/spec/services/issues/export_csv_service_spec.rb b/spec/services/issues/export_csv_service_spec.rb
index 66d017464bf..d3359447fd8 100644
--- a/spec/services/issues/export_csv_service_spec.rb
+++ b/spec/services/issues/export_csv_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Issues::ExportCsvService do
+RSpec.describe Issues::ExportCsvService, :with_license do
let_it_be(:user) { create(:user) }
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, :public, group: group) }
diff --git a/spec/services/issues/update_service_spec.rb b/spec/services/issues/update_service_spec.rb
index 70fc6ffc38f..930766c520b 100644
--- a/spec/services/issues/update_service_spec.rb
+++ b/spec/services/issues/update_service_spec.rb
@@ -1168,6 +1168,7 @@ RSpec.describe Issues::UpdateService, :mailer do
it 'triggers webhooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks)
+ expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :incident_hooks)
update_issue(opts)
end
@@ -1281,6 +1282,7 @@ RSpec.describe Issues::UpdateService, :mailer do
it 'triggers webhooks' do
expect(project).to receive(:execute_hooks).with(an_instance_of(Hash), :issue_hooks)
expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :issue_hooks)
+ expect(project).to receive(:execute_integrations).with(an_instance_of(Hash), :incident_hooks)
update_issue(opts)
end
diff --git a/spec/services/lfs/file_transformer_spec.rb b/spec/services/lfs/file_transformer_spec.rb
index e87c80b4c6c..9d4d8851c2d 100644
--- a/spec/services/lfs/file_transformer_spec.rb
+++ b/spec/services/lfs/file_transformer_spec.rb
@@ -2,7 +2,7 @@
require "spec_helper"
-RSpec.describe Lfs::FileTransformer do
+RSpec.describe Lfs::FileTransformer, feature_category: :git_lfs do
let(:project) { create(:project, :repository, :wiki_repo) }
let(:repository) { project.repository }
let(:file_content) { 'Test file content' }
@@ -13,6 +13,10 @@ RSpec.describe Lfs::FileTransformer do
describe '#new_file' do
context 'with lfs disabled' do
+ before do
+ allow(project).to receive(:lfs_enabled?).and_return(false)
+ end
+
it 'skips gitattributes check' do
expect(repository.raw).not_to receive(:blob_at)
@@ -98,6 +102,38 @@ RSpec.describe Lfs::FileTransformer do
expect(project.lfs_objects_projects.first.repository_type).to eq('design')
end
end
+
+ context 'when content type detection enabled' do
+ let(:detect_content_type) { true }
+
+ before do
+ allow(Gitlab::Utils::MimeType).to receive(:from_string).with(file_content).and_return(mime_type)
+ end
+
+ context 'when mime type detected' do
+ let(:mime_type) { 'image/tiff' }
+
+ it 'creates a file with custom content type' do
+ expect(CarrierWaveStringFile).to receive(:new_file).with({
+ file_content: file_content,
+ filename: anything,
+ content_type: mime_type
+ })
+
+ subject.new_file(file_path, file, detect_content_type: detect_content_type)
+ end
+ end
+
+ context 'when mime type not detected' do
+ let(:mime_type) { nil }
+
+ it 'creates a file with default content type' do
+ expect(CarrierWaveStringFile).to receive(:new).with(file_content)
+
+ subject.new_file(file_path, file, detect_content_type: detect_content_type)
+ end
+ end
+ end
end
context "when doesn't use LFS" do
diff --git a/spec/services/members/destroy_service_spec.rb b/spec/services/members/destroy_service_spec.rb
index d0f009f1321..d8a8d5881bf 100644
--- a/spec/services/members/destroy_service_spec.rb
+++ b/spec/services/members/destroy_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Members::DestroyService do
+RSpec.describe Members::DestroyService, feature_category: :subgroups do
let(:current_user) { create(:user) }
let(:member_user) { create(:user) }
let(:group) { create(:group, :public) }
@@ -100,32 +100,104 @@ RSpec.describe Members::DestroyService do
end
context 'With ExclusiveLeaseHelpers' do
+ include ExclusiveLeaseHelpers
+
+ let(:lock_key) do
+ "delete_members:#{member_to_delete.source.class}:#{member_to_delete.source.id}"
+ end
+
+ let(:timeout) { 1.minute }
let(:service_object) { described_class.new(current_user) }
- let!(:member) { group_project.add_developer(member_user) }
- subject(:destroy_member) { service_object.execute(member, **opts) }
+ subject(:destroy_member) { service_object.execute(member_to_delete, **opts) }
- before do
- group_project.add_maintainer(current_user)
+ shared_examples_for 'deletes the member without using a lock' do
+ it 'does not try to perform the delete within a lock' do
+ # `UpdateHighestRole` concern also uses locks to perform work
+ # whenever a Member is committed, so that needs to be accounted for.
+ lock_key_for_update_highest_role = "update_highest_role:#{member_to_delete.user_id}"
+ expect(Gitlab::ExclusiveLease)
+ .to receive(:new).with(lock_key_for_update_highest_role, timeout: 10.minutes.to_i).and_call_original
+
+ # We do not use any locks for the member deletion process.
+ expect(Gitlab::ExclusiveLease)
+ .not_to receive(:new).with(lock_key, timeout: timeout)
- allow(service_object).to receive(:in_lock) do |_, &block|
- block.call if lock_obtained
+ destroy_member
+ end
+
+ it 'destroys the membership' do
+ expect { destroy_member }.to change { entity.members.count }.by(-1)
end
end
- context 'when lock is obtained' do
- let(:lock_obtained) { true }
+ context 'for group members' do
+ before do
+ group.add_owner(current_user)
+ end
+
+ context 'deleting group owners' do
+ let!(:member_to_delete) { group.add_owner(member_user) }
- it 'destroys the membership' do
- expect { destroy_member }.to change { group_project.members.count }.by(-1)
+ context 'locking to avoid race conditions' do
+ it 'tries to perform the delete within a lock' do
+ expect_to_obtain_exclusive_lease(lock_key, timeout: timeout)
+
+ destroy_member
+ end
+
+ context 'based on status of the lock' do
+ context 'when lock is obtained' do
+ it 'destroys the membership' do
+ expect_to_obtain_exclusive_lease(lock_key, timeout: timeout)
+
+ expect { destroy_member }.to change { group.members.count }.by(-1)
+ end
+ end
+
+ context 'when the lock cannot be obtained' do
+ before do
+ stub_exclusive_lease_taken(lock_key, timeout: timeout)
+ end
+
+ it 'raises error' do
+ expect { destroy_member }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ end
+ end
+ end
+ end
+ end
+
+ context 'deleting group members that are not owners' do
+ let!(:member_to_delete) { group.add_developer(member_user) }
+
+ it_behaves_like 'deletes the member without using a lock' do
+ let(:entity) { group }
+ end
end
end
- context 'when the lock can not be obtained' do
- let(:lock_obtained) { false }
+ context 'for project members' do
+ before do
+ group_project.add_owner(current_user)
+ end
+
+ context 'deleting project owners' do
+ let!(:member_to_delete) { entity.add_owner(member_user) }
- it 'does not destroy the membership' do
- expect { destroy_member }.not_to change { group_project.members.count }
+ it_behaves_like 'deletes the member without using a lock' do
+ let(:entity) { group_project }
+ end
+ end
+
+ context 'deleting project members that are not owners' do
+ let!(:member_to_delete) { group_project.add_developer(member_user) }
+
+ it_behaves_like 'deletes the member without using a lock' do
+ let(:entity) { group_project }
+ end
end
end
end
diff --git a/spec/services/members/update_service_spec.rb b/spec/services/members/update_service_spec.rb
index eb8fae03c39..8a7f9a84c77 100644
--- a/spec/services/members/update_service_spec.rb
+++ b/spec/services/members/update_service_spec.rb
@@ -14,10 +14,7 @@ RSpec.describe Members::UpdateService do
let(:members) { source.members_and_requesters.where(user_id: member_users).to_a }
let(:update_service) { described_class.new(current_user, params) }
let(:params) { { access_level: access_level } }
- let(:updated_members) do
- result = subject
- Array.wrap(result[:members] || result[:member])
- end
+ let(:updated_members) { subject[:members] }
before do
member_users.first.tap do |member_user|
@@ -255,40 +252,6 @@ RSpec.describe Members::UpdateService do
end
end
- context 'when :bulk_update_membership_roles feature flag is disabled' do
- let(:member) { source.members_and_requesters.find_by!(user_id: member_user1.id) }
- let(:members) { [member] }
-
- subject { update_service.execute(member, permission: permission) }
-
- shared_examples 'a service returning an error' do
- before do
- allow(member).to receive(:save) do
- member.errors.add(:user_id)
- member.errors.add(:access_level)
- end
- .and_return(false)
- end
-
- it_behaves_like 'returns error status when params are invalid'
-
- it 'returns the error' do
- response = subject
-
- expect(response[:status]).to eq(:error)
- expect(response[:message]).to eq('User is invalid and Access level is invalid')
- end
- end
-
- before do
- stub_feature_flags(bulk_update_membership_roles: false)
- end
-
- it_behaves_like 'current user cannot update the given members'
- it_behaves_like 'updating a project'
- it_behaves_like 'updating a group'
- end
-
subject { update_service.execute(members, permission: permission) }
shared_examples 'a service returning an error' do
@@ -326,15 +289,14 @@ RSpec.describe Members::UpdateService do
it_behaves_like 'updating a group'
context 'with a single member' do
- let(:member) { create(:group_member, group: group) }
- let(:members) { member }
+ let(:members) { create(:group_member, group: group) }
before do
group.add_owner(current_user)
end
it 'returns the correct response' do
- expect(subject[:member]).to eq(member)
+ expect(subject[:members]).to contain_exactly(members)
end
end
diff --git a/spec/services/merge_requests/base_service_spec.rb b/spec/services/merge_requests/base_service_spec.rb
index 6eeba3029ae..bd907ba6015 100644
--- a/spec/services/merge_requests/base_service_spec.rb
+++ b/spec/services/merge_requests/base_service_spec.rb
@@ -2,7 +2,15 @@
require 'spec_helper'
-RSpec.describe MergeRequests::BaseService do
+module MergeRequests
+ class ExampleService < MergeRequests::BaseService
+ def execute(merge_request, async: false, allow_duplicate: false)
+ create_pipeline_for(merge_request, current_user, async: async, allow_duplicate: allow_duplicate)
+ end
+ end
+end
+
+RSpec.describe MergeRequests::BaseService, feature_category: :code_review_workflow do
include ProjectForksHelper
let_it_be(:project) { create(:project, :repository) }
@@ -57,4 +65,62 @@ RSpec.describe MergeRequests::BaseService do
it_behaves_like 'does not enqueue Jira sync worker'
end
end
+
+ describe '#create_pipeline_for' do
+ let_it_be(:merge_request) { create(:merge_request) }
+
+ subject { MergeRequests::ExampleService.new(project: project, current_user: project.first_owner, params: params) }
+
+ context 'async: false' do
+ it 'creates a pipeline directly' do
+ expect(MergeRequests::CreatePipelineService)
+ .to receive(:new)
+ .with(hash_including(project: project, current_user: project.first_owner, params: { allow_duplicate: false }))
+ .and_call_original
+ expect(MergeRequests::CreatePipelineWorker).not_to receive(:perform_async)
+
+ subject.execute(merge_request, async: false)
+ end
+
+ context 'allow_duplicate: true' do
+ it 'passes :allow_duplicate as true' do
+ expect(MergeRequests::CreatePipelineService)
+ .to receive(:new)
+ .with(hash_including(project: project, current_user: project.first_owner, params: { allow_duplicate: true }))
+ .and_call_original
+ expect(MergeRequests::CreatePipelineWorker).not_to receive(:perform_async)
+
+ subject.execute(merge_request, async: false, allow_duplicate: true)
+ end
+ end
+ end
+
+ context 'async: true' do
+ it 'enqueues a CreatePipelineWorker' do
+ expect(MergeRequests::CreatePipelineService).not_to receive(:new)
+ expect(MergeRequests::CreatePipelineWorker)
+ .to receive(:perform_async)
+ .with(project.id, project.first_owner.id, merge_request.id, { "allow_duplicate" => false })
+ .and_call_original
+
+ Sidekiq::Testing.fake! do
+ expect { subject.execute(merge_request, async: true) }.to change(MergeRequests::CreatePipelineWorker.jobs, :size).by(1)
+ end
+ end
+
+ context 'allow_duplicate: true' do
+ it 'passes :allow_duplicate as true' do
+ expect(MergeRequests::CreatePipelineService).not_to receive(:new)
+ expect(MergeRequests::CreatePipelineWorker)
+ .to receive(:perform_async)
+ .with(project.id, project.first_owner.id, merge_request.id, { "allow_duplicate" => true })
+ .and_call_original
+
+ Sidekiq::Testing.fake! do
+ expect { subject.execute(merge_request, async: true, allow_duplicate: true) }.to change(MergeRequests::CreatePipelineWorker.jobs, :size).by(1)
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/services/merge_requests/rebase_service_spec.rb b/spec/services/merge_requests/rebase_service_spec.rb
index e7aa6e74246..316f20d8276 100644
--- a/spec/services/merge_requests/rebase_service_spec.rb
+++ b/spec/services/merge_requests/rebase_service_spec.rb
@@ -24,6 +24,45 @@ RSpec.describe MergeRequests::RebaseService do
project.add_maintainer(user)
end
+ describe '#validate' do
+ subject { service.validate(merge_request) }
+
+ it { is_expected.to be_success }
+
+ context 'when source branch does not exist' do
+ before do
+ merge_request.update!(source_branch: 'does_not_exist')
+ end
+
+ it 'returns an error' do
+ is_expected.to be_error
+ expect(subject.message).to eq('Source branch does not exist')
+ end
+ end
+
+ context 'when user has no permissions to rebase' do
+ before do
+ project.add_guest(user)
+ end
+
+ it 'returns an error' do
+ is_expected.to be_error
+ expect(subject.message).to eq('Cannot push to source branch')
+ end
+ end
+
+ context 'when branch is protected' do
+ before do
+ create(:protected_branch, project: project, name: merge_request.source_branch, allow_force_push: false)
+ end
+
+ it 'returns an error' do
+ is_expected.to be_error
+ expect(subject.message).to eq('Source branch is protected from force push')
+ end
+ end
+ end
+
describe '#execute' do
shared_examples 'sequence of failure and success' do
it 'properly clears the error message' do
diff --git a/spec/services/merge_requests/refresh_service_spec.rb b/spec/services/merge_requests/refresh_service_spec.rb
index 5174ceaaa82..0814942b6b7 100644
--- a/spec/services/merge_requests/refresh_service_spec.rb
+++ b/spec/services/merge_requests/refresh_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MergeRequests::RefreshService do
+RSpec.describe MergeRequests::RefreshService, feature_category: :code_review_workflow do
include ProjectForksHelper
include UserHelpers
@@ -138,7 +138,7 @@ RSpec.describe MergeRequests::RefreshService do
refresh_service.execute(@oldrev, @newrev, 'refs/heads/master')
expect { refresh_service.execute(@oldrev, @newrev, 'refs/heads/master') }.to change {
- refresh_service.instance_variable_get("@source_merge_requests").first.merge_request_diff
+ refresh_service.instance_variable_get(:@source_merge_requests).first.merge_request_diff
}
end
@@ -799,7 +799,7 @@ RSpec.describe MergeRequests::RefreshService do
it 'does not mark as draft based on commits that do not belong to an MR' do
allow(refresh_service).to receive(:find_new_commits)
- refresh_service.instance_variable_set("@commits",
+ refresh_service.instance_variable_set(:@commits,
[
double(
id: 'aaaaaaa',
diff --git a/spec/services/merge_requests/update_service_spec.rb b/spec/services/merge_requests/update_service_spec.rb
index da78f86c7c8..344d93fc5ca 100644
--- a/spec/services/merge_requests/update_service_spec.rb
+++ b/spec/services/merge_requests/update_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MergeRequests::UpdateService, :mailer do
+RSpec.describe MergeRequests::UpdateService, :mailer, feature_category: :code_review_workflow do
include ProjectForksHelper
let(:group) { create(:group, :public) }
@@ -479,6 +479,16 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
end
+ shared_examples_for "creates a new pipeline" do
+ it "creates a new pipeline" do
+ expect(MergeRequests::CreatePipelineWorker)
+ .to receive(:perform_async)
+ .with(project.id, user.id, merge_request.id, { "allow_duplicate" => true })
+
+ update_merge_request(target_branch: new_target_branch)
+ end
+ end
+
shared_examples_for 'correct merge behavior' do
let(:opts) do
{
@@ -784,7 +794,7 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
end
end
- context 'when the target branch change' do
+ context 'when the target branch changes' do
it 'calls MergeRequests::ResolveTodosService#async_execute' do
expect_next_instance_of(MergeRequests::ResolveTodosService, merge_request, user) do |service|
expect(service).to receive(:async_execute)
@@ -799,6 +809,10 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
update_merge_request({ target_branch: "target" })
end
+
+ it_behaves_like "creates a new pipeline" do
+ let(:new_target_branch) { "target" }
+ end
end
context 'when auto merge is enabled and target branch changed' do
@@ -813,6 +827,10 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
update_merge_request({ target_branch: 'target' })
end
+
+ it_behaves_like "creates a new pipeline" do
+ let(:new_target_branch) { "target" }
+ end
end
end
@@ -1237,6 +1255,10 @@ RSpec.describe MergeRequests::UpdateService, :mailer do
expect { update_merge_request(target_branch: 'master', target_branch_was_deleted: true) }
.to change { merge_request.reload.target_branch }.from('mr-a').to('master')
end
+
+ it_behaves_like "creates a new pipeline" do
+ let(:new_target_branch) { "target" }
+ end
end
it_behaves_like 'issuable record that supports quick actions' do
diff --git a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
index ff3b295d185..e3c05178025 100644
--- a/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
+++ b/spec/services/ml/experiment_tracking/candidate_repository_spec.rb
@@ -31,17 +31,37 @@ RSpec.describe ::Ml::ExperimentTracking::CandidateRepository do
end
describe '#create!' do
- subject { repository.create!(experiment, 1234, [{ key: 'hello', value: 'world' }]) }
+ let(:tags) { [{ key: 'hello', value: 'world' }] }
+ let(:name) { 'some_candidate' }
+
+ subject { repository.create!(experiment, 1234, tags, name) }
it 'creates the candidate' do
expect(subject.start_time).to eq(1234)
expect(subject.iid).not_to be_nil
expect(subject.end_time).to be_nil
+ expect(subject.name).to eq('some_candidate')
end
it 'creates with tag' do
expect(subject.metadata.length).to eq(1)
end
+
+ context 'when name is passed as tag' do
+ let(:tags) { [{ key: 'mlflow.runName', value: 'blah' }] }
+
+ it 'ignores the tag when name is not nil' do
+ expect(subject.name).to eq('some_candidate')
+ end
+
+ context 'when name is nil' do
+ let(:name) { nil }
+
+ it 'sets the mlflow.runName as candidate name' do
+ expect(subject.name).to eq('blah')
+ end
+ end
+ end
end
describe '#update' do
diff --git a/spec/services/notes/create_service_spec.rb b/spec/services/notes/create_service_spec.rb
index 2f1c5a5b0f3..22606cc2461 100644
--- a/spec/services/notes/create_service_spec.rb
+++ b/spec/services/notes/create_service_spec.rb
@@ -18,6 +18,10 @@ RSpec.describe Notes::CreateService do
end
context "valid params" do
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { note }
+ end
+
it 'returns a valid note' do
expect(note).to be_valid
end
@@ -230,6 +234,10 @@ RSpec.describe Notes::CreateService do
confidential: false)
end
+ it_behaves_like 'triggers GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) { described_class.new(project_with_repo, user, new_opts).execute }
+ end
+
it 'note is associated with a note diff file' do
MergeRequests::MergeToRefService.new(project: merge_request.project, current_user: merge_request.author).execute(merge_request)
@@ -248,6 +256,16 @@ RSpec.describe Notes::CreateService do
end
end
+ context 'when skip_merge_status_trigger execute option is set to true' do
+ it_behaves_like 'does not trigger GraphQL subscription mergeRequestMergeStatusUpdated' do
+ let(:action) do
+ described_class
+ .new(project_with_repo, user, new_opts)
+ .execute(skip_merge_status_trigger: true)
+ end
+ end
+ end
+
it 'does not track ipynb note usage data' do
expect(::Gitlab::UsageDataCounters::IpynbDiffActivityCounter).not_to receive(:note_created)
diff --git a/spec/services/notification_service_spec.rb b/spec/services/notification_service_spec.rb
index 1ca14cd430b..1ad9234c939 100644
--- a/spec/services/notification_service_spec.rb
+++ b/spec/services/notification_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe NotificationService, :mailer do
+RSpec.describe NotificationService, :mailer, feature_category: :team_planning do
include EmailSpec::Matchers
include ExternalAuthorizationServiceHelpers
include NotificationHelpers
@@ -337,11 +337,12 @@ RSpec.describe NotificationService, :mailer do
describe '#access_token_expired' do
let_it_be(:user) { create(:user) }
+ let_it_be(:pat) { create(:personal_access_token, user: user) }
- subject { notification.access_token_expired(user) }
+ subject { notification.access_token_expired(user, pat.name) }
it 'sends email to the token owner' do
- expect { subject }.to have_enqueued_email(user, mail: "access_token_expired_email")
+ expect { subject }.to have_enqueued_email(user, pat.name, mail: "access_token_expired_email")
end
context 'when user is not allowed to receive notifications' do
@@ -350,7 +351,7 @@ RSpec.describe NotificationService, :mailer do
end
it 'does not send email to the token owner' do
- expect { subject }.not_to have_enqueued_email(user, mail: "access_token_expired_email")
+ expect { subject }.not_to have_enqueued_email(user, pat.name, mail: "access_token_expired_email")
end
end
end
diff --git a/spec/services/packages/conan/search_service_spec.rb b/spec/services/packages/conan/search_service_spec.rb
index 55dcdfe646d..9e8be164d8c 100644
--- a/spec/services/packages/conan/search_service_spec.rb
+++ b/spec/services/packages/conan/search_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Packages::Conan::SearchService do
+RSpec.describe Packages::Conan::SearchService, feature_category: :package_registry do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :public) }
diff --git a/spec/services/pages_domains/create_service_spec.rb b/spec/services/pages_domains/create_service_spec.rb
index cac941fb134..4dd9bd8f3bb 100644
--- a/spec/services/pages_domains/create_service_spec.rb
+++ b/spec/services/pages_domains/create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::PagesDomains::CreateService do
+RSpec.describe ::PagesDomains::CreateService, feature_category: :pages do
let_it_be(:user) { create(:user) }
let_it_be(:project) { create(:project, :in_subgroup) }
@@ -37,6 +37,7 @@ RSpec.describe ::PagesDomains::CreateService do
project_id: project.id,
namespace_id: project.namespace.id,
root_namespace_id: project.root_namespace.id,
+ domain_id: kind_of(Numeric),
domain: domain
)
diff --git a/spec/services/pages_domains/delete_service_spec.rb b/spec/services/pages_domains/delete_service_spec.rb
index 5f98fe3c7f7..43d59961637 100644
--- a/spec/services/pages_domains/delete_service_spec.rb
+++ b/spec/services/pages_domains/delete_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe ::PagesDomains::DeleteService do
+RSpec.describe ::PagesDomains::DeleteService, feature_category: :pages do
let_it_be(:user) { create(:user) }
let_it_be(:pages_domain) { create(:pages_domain, :with_project) }
@@ -39,6 +39,7 @@ RSpec.describe ::PagesDomains::DeleteService do
project_id: pages_domain.project.id,
namespace_id: pages_domain.project.namespace.id,
root_namespace_id: pages_domain.project.root_namespace.id,
+ domain_id: pages_domain.id,
domain: pages_domain.domain
)
end
diff --git a/spec/services/pages_domains/retry_acme_order_service_spec.rb b/spec/services/pages_domains/retry_acme_order_service_spec.rb
index 3152e05f2f1..4860d57475b 100644
--- a/spec/services/pages_domains/retry_acme_order_service_spec.rb
+++ b/spec/services/pages_domains/retry_acme_order_service_spec.rb
@@ -18,6 +18,7 @@ RSpec.describe PagesDomains::RetryAcmeOrderService, feature_category: :pages do
project_id: project.id,
namespace_id: project.namespace.id,
root_namespace_id: project.root_namespace.id,
+ domain_id: domain.id,
domain: domain.domain
)
end
@@ -31,6 +32,7 @@ RSpec.describe PagesDomains::RetryAcmeOrderService, feature_category: :pages do
project_id: project.id,
namespace_id: project.namespace.id,
root_namespace_id: project.root_namespace.id,
+ domain_id: domain.id,
domain: domain.domain
)
end
diff --git a/spec/services/pages_domains/update_service_spec.rb b/spec/services/pages_domains/update_service_spec.rb
index f6558f56422..c317a2c68f6 100644
--- a/spec/services/pages_domains/update_service_spec.rb
+++ b/spec/services/pages_domains/update_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe PagesDomains::UpdateService do
+RSpec.describe PagesDomains::UpdateService, feature_category: :pages do
let_it_be(:user) { create(:user) }
let_it_be(:pages_domain) { create(:pages_domain, :with_project) }
@@ -40,6 +40,7 @@ RSpec.describe PagesDomains::UpdateService do
project_id: pages_domain.project.id,
namespace_id: pages_domain.project.namespace.id,
root_namespace_id: pages_domain.project.root_namespace.id,
+ domain_id: pages_domain.id,
domain: pages_domain.domain
)
end
diff --git a/spec/services/personal_access_tokens/revoke_service_spec.rb b/spec/services/personal_access_tokens/revoke_service_spec.rb
index 562d6017405..a9b4df9749f 100644
--- a/spec/services/personal_access_tokens/revoke_service_spec.rb
+++ b/spec/services/personal_access_tokens/revoke_service_spec.rb
@@ -71,26 +71,30 @@ RSpec.describe PersonalAccessTokens::RevokeService do
let_it_be(:current_user) { nil }
context 'when source is valid' do
- let_it_be(:source) { 'secret_detection' }
+ let_it_be(:source) { :secret_detection }
let_it_be(:token) { create(:personal_access_token) }
it_behaves_like 'a successfully revoked token' do
- let(:revoked_by) { 'secret_detection' }
+ let(:revoked_by) { :secret_detection }
end
end
context 'when source is invalid' do
- let_it_be(:source) { 'external_request' }
+ let_it_be(:source) { :external_request }
let_it_be(:token) { create(:personal_access_token) }
- it_behaves_like 'an unsuccessfully revoked token'
+ it 'raises ArgumentError' do
+ expect { subject }.to raise_error ArgumentError
+ end
end
context 'when source is missing' do
let_it_be(:source) { nil }
let_it_be(:token) { create(:personal_access_token) }
- it_behaves_like 'an unsuccessfully revoked token'
+ it 'raises ArgumentError' do
+ expect { subject }.to raise_error ArgumentError
+ end
end
end
end
diff --git a/spec/services/projects/create_service_spec.rb b/spec/services/projects/create_service_spec.rb
index f42ab198a04..f85a8eda7ee 100644
--- a/spec/services/projects/create_service_spec.rb
+++ b/spec/services/projects/create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::CreateService, '#execute' do
+RSpec.describe Projects::CreateService, '#execute', feature_category: :projects do
include ExternalAuthorizationServiceHelpers
let(:user) { create :user }
@@ -995,6 +995,7 @@ RSpec.describe Projects::CreateService, '#execute' do
where(:shared_runners_setting, :desired_config_for_new_project, :expected_result_for_project) do
Namespace::SR_ENABLED | nil | true
Namespace::SR_DISABLED_WITH_OVERRIDE | nil | false
+ Namespace::SR_DISABLED_AND_OVERRIDABLE | nil | false
Namespace::SR_DISABLED_AND_UNOVERRIDABLE | nil | false
end
@@ -1017,6 +1018,8 @@ RSpec.describe Projects::CreateService, '#execute' do
Namespace::SR_ENABLED | false | false
Namespace::SR_DISABLED_WITH_OVERRIDE | false | false
Namespace::SR_DISABLED_WITH_OVERRIDE | true | true
+ Namespace::SR_DISABLED_AND_OVERRIDABLE | false | false
+ Namespace::SR_DISABLED_AND_OVERRIDABLE | true | true
Namespace::SR_DISABLED_AND_UNOVERRIDABLE | false | false
end
diff --git a/spec/services/projects/import_service_spec.rb b/spec/services/projects/import_service_spec.rb
index bb11b2e617e..38ab7b6e2ee 100644
--- a/spec/services/projects/import_service_spec.rb
+++ b/spec/services/projects/import_service_spec.rb
@@ -373,6 +373,28 @@ RSpec.describe Projects::ImportService do
expect(result[:status]).to eq(:success)
end
+
+ context 'when host resolves to an IPv6 address' do
+ before do
+ project.import_url = 'https://gitlab.com/gitlab-org/gitlab-development-kit'
+
+ allow(Gitlab::UrlBlocker).to receive(:validate!)
+ .with(project.import_url, ports: Project::VALID_IMPORT_PORTS, schemes: Project::VALID_IMPORT_PROTOCOLS, dns_rebind_protection: true)
+ .and_return([Addressable::URI.parse('https://[2606:4700:90:0:f22e:fbec:5bed:a9b9]/gitlab-org/gitlab-development-kit'), 'gitlab.com'])
+ end
+
+ it 'imports repository with url and additional resolved bare IPv6 address' do
+ expect(project.repository).to receive(:import_repository).with('https://gitlab.com/gitlab-org/gitlab-development-kit', resolved_address: '2606:4700:90:0:f22e:fbec:5bed:a9b9').and_return(true)
+
+ expect_next_instance_of(Projects::LfsPointers::LfsImportService) do |service|
+ expect(service).to receive(:execute).and_return(status: :success)
+ end
+
+ result = subject.execute
+
+ expect(result[:status]).to eq(:success)
+ end
+ end
end
context 'when http url is provided' do
diff --git a/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb b/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
index a3cff345f68..62330441d2f 100644
--- a/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
+++ b/spec/services/projects/refresh_build_artifacts_size_statistics_service_spec.rb
@@ -6,7 +6,7 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
let(:service) { described_class.new }
describe '#execute' do
- let_it_be(:project) { create(:project) }
+ let_it_be(:project, reload: true) { create(:project) }
let_it_be(:artifact_1) { create(:ci_job_artifact, project: project, size: 1, created_at: 14.days.ago) }
let_it_be(:artifact_2) { create(:ci_job_artifact, project: project, size: 2, created_at: 13.days.ago) }
@@ -29,6 +29,7 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
let(:now) { Time.zone.now }
let(:statistics) { project.statistics }
+ let(:increment) { Gitlab::Counters::Increment.new(amount: 30) }
around do |example|
freeze_time { example.run }
@@ -36,17 +37,19 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
before do
stub_const("#{described_class}::BATCH_SIZE", 3)
+ stub_const("#{described_class}::REFRESH_INTERVAL_SECONDS", 0)
stats = create(:project_statistics, project: project, build_artifacts_size: 120)
- stats.increment_counter(:build_artifacts_size, 30)
+ stats.increment_counter(:build_artifacts_size, increment)
end
it 'resets the build artifacts size stats' do
- expect { service.execute }.to change { project.statistics.reload.build_artifacts_size }.to(0)
+ expect { service.execute }.to change { statistics.reload.build_artifacts_size }.from(120).to(0)
end
- it 'increments the counter attribute by the total size of the current batch of artifacts' do
- expect { service.execute }.to change { statistics.counter(:build_artifacts_size).get }.to(3)
+ it 'resets the buffered counter' do
+ expect { service.execute }
+ .to change { Gitlab::Counters::BufferedCounter.new(statistics, :build_artifacts_size).get }.to(0)
end
it 'updates the last_job_artifact_id to the ID of the last artifact from the batch' do
@@ -56,7 +59,7 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
it 'updates the last_job_artifact_id to the ID of the last artifact from the project' do
expect { service.execute }
.to change { refresh.reload.last_job_artifact_id_on_refresh_start.to_i }
- .to(project.job_artifacts.last.id)
+ .to(project.job_artifacts.last.id)
end
it 'requeues the refresh job' do
@@ -106,9 +109,10 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsService, :clean_gitl
)
end
- it 'deletes the refresh record' do
+ it 'schedules the refresh to be finalized' do
service.execute
- expect(Projects::BuildArtifactsSizeRefresh.where(id: refresh.id)).not_to exist
+
+ expect(refresh.reload.finalizing?).to be(true)
end
end
end
diff --git a/spec/services/projects/transfer_service_spec.rb b/spec/services/projects/transfer_service_spec.rb
index 4d75786a4c3..5171836f917 100644
--- a/spec/services/projects/transfer_service_spec.rb
+++ b/spec/services/projects/transfer_service_spec.rb
@@ -535,8 +535,8 @@ RSpec.describe Projects::TransferService do
where(:project_shared_runners_enabled, :shared_runners_setting, :expected_shared_runners_enabled) do
true | :disabled_and_unoverridable | false
false | :disabled_and_unoverridable | false
- true | :disabled_with_override | true
- false | :disabled_with_override | false
+ true | :disabled_and_overridable | true
+ false | :disabled_and_overridable | false
true | :shared_runners_enabled | true
false | :shared_runners_enabled | false
end
diff --git a/spec/services/repositories/changelog_service_spec.rb b/spec/services/repositories/changelog_service_spec.rb
index 47ebd55022f..42b586637ad 100644
--- a/spec/services/repositories/changelog_service_spec.rb
+++ b/spec/services/repositories/changelog_service_spec.rb
@@ -79,7 +79,7 @@ RSpec.describe Repositories::ChangelogService do
recorder = ActiveRecord::QueryRecorder.new { service.execute(commit_to_changelog: commit_to_changelog) }
changelog = project.repository.blob_at('master', 'CHANGELOG.md')&.data
- expect(recorder.count).to eq(10)
+ expect(recorder.count).to eq(12)
expect(changelog).to include('Title 1', 'Title 2')
end
diff --git a/spec/services/search_service_spec.rb b/spec/services/search_service_spec.rb
index 90e80a45515..d11fc377d83 100644
--- a/spec/services/search_service_spec.rb
+++ b/spec/services/search_service_spec.rb
@@ -471,4 +471,32 @@ RSpec.describe SearchService, feature_category: :global_search do
end
end
end
+
+ describe '.global_search_enabled_for_scope?' do
+ using RSpec::Parameterized::TableSyntax
+ let(:search) { 'foobar' }
+
+ where(:scope, :feature_flag, :enabled, :expected) do
+ 'blobs' | :global_search_code_tab | false | false
+ 'blobs' | :global_search_code_tab | true | true
+ 'commits' | :global_search_commits_tab | false | false
+ 'commits' | :global_search_commits_tab | true | true
+ 'issues' | :global_search_issues_tab | false | false
+ 'issues' | :global_search_issues_tab | true | true
+ 'merge_requests' | :global_search_merge_requests_tab | false | false
+ 'merge_requests' | :global_search_merge_requests_tab | true | true
+ 'wiki_blobs' | :global_search_wiki_tab | false | false
+ 'wiki_blobs' | :global_search_wiki_tab | true | true
+ 'users' | :global_search_users_tab | false | false
+ 'users' | :global_search_users_tab | true | true
+ 'random' | :random | nil | true
+ end
+
+ with_them do
+ it 'returns false when feature_flag is not enabled and returns true when feature_flag is enabled' do
+ stub_feature_flags(feature_flag => enabled)
+ expect(subject.global_search_enabled_for_scope?).to eq expected
+ end
+ end
+ end
end
diff --git a/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb b/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb
new file mode 100644
index 00000000000..719a2cf24e9
--- /dev/null
+++ b/spec/services/security/ci_configuration/dependency_scanning_create_service_spec.rb
@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Security::CiConfiguration::DependencyScanningCreateService, :snowplow,
+ feature_category: :dependency_scanning do
+ subject(:result) { described_class.new(project, user).execute }
+
+ let(:branch_name) { 'set-dependency-scanning-config-1' }
+
+ let(:snowplow_event) do
+ {
+ category: 'Security::CiConfiguration::DependencyScanningCreateService',
+ action: 'create',
+ label: ''
+ }
+ end
+
+ include_examples 'services security ci configuration create service', true
+end
diff --git a/spec/services/security/ci_configuration/sast_create_service_spec.rb b/spec/services/security/ci_configuration/sast_create_service_spec.rb
index c7e732dc79a..1e6dc367146 100644
--- a/spec/services/security/ci_configuration/sast_create_service_spec.rb
+++ b/spec/services/security/ci_configuration/sast_create_service_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Security::CiConfiguration::SastCreateService, :snowplow do
+RSpec.describe Security::CiConfiguration::SastCreateService, :snowplow, feature_category: :sast do
subject(:result) { described_class.new(project, user, params).execute }
let(:branch_name) { 'set-sast-config-1' }
diff --git a/spec/services/service_ping/submit_service_ping_service_spec.rb b/spec/services/service_ping/submit_service_ping_service_spec.rb
index 37231307156..b02f1e84d25 100644
--- a/spec/services/service_ping/submit_service_ping_service_spec.rb
+++ b/spec/services/service_ping/submit_service_ping_service_spec.rb
@@ -305,13 +305,20 @@ RSpec.describe ServicePing::SubmitService do
stub_response(body: with_conv_index_params)
end
- let(:metric_double) { instance_double(Gitlab::Usage::ServicePing::LegacyMetricTimingDecorator, duration: 123) }
+ let(:metric_double) do
+ instance_double(Gitlab::Usage::ServicePing::LegacyMetricMetadataDecorator, duration: 123, error: nil)
+ end
+
+ let(:metric_double_with_error) do
+ instance_double(Gitlab::Usage::ServicePing::LegacyMetricMetadataDecorator, duration: 123, error: 'Error')
+ end
+
let(:usage_data) do
{
uuid: 'uuid',
metric_a: metric_double,
metric_group: {
- metric_b: metric_double
+ metric_b: metric_double_with_error
},
metric_without_timing: "value",
recorded_at: Time.current
@@ -324,7 +331,7 @@ RSpec.describe ServicePing::SubmitService do
uuid: 'uuid',
metrics: [
{ name: 'metric_a', time_elapsed: 123 },
- { name: 'metric_group.metric_b', time_elapsed: 123 }
+ { name: 'metric_group.metric_b', time_elapsed: 123, error: 'Error' }
]
}
}
diff --git a/spec/services/service_response_spec.rb b/spec/services/service_response_spec.rb
index 2d70979dd3a..58dd2fd4c5e 100644
--- a/spec/services/service_response_spec.rb
+++ b/spec/services/service_response_spec.rb
@@ -178,4 +178,40 @@ RSpec.describe ServiceResponse do
end
end
end
+
+ describe '#log_and_raise_exception' do
+ context 'when successful' do
+ let(:response) { described_class.success }
+
+ it 'returns self' do
+ expect(response.log_and_raise_exception).to be response
+ end
+ end
+
+ context 'when an error' do
+ let(:response) { described_class.error(message: 'bang') }
+
+ it 'logs' do
+ expect(::Gitlab::ErrorTracking).to receive(:log_and_raise_exception)
+ .with(StandardError.new('bang'), {})
+
+ response.log_and_raise_exception
+ end
+
+ it 'allows specification of error class' do
+ error = Class.new(StandardError)
+ expect(::Gitlab::ErrorTracking).to receive(:log_and_raise_exception)
+ .with(error.new('bang'), {})
+
+ response.log_and_raise_exception(as: error)
+ end
+
+ it 'allows extra data for tracking' do
+ expect(::Gitlab::ErrorTracking).to receive(:log_and_raise_exception)
+ .with(StandardError.new('bang'), { foo: 1, bar: 2 })
+
+ response.log_and_raise_exception(foo: 1, bar: 2)
+ end
+ end
+ end
end
diff --git a/spec/services/test_hooks/project_service_spec.rb b/spec/services/test_hooks/project_service_spec.rb
index d97a6f15270..13f863dbbdb 100644
--- a/spec/services/test_hooks/project_service_spec.rb
+++ b/spec/services/test_hooks/project_service_spec.rb
@@ -26,7 +26,7 @@ RSpec.describe TestHooks::ProjectService do
context 'hook with not implemented test' do
it 'returns error message' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Testing not available for this hook' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Testing not available for this hook')
end
end
@@ -60,7 +60,7 @@ RSpec.describe TestHooks::ProjectService do
it 'returns error message if not enough data' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the project has notes.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the project has notes.')
end
it 'executes hook' do
@@ -79,7 +79,7 @@ RSpec.describe TestHooks::ProjectService do
it 'returns error message if not enough data' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the project has issues.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the project has issues.')
end
it 'executes hook' do
@@ -112,7 +112,7 @@ RSpec.describe TestHooks::ProjectService do
it 'returns error message if not enough data' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the project has merge requests.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the project has merge requests.')
end
it 'executes hook' do
@@ -131,7 +131,7 @@ RSpec.describe TestHooks::ProjectService do
it 'returns error message if not enough data' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the project has CI jobs.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the project has CI jobs.')
end
it 'executes hook' do
@@ -150,7 +150,7 @@ RSpec.describe TestHooks::ProjectService do
it 'returns error message if not enough data' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the project has CI pipelines.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the project has CI pipelines.')
end
it 'executes hook' do
@@ -172,12 +172,12 @@ RSpec.describe TestHooks::ProjectService do
allow(project).to receive(:wiki_enabled?).and_return(false)
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the wiki is enabled and has pages.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the wiki is enabled and has pages.')
end
it 'returns error message if not enough data' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the wiki is enabled and has pages.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the wiki is enabled and has pages.')
end
it 'executes hook' do
@@ -196,7 +196,7 @@ RSpec.describe TestHooks::ProjectService do
it 'returns error message if not enough data' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure the project has releases.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure the project has releases.')
end
it 'executes hook' do
diff --git a/spec/services/test_hooks/system_service_spec.rb b/spec/services/test_hooks/system_service_spec.rb
index 66a1218d123..e94ea4669c6 100644
--- a/spec/services/test_hooks/system_service_spec.rb
+++ b/spec/services/test_hooks/system_service_spec.rb
@@ -21,7 +21,7 @@ RSpec.describe TestHooks::SystemService do
it 'returns error message' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Testing not available for this hook' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Testing not available for this hook')
end
end
@@ -70,7 +70,7 @@ RSpec.describe TestHooks::SystemService do
it 'returns error message if the user does not have any repository with a merge request' do
expect(hook).not_to receive(:execute)
- expect(service.execute).to include({ status: :error, message: 'Ensure one of your projects has merge requests.' })
+ expect(service.execute).to have_attributes(status: :error, message: 'Ensure one of your projects has merge requests.')
end
it 'executes hook' do
diff --git a/spec/services/todo_service_spec.rb b/spec/services/todo_service_spec.rb
index c4ed34a693e..596ca9495ff 100644
--- a/spec/services/todo_service_spec.rb
+++ b/spec/services/todo_service_spec.rb
@@ -1259,92 +1259,85 @@ RSpec.describe TodoService do
end
end
- describe '#create_member_access_request' do
- context 'snowplow event tracking' do
- it 'does not track snowplow event when todos are for access request for project', :snowplow do
- user = create(:user)
- project = create(:project)
- requester = create(:project_member, project: project, user: assignee)
- project.add_owner(user)
-
- expect_no_snowplow_event
+ describe '#create_member_access_request_todos' do
+ let_it_be(:group) { create(:group, :public) }
+ let_it_be(:project) { create(:project, :public, group: group) }
+
+ shared_examples 'member access request is raised' do
+ context 'when the source has more than 10 owners' do
+ it 'creates todos for 10 recently active source owners' do
+ users = create_list(:user, 12, :with_sign_ins)
+ users.each do |user|
+ source.add_owner(user)
+ end
+ ten_most_recently_active_source_owners = users.sort_by(&:last_sign_in_at).last(10)
+ excluded_source_owners = users - ten_most_recently_active_source_owners
- service.create_member_access_request(requester)
- end
- end
+ service.create_member_access_request_todos(requester1)
- context 'when the group has more than 10 owners' do
- it 'creates todos for 10 recently active group owners' do
- group = create(:group, :public)
+ ten_most_recently_active_source_owners.each do |owner|
+ expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester1.user).count).to eq 1
+ end
- users = create_list(:user, 12, :with_sign_ins)
- users.each do |user|
- group.add_owner(user)
+ excluded_source_owners.each do |owner|
+ expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester1.user).count).to eq 0
+ end
end
- ten_most_recently_active_group_owners = users.sort_by(&:last_sign_in_at).last(10)
- excluded_group_owners = users - ten_most_recently_active_group_owners
-
- requester = create(:group_member, group: group, user: assignee)
+ end
- service.create_member_access_request(requester)
+ context 'when total owners are less than 10' do
+ it 'creates todos for all source owners' do
+ users = create_list(:user, 4, :with_sign_ins)
+ users.map do |user|
+ source.add_owner(user)
+ end
- ten_most_recently_active_group_owners.each do |owner|
- expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: assignee).count).to eq 1
- end
+ service.create_member_access_request_todos(requester1)
- excluded_group_owners.each do |owner|
- expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: assignee).count).to eq 0
+ users.each do |owner|
+ expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester1.user).count).to eq 1
+ end
end
end
- end
-
- context 'when total owners are less than 10' do
- it 'creates todos for all group owners' do
- group = create(:group, :public)
- users = create_list(:user, 4, :with_sign_ins)
- users.map do |user|
- group.add_owner(user)
- end
+ context 'when multiple access requests are raised' do
+ it 'creates todos for 10 recently active source owners for multiple requests' do
+ users = create_list(:user, 12, :with_sign_ins)
+ users.each do |user|
+ source.add_owner(user)
+ end
+ ten_most_recently_active_source_owners = users.sort_by(&:last_sign_in_at).last(10)
+ excluded_source_owners = users - ten_most_recently_active_source_owners
- requester = create(:group_member, user: assignee, group: group)
- requester.requested_at = Time.now.utc
- requester.save!
+ service.create_member_access_request_todos(requester1)
+ service.create_member_access_request_todos(requester2)
- service.create_member_access_request(requester)
+ ten_most_recently_active_source_owners.each do |owner|
+ expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester1.user).count).to eq 1
+ expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester2.user).count).to eq 1
+ end
- users.each do |owner|
- expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: assignee).count).to eq 1
+ excluded_source_owners.each do |owner|
+ expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester1.user).count).to eq 0
+ expect(Todo.where(user: owner, target: source, action: Todo::MEMBER_ACCESS_REQUESTED, author: requester2.user).count).to eq 0
+ end
end
end
end
- context 'when multiple access requests are raised' do
- it 'creates todos for 10 recently active group owners for multiple requests' do
- group = create(:group, :public)
-
- users = create_list(:user, 12, :with_sign_ins)
- users.each do |user|
- group.add_owner(user)
- end
- ten_most_recently_active_group_owners = users.sort_by(&:last_sign_in_at).last(10)
- excluded_group_owners = users - ten_most_recently_active_group_owners
-
- requester1 = create(:group_member, group: group, user: assignee)
- requester2 = create(:group_member, group: group, user: non_member)
-
- service.create_member_access_request(requester1)
- service.create_member_access_request(requester2)
-
- ten_most_recently_active_group_owners.each do |owner|
- expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: assignee).count).to eq 1
- expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: non_member).count).to eq 1
- end
+ context 'when request is raised for group' do
+ it_behaves_like 'member access request is raised' do
+ let_it_be(:source) { create(:group, :public) }
+ let_it_be(:requester1) { create(:group_member, :access_request, group: source, user: assignee) }
+ let_it_be(:requester2) { create(:group_member, :access_request, group: source, user: non_member) }
+ end
+ end
- excluded_group_owners.each do |owner|
- expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: assignee).count).to eq 0
- expect(Todo.where(user: owner, target: group, action: Todo::MEMBER_ACCESS_REQUESTED, author: non_member).count).to eq 0
- end
+ context 'when request is raised for project' do
+ it_behaves_like 'member access request is raised' do
+ let_it_be(:source) { create(:project, :public) }
+ let_it_be(:requester1) { create(:project_member, :access_request, project: source, user: assignee) }
+ let_it_be(:requester2) { create(:project_member, :access_request, project: source, user: non_member) }
end
end
end
diff --git a/spec/services/users/block_service_spec.rb b/spec/services/users/block_service_spec.rb
index 45a5b1e5100..7ff9a887f38 100644
--- a/spec/services/users/block_service_spec.rb
+++ b/spec/services/users/block_service_spec.rb
@@ -3,7 +3,7 @@
require 'spec_helper'
RSpec.describe Users::BlockService do
- let(:current_user) { create(:admin) }
+ let_it_be(:current_user) { create(:admin) }
subject(:service) { described_class.new(current_user) }
@@ -18,6 +18,15 @@ RSpec.describe Users::BlockService do
it "change the user's state" do
expect { operation }.to change { user.state }.to('blocked')
end
+
+ it 'saves a custom attribute', :aggregate_failures, :freeze_time, feature_category: :insider_threat do
+ operation
+
+ custom_attribute = user.custom_attributes.last
+
+ expect(custom_attribute.key).to eq(UserCustomAttribute::BLOCKED_BY)
+ expect(custom_attribute.value).to eq("#{current_user.username}/#{current_user.id}+#{Time.current}")
+ end
end
context 'when failed' do
diff --git a/spec/services/users/signup_service_spec.rb b/spec/services/users/signup_service_spec.rb
index 7169401ab34..ef532e01d0b 100644
--- a/spec/services/users/signup_service_spec.rb
+++ b/spec/services/users/signup_service_spec.rb
@@ -10,7 +10,7 @@ RSpec.describe Users::SignupService do
it 'updates the name attribute' do
result = update_user(user, name: 'New Name')
- expect(result).to eq(status: :success)
+ expect(result.success?).to be(true)
expect(user.reload.name).to eq('New Name')
end
@@ -18,8 +18,8 @@ RSpec.describe Users::SignupService do
result = update_user(user, name: '')
expect(user.reload.name).not_to be_blank
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to include("Name can't be blank")
+ expect(result.success?).to be(false)
+ expect(result.message).to include("Name can't be blank")
end
end
@@ -27,7 +27,7 @@ RSpec.describe Users::SignupService do
it 'updates the role attribute' do
result = update_user(user, role: 'development_team_lead')
- expect(result).to eq(status: :success)
+ expect(result.success?).to be(true)
expect(user.reload.role).to eq('development_team_lead')
end
@@ -35,8 +35,8 @@ RSpec.describe Users::SignupService do
result = update_user(user, role: '')
expect(user.reload.role).not_to be_blank
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq("Role can't be blank")
+ expect(result.success?).to be(false)
+ expect(result.message).to eq("Role can't be blank")
end
end
@@ -44,7 +44,7 @@ RSpec.describe Users::SignupService do
it 'updates the setup_for_company attribute' do
result = update_user(user, setup_for_company: 'false')
- expect(result).to eq(status: :success)
+ expect(result.success?).to be(true)
expect(user.reload.setup_for_company).to be(false)
end
@@ -57,8 +57,8 @@ RSpec.describe Users::SignupService do
result = update_user(user, setup_for_company: '')
expect(user.reload.setup_for_company).not_to be_blank
- expect(result[:status]).to eq(:error)
- expect(result[:message]).to eq("Setup for company can't be blank")
+ expect(result.success?).to be(false)
+ expect(result.message).to eq("Setup for company can't be blank")
end
end
@@ -66,7 +66,7 @@ RSpec.describe Users::SignupService do
it 'returns success when setup_for_company is blank' do
result = update_user(user, setup_for_company: '')
- expect(result).to eq(status: :success)
+ expect(result.success?).to be(true)
expect(user.reload.setup_for_company).to be(nil)
end
end
diff --git a/spec/services/users/unblock_service_spec.rb b/spec/services/users/unblock_service_spec.rb
new file mode 100644
index 00000000000..25ee99427ab
--- /dev/null
+++ b/spec/services/users/unblock_service_spec.rb
@@ -0,0 +1,45 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Users::UnblockService do
+ let_it_be(:current_user) { create(:admin) }
+
+ subject(:service) { described_class.new(current_user) }
+
+ describe '#execute' do
+ subject(:operation) { service.execute(user) }
+
+ context 'when successful' do
+ let(:user) { create(:user, :blocked) }
+
+ it { expect(operation.success?).to eq(true) }
+
+ it "change the user's state" do
+ expect { operation }.to change { user.active? }.to(true)
+ end
+
+ it 'saves a custom attribute', :aggregate_failures, :freeze_time, feature_category: :insider_threat do
+ operation
+
+ custom_attribute = user.custom_attributes.last
+
+ expect(custom_attribute.key).to eq(UserCustomAttribute::UNBLOCKED_BY)
+ expect(custom_attribute.value).to eq("#{current_user.username}/#{current_user.id}+#{Time.current}")
+ end
+ end
+
+ context 'when failed' do
+ let(:user) { create(:user) }
+
+ it 'returns error result', :aggregate_failures do
+ expect(operation.error?).to eq(true)
+ expect(operation[:message]).to include(/State cannot transition/)
+ end
+
+ it "does not change the user's state" do
+ expect { operation }.not_to change { user.state }
+ end
+ end
+ end
+end
diff --git a/spec/services/work_items/create_service_spec.rb b/spec/services/work_items/create_service_spec.rb
index a952486ee64..049c90f20b0 100644
--- a/spec/services/work_items/create_service_spec.rb
+++ b/spec/services/work_items/create_service_spec.rb
@@ -193,7 +193,7 @@ RSpec.describe WorkItems::CreateService do
end
it_behaves_like 'fails creating work item and returns errors' do
- let(:error_message) { 'No matching task found. Make sure that you are adding a valid task ID.' }
+ let(:error_message) { 'No matching work item found. Make sure that you are adding a valid work item ID.' }
end
end
end
diff --git a/spec/services/work_items/parent_links/create_service_spec.rb b/spec/services/work_items/parent_links/create_service_spec.rb
index 2f2e830845a..5884847eac3 100644
--- a/spec/services/work_items/parent_links/create_service_spec.rb
+++ b/spec/services/work_items/parent_links/create_service_spec.rb
@@ -30,7 +30,7 @@ RSpec.describe WorkItems::ParentLinks::CreateService, feature_category: :portfol
shared_examples 'returns not found error' do
it 'returns error' do
- error = "No matching #{issuable_type} found. Make sure that you are adding a valid #{issuable_type} ID."
+ error = "No matching work item found. Make sure that you are adding a valid work item ID."
is_expected.to eq(service_error(error))
end
diff --git a/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb b/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb
index 5a5bb8a1674..6285b43311d 100644
--- a/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb
+++ b/spec/services/work_items/widgets/hierarchy_service/update_service_spec.rb
@@ -12,7 +12,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService, feature_cate
let_it_be(:existing_link) { create(:parent_link, work_item: child_work_item, work_item_parent: work_item) }
let(:widget) { work_item.widgets.find { |widget| widget.is_a?(WorkItems::Widgets::Hierarchy) } }
- let(:not_found_error) { 'No matching task found. Make sure that you are adding a valid task ID.' }
+ let(:not_found_error) { 'No matching work item found. Make sure that you are adding a valid work item ID.' }
shared_examples 'raises a WidgetError' do
it { expect { subject }.to raise_error(described_class::WidgetError, message) }
@@ -70,7 +70,7 @@ RSpec.describe WorkItems::Widgets::HierarchyService::UpdateService, feature_cate
let(:params) { { children: [child_work_item] } }
it_behaves_like 'raises a WidgetError' do
- let(:message) { 'Task(s) already assigned' }
+ let(:message) { 'Work item(s) already assigned' }
end
end
diff --git a/spec/simplecov_env.rb b/spec/simplecov_env.rb
index 70bd01091ba..07a23021ef5 100644
--- a/spec/simplecov_env.rb
+++ b/spec/simplecov_env.rb
@@ -57,6 +57,7 @@ module SimpleCovEnv
add_filter '/vendor/ruby/'
add_filter '/bin/'
add_filter 'db/fixtures/development/' # Matches EE files as well
+ add_filter %r|db/migrate/\d{14}_init_schema\.rb\z|
add_group 'Channels', 'app/channels' # Matches EE files as well
add_group 'Controllers', 'app/controllers' # Matches EE files as well
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index 23083203cfe..f33c6e64b0c 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -109,12 +109,12 @@ RSpec.configure do |config|
# Do not overwrite migration if it's already set
unless metadata.key?(:migration)
- metadata[:migration] = true if metadata[:level] == :migration
+ metadata[:migration] = true if metadata[:level] == :migration || metadata[:level] == :background_migration
end
# Do not overwrite schema if it's already set
unless metadata.key?(:schema)
- metadata[:schema] = :latest if quality_level.background_migration?(location)
+ metadata[:schema] = :latest if metadata[:level] == :background_migration
end
# Do not overwrite type if it's already set
diff --git a/spec/support/caching.rb b/spec/support/caching.rb
index 11e4f534971..b18223523db 100644
--- a/spec/support/caching.rb
+++ b/spec/support/caching.rb
@@ -37,8 +37,8 @@ RSpec.configure do |config|
end
config.around(:each, :use_sql_query_cache) do |example|
- ActiveRecord::Base.cache do
- example.run
- end
+ base_models = Gitlab::Database.database_base_models_with_gitlab_shared.values
+ inner_proc = proc { example.run }
+ base_models.inject(inner_proc) { |proc, base_model| proc { base_model.cache { proc.call } } }.call
end
end
diff --git a/spec/support/capybara.rb b/spec/support/capybara.rb
index aea853d1c23..fe9bff827dc 100644
--- a/spec/support/capybara.rb
+++ b/spec/support/capybara.rb
@@ -32,6 +32,8 @@ CAPYBARA_WINDOW_SIZE = [1366, 768].freeze
SCREENSHOT_FILENAME_LENGTH = ENV['CI'] || ENV['CI_SERVER'] ? 255 : 99
+@blackhole_tcp_server = nil
+
# Run Workhorse on the given host and port, proxying to Puma on a UNIX socket,
# for a closer-to-production experience
Capybara.register_server :puma_via_workhorse do |app, port, host, **options|
@@ -83,6 +85,17 @@ Capybara.register_driver :chrome do |app|
# Chrome 75 defaults to W3C mode which doesn't allow console log access
options.add_option(:w3c, false)
+ # Set up a proxy server to block all external traffic.
+ @blackhole_tcp_server = TCPServer.new(0)
+ Thread.new do
+ loop do
+ Thread.start(@blackhole_tcp_server.accept, &:close)
+ end
+ end
+
+ options.add_argument("--proxy-server=http://127.0.0.1:#{@blackhole_tcp_server.addr[1]}")
+ options.add_argument("--proxy-bypass-list=127.0.0.1,localhost,#{Gitlab.config.gitlab.host}")
+
Capybara::Selenium::Driver.new(
app,
browser: :chrome,
diff --git a/spec/support/helpers/api_helpers.rb b/spec/support/helpers/api_helpers.rb
index 62bb9576695..a9d7c6af959 100644
--- a/spec/support/helpers/api_helpers.rb
+++ b/spec/support/helpers/api_helpers.rb
@@ -19,7 +19,7 @@ module ApiHelpers
# => "/api/v2/issues?foo=bar&private_token=..."
#
# Returns the relative path to the requested API resource
- def api(path, user = nil, version: API::API.version, personal_access_token: nil, oauth_access_token: nil, job_token: nil, access_token: nil)
+ def api(path, user = nil, version: API::API.version, personal_access_token: nil, oauth_access_token: nil, job_token: nil, access_token: nil, admin_mode: false)
full_path = "/api/#{version}#{path}"
if oauth_access_token
@@ -31,7 +31,12 @@ module ApiHelpers
elsif access_token
query_string = "access_token=#{access_token.token}"
elsif user
- personal_access_token = create(:personal_access_token, user: user)
+ personal_access_token = if admin_mode && user.admin?
+ create(:personal_access_token, :admin_mode, user: user)
+ else
+ create(:personal_access_token, user: user)
+ end
+
query_string = "private_token=#{personal_access_token.token}"
end
diff --git a/spec/support/helpers/cycle_analytics_helpers.rb b/spec/support/helpers/cycle_analytics_helpers.rb
index 6d41d7b7414..632f3ea28ee 100644
--- a/spec/support/helpers/cycle_analytics_helpers.rb
+++ b/spec/support/helpers/cycle_analytics_helpers.rb
@@ -92,7 +92,7 @@ module CycleAnalyticsHelpers
end
def create_value_stream_group_aggregation(group)
- aggregation = Analytics::CycleAnalytics::Aggregation.safe_create_for_group(group)
+ aggregation = Analytics::CycleAnalytics::Aggregation.safe_create_for_namespace(group)
Analytics::CycleAnalytics::AggregatorService.new(aggregation: aggregation).execute
end
diff --git a/spec/support/helpers/database/database_helpers.rb b/spec/support/helpers/database/database_helpers.rb
index f3b2a2a6147..ecc42041e93 100644
--- a/spec/support/helpers/database/database_helpers.rb
+++ b/spec/support/helpers/database/database_helpers.rb
@@ -4,9 +4,7 @@ module Database
module DatabaseHelpers
# In order to directly work with views using factories,
# we can swap out the view for a table of identical structure.
- def swapout_view_for_table(view, connection: nil)
- connection ||= ActiveRecord::Base.connection
-
+ def swapout_view_for_table(view, connection:)
connection.execute(<<~SQL.squish)
CREATE TABLE #{view}_copy (LIKE #{view});
DROP VIEW #{view};
@@ -28,21 +26,20 @@ module Database
# with_statement_timeout(0.1) do
# model.select('pg_sleep(0.11)')
# end
- def with_statement_timeout(timeout)
+ def with_statement_timeout(timeout, connection:)
# Force a positive value and a minimum of 1ms for very small values.
timeout = (timeout * 1000).abs.ceil
raise ArgumentError, 'Using a timeout of `0` means to disable statement timeout.' if timeout == 0
- previous_timeout = ActiveRecord::Base.connection
- .exec_query('SHOW statement_timeout')[0].fetch('statement_timeout')
+ previous_timeout = connection.select_value('SHOW statement_timeout')
- set_statement_timeout("#{timeout}ms")
+ connection.execute(format(%(SET LOCAL statement_timeout = '%s'), timeout))
yield
ensure
begin
- set_statement_timeout(previous_timeout)
+ connection.execute(format(%(SET LOCAL statement_timeout = '%s'), previous_timeout))
rescue ActiveRecord::StatementInvalid
# After a transaction was canceled/aborted (e.g. due to a statement
# timeout), commands are ignored and will raise PG::InFailedSqlTransaction.
@@ -50,22 +47,5 @@ module Database
# for the current transaction, which will be closed anyway.
end
end
-
- # Set statement timeout for the current transaction.
- #
- # Note, that it does not restore the previous statement timeout.
- # Use `with_statement_timeout` instead.
- #
- # @param timeout - Statement timeout in seconds
- #
- # Example:
- #
- # set_statement_timeout(0.1)
- # model.select('pg_sleep(0.11)')
- def set_statement_timeout(timeout)
- ActiveRecord::Base.connection.execute(
- format(%(SET LOCAL statement_timeout = '%s'), timeout)
- )
- end
end
end
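
With the connection now a required keyword, the usage shown in the comment above becomes something like (assuming `model` is an ActiveRecord model available in the spec):

    with_statement_timeout(0.1, connection: model.connection) do
      model.select('pg_sleep(0.11)')
    end
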
diff --git a/spec/support/helpers/database/table_schema_helpers.rb b/spec/support/helpers/database/table_schema_helpers.rb
index 472eaa45b4b..815c37e00e5 100644
--- a/spec/support/helpers/database/table_schema_helpers.rb
+++ b/spec/support/helpers/database/table_schema_helpers.rb
@@ -3,7 +3,9 @@
module Database
module TableSchemaHelpers
def connection
- ActiveRecord::Base.connection
+ # We use ActiveRecord::Base.connection here because this is mainly used for database migrations,
+ # where we override the connection on ActiveRecord::Base
+ ActiveRecord::Base.connection # rubocop:disable Database/MultipleDatabases
end
def expect_table_to_be_replaced(original_table:, replacement_table:, archived_table:)
diff --git a/spec/support/helpers/features/members_helpers.rb b/spec/support/helpers/features/members_helpers.rb
index bdadcb8af43..2d3f0902a3c 100644
--- a/spec/support/helpers/features/members_helpers.rb
+++ b/spec/support/helpers/features/members_helpers.rb
@@ -56,6 +56,22 @@ module Spec
click_button 'Search'
end
end
+
+ def user_action_dropdown
+ '[data-testid="user-action-dropdown"]'
+ end
+
+ def show_actions
+ within user_action_dropdown do
+ find('button').click
+ end
+ end
+
+ def show_actions_for_username(user)
+ within find_username_row(user) do
+ show_actions
+ end
+ end
end
end
end
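
A hedged sketch of how a members feature spec might use the new helpers; the path helper and button label are assumptions, not part of this change:

    it 'opens the actions dropdown for a member' do
      visit group_group_members_path(group)

      show_actions_for_username(user)

      expect(page).to have_button('Remove member')
    end
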
diff --git a/spec/support/helpers/features/web_ide_spec_helpers.rb b/spec/support/helpers/features/web_ide_spec_helpers.rb
index 551749a43de..4793c9479fe 100644
--- a/spec/support/helpers/features/web_ide_spec_helpers.rb
+++ b/spec/support/helpers/features/web_ide_spec_helpers.rb
@@ -13,14 +13,18 @@
module WebIdeSpecHelpers
include Spec::Support::Helpers::Features::SourceEditorSpecHelpers
+ # Open the IDE from anywhere by first visiting the given project's page
def ide_visit(project)
visit project_path(project)
- wait_for_requests
+ ide_visit_from_link
+ end
- click_link('Web IDE')
+ # Open the IDE from the current page by clicking the Web IDE link
+ def ide_visit_from_link(link_sel = 'Web IDE')
+ new_tab = window_opened_by { click_link(link_sel) }
- wait_for_requests
+ switch_to_window new_tab
end
def ide_tree_body
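
A hedged usage sketch: `ide_visit` now switches to the tab opened by the Web IDE link, so callers keep the same shape (the file name is illustrative):

    ide_visit(project)

    expect(ide_tree_body).to have_content('README.md')
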
diff --git a/spec/support/helpers/listbox_helpers.rb b/spec/support/helpers/listbox_helpers.rb
new file mode 100644
index 00000000000..5fcd05f31fb
--- /dev/null
+++ b/spec/support/helpers/listbox_helpers.rb
@@ -0,0 +1,24 @@
+# frozen_string_literal: true
+
+module ListboxHelpers
+ def select_from_listbox(text, from:, exact_item_text: false)
+ click_button from
+ select_listbox_item(text, exact_text: exact_item_text)
+ end
+
+ def select_listbox_item(text, exact_text: false)
+ find('.gl-listbox-item[role="option"]', text: text, exact_text: exact_text).click
+ end
+
+ def expect_listbox_item(text)
+ expect(page).to have_css('.gl-listbox-item[role="option"]', text: text)
+ end
+
+ def expect_no_listbox_item(text)
+ expect(page).not_to have_css('.gl-listbox-item[role="option"]', text: text)
+ end
+
+ def expect_listbox_items(items)
+ expect(find_all('.gl-listbox-item[role="option"]').map(&:text)).to eq(items)
+ end
+end
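
A hedged usage sketch for the new helper module in a feature spec; the role names and toggle text are illustrative only:

    include ListboxHelpers

    # Opens the listbox whose toggle currently reads 'Guest' and picks an option.
    select_from_listbox('Developer', from: 'Guest')
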
diff --git a/spec/support/helpers/listbox_input_helper.rb b/spec/support/helpers/listbox_input_helper.rb
deleted file mode 100644
index ca7fbac5daa..00000000000
--- a/spec/support/helpers/listbox_input_helper.rb
+++ /dev/null
@@ -1,18 +0,0 @@
-# frozen_string_literal: true
-
-module ListboxInputHelper
- include WaitForRequests
-
- def listbox_input(value, from:)
- open_listbox_input(from) do
- find('[role="option"]', text: value).click
- end
- end
-
- def open_listbox_input(selector)
- page.within(selector) do
- page.find('button[aria-haspopup="listbox"]').click
- yield
- end
- end
-end
diff --git a/spec/support/helpers/login_helpers.rb b/spec/support/helpers/login_helpers.rb
index 44237b821c3..5fde80e6dc9 100644
--- a/spec/support/helpers/login_helpers.rb
+++ b/spec/support/helpers/login_helpers.rb
@@ -101,6 +101,8 @@ module LoginHelpers
fill_in "user_password", with: (password || user.password)
check 'user_remember_me' if remember
+ wait_for_all_requests
+
find('[data-testid="sign-in-button"]:enabled').click
if two_factor_auth
diff --git a/spec/support/helpers/migrations_helpers.rb b/spec/support/helpers/migrations_helpers.rb
index e1d28a807e3..6fc5904fc83 100644
--- a/spec/support/helpers/migrations_helpers.rb
+++ b/spec/support/helpers/migrations_helpers.rb
@@ -104,7 +104,7 @@ module MigrationsHelpers
# We stub this way because we can't stub on
# `current_application_settings`, since `method_missing` itself
# depends on current_application_settings...
- allow(ActiveRecord::Base.connection)
+ allow(Gitlab::Database::Migration::V1_0::MigrationRecord.connection)
.to receive(:active?)
.and_return(false)
allow(Gitlab::Runtime)
@@ -158,10 +158,10 @@ module MigrationsHelpers
end
def migrate!
- open_transactions = ActiveRecord::Base.connection.open_transactions
+ open_transactions = Gitlab::Database::Migration::V1_0::MigrationRecord.connection.open_transactions
allow_next_instance_of(described_class) do |migration|
allow(migration).to receive(:transaction_open?) do
- ActiveRecord::Base.connection.open_transactions > open_transactions
+ Gitlab::Database::Migration::V1_0::MigrationRecord.connection.open_transactions > open_transactions
end
end
diff --git a/spec/support/helpers/navbar_structure_helper.rb b/spec/support/helpers/navbar_structure_helper.rb
index e1ed3ffacec..48c6e590e1b 100644
--- a/spec/support/helpers/navbar_structure_helper.rb
+++ b/spec/support/helpers/navbar_structure_helper.rb
@@ -91,9 +91,8 @@ module NavbarStructureHelper
new_nav_item: {
nav_item: _('Observability'),
nav_sub_items: [
- _('Dashboards'),
- _('Explore'),
- _('Manage Dashboards')
+ _('Explore telemetry data'),
+ _('Data sources')
]
}
)
diff --git a/spec/support/helpers/query_recorder.rb b/spec/support/helpers/query_recorder.rb
index dd124ed9c7f..5be9ba9ae1e 100644
--- a/spec/support/helpers/query_recorder.rb
+++ b/spec/support/helpers/query_recorder.rb
@@ -19,9 +19,7 @@ module ActiveRecord
def record(&block)
# force replacement of bind parameters to give tests the ability to check for ids
- ActiveRecord::Base.connection.unprepared_statement do
- ActiveSupport::Notifications.subscribed(method(:callback), 'sql.active_record', &block)
- end
+ ActiveSupport::Notifications.subscribed(method(:callback), 'sql.active_record', &block)
end
def show_backtrace(values, duration)
diff --git a/spec/support/helpers/usage_data_helpers.rb b/spec/support/helpers/usage_data_helpers.rb
index 78ceaf297a8..438f0d129b9 100644
--- a/spec/support/helpers/usage_data_helpers.rb
+++ b/spec/support/helpers/usage_data_helpers.rb
@@ -116,8 +116,9 @@ module UsageDataHelpers
).freeze
def stub_usage_data_connections
- allow(ActiveRecord::Base.connection).to receive(:transaction_open?).and_return(false)
- allow(::Ci::ApplicationRecord.connection).to receive(:transaction_open?).and_return(false) if ::Ci::ApplicationRecord.connection_class?
+ Gitlab::Database.database_base_models_with_gitlab_shared.each_value do |base_model|
+ allow(base_model.connection).to receive(:transaction_open?).and_return(false)
+ end
allow(Gitlab::Prometheus::Internal).to receive(:prometheus_enabled?).and_return(false)
end
diff --git a/spec/support/matchers/be_boolean.rb b/spec/support/matchers/be_boolean.rb
new file mode 100644
index 00000000000..b8c2c385504
--- /dev/null
+++ b/spec/support/matchers/be_boolean.rb
@@ -0,0 +1,10 @@
+# frozen_string_literal: true
+
+# Assert that this value is a boolean, i.e. true or false
+#
+# ```
+# expect(value).to be_boolean
+# ```
+RSpec::Matchers.define :be_boolean do
+ match { |value| value.in? [true, false] }
+end
diff --git a/spec/support/matchers/exceed_query_limit.rb b/spec/support/matchers/exceed_query_limit.rb
index a5a017828b3..4b08c13945c 100644
--- a/spec/support/matchers/exceed_query_limit.rb
+++ b/spec/support/matchers/exceed_query_limit.rb
@@ -380,3 +380,32 @@ RSpec::Matchers.define :exceed_query_limit do |expected|
failure_message
end
end
+
+RSpec::Matchers.define :match_query_count do |expected|
+ supports_block_expectations
+
+ include ExceedQueryLimitHelpers
+
+ def verify_count(&block)
+ @subject_block = block
+ actual_count == maximum
+ end
+
+ def failure_message
+ threshold_message = threshold > 0 ? " (+#{threshold})" : ''
+ counts = "#{expected_count}#{threshold_message}"
+ "Expected exactly #{counts} queries, got #{actual_count}:\n\n#{log_message}"
+ end
+
+ def skip_cached
+ false
+ end
+
+ match do |block|
+ verify_count(&block)
+ end
+
+ failure_message_when_negated do |actual|
+ failure_message
+ end
+end
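
A hedged usage sketch for the new matcher, which asserts an exact query count rather than an upper bound (the model and count are illustrative):

    it 'runs exactly one query' do
      expect { Project.count }.to match_query_count(1)
    end
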
diff --git a/spec/support/redis/redis_helpers.rb b/spec/support/redis/redis_helpers.rb
index 34ac69236ee..2c5ceb2f09e 100644
--- a/spec/support/redis/redis_helpers.rb
+++ b/spec/support/redis/redis_helpers.rb
@@ -6,11 +6,4 @@ module RedisHelpers
instance_class.with(&:flushdb)
end
end
-
- # Usage: reset cached instance config
- def redis_clear_raw_config!(instance_class)
- instance_class.remove_instance_variable(:@_raw_config)
- rescue NameError
- # raised if @_raw_config was not set; ignore
- end
end
diff --git a/spec/support/redis/redis_new_instance_shared_examples.rb b/spec/support/redis/redis_new_instance_shared_examples.rb
index 943fe0f11ba..0f2de78b2cb 100644
--- a/spec/support/redis/redis_new_instance_shared_examples.rb
+++ b/spec/support/redis/redis_new_instance_shared_examples.rb
@@ -3,27 +3,22 @@
require 'spec_helper'
RSpec.shared_examples "redis_new_instance_shared_examples" do |name, fallback_class|
+ include TmpdirHelper
+
let(:instance_specific_config_file) { "config/redis.#{name}.yml" }
let(:environment_config_file_name) { "GITLAB_REDIS_#{name.upcase}_CONFIG_FILE" }
let(:fallback_config_file) { nil }
+ let(:rails_root) { mktmpdir }
before do
- redis_clear_raw_config!(fallback_class)
-
allow(fallback_class).to receive(:config_file_name).and_return(fallback_config_file)
end
- after do
- redis_clear_raw_config!(fallback_class)
- end
-
it_behaves_like "redis_shared_examples"
describe '.config_file_name' do
subject { described_class.config_file_name }
- let(:rails_root) { Dir.mktmpdir('redis_shared_examples') }
-
before do
# Undo top-level stub of config_file_name because we are testing that method now.
allow(described_class).to receive(:config_file_name).and_call_original
@@ -32,10 +27,6 @@ RSpec.shared_examples "redis_new_instance_shared_examples" do |name, fallback_cl
FileUtils.mkdir_p(File.join(rails_root, 'config'))
end
- after do
- FileUtils.rm_rf(rails_root)
- end
-
context 'when there is only a resque.yml' do
before do
FileUtils.touch(File.join(rails_root, 'config/resque.yml'))
@@ -58,4 +49,49 @@ RSpec.shared_examples "redis_new_instance_shared_examples" do |name, fallback_cl
end
end
end
+
+ describe '#fetch_config' do
+ context 'when redis.yml exists' do
+ subject { described_class.new('test').send(:fetch_config) }
+
+ before do
+ allow(described_class).to receive(:config_file_name).and_call_original
+ allow(described_class).to receive(:redis_yml_path).and_call_original
+ allow(described_class).to receive(:rails_root).and_return(rails_root)
+ FileUtils.mkdir_p(File.join(rails_root, 'config'))
+ end
+
+ context 'when the fallback has a redis.yml entry' do
+ before do
+ File.write(File.join(rails_root, 'config/redis.yml'), {
+ 'test' => {
+ described_class.config_fallback.store_name.underscore => { 'fallback redis.yml' => 123 }
+ }
+ }.to_json)
+ end
+
+ it { expect(subject).to eq({ 'fallback redis.yml' => 123 }) }
+
+ context 'and an instance config file exists' do
+ before do
+ File.write(File.join(rails_root, instance_specific_config_file), {
+ 'test' => { 'instance specific file' => 456 }
+ }.to_json)
+ end
+
+ it { expect(subject).to eq({ 'instance specific file' => 456 }) }
+
+ context 'and the instance has a redis.yml entry' do
+ before do
+ File.write(File.join(rails_root, 'config/redis.yml'), {
+ 'test' => { name => { 'instance redis.yml' => 789 } }
+ }.to_json)
+ end
+
+ it { expect(subject).to eq({ 'instance redis.yml' => 789 }) }
+ end
+ end
+ end
+ end
+ end
end
diff --git a/spec/support/redis/redis_shared_examples.rb b/spec/support/redis/redis_shared_examples.rb
index 0368fd63357..43c118a362d 100644
--- a/spec/support/redis/redis_shared_examples.rb
+++ b/spec/support/redis/redis_shared_examples.rb
@@ -2,6 +2,7 @@
RSpec.shared_examples "redis_shared_examples" do
include StubENV
+ include TmpdirHelper
let(:test_redis_url) { "redis://redishost:#{redis_port}" }
let(:test_cluster_config) { { cluster: [{ host: "redis://redishost", port: redis_port }] } }
@@ -18,15 +19,11 @@ RSpec.shared_examples "redis_shared_examples" do
let(:sentinel_port) { 26379 }
let(:config_with_environment_variable_inside) { "spec/fixtures/config/redis_config_with_env.yml" }
let(:config_env_variable_url) { "TEST_GITLAB_REDIS_URL" }
- let(:rails_root) { Dir.mktmpdir('redis_shared_examples') }
+ let(:rails_root) { mktmpdir }
before do
allow(described_class).to receive(:config_file_name).and_return(Rails.root.join(config_file_name).to_s)
- redis_clear_raw_config!(described_class)
- end
-
- after do
- redis_clear_raw_config!(described_class)
+ allow(described_class).to receive(:redis_yml_path).and_return('/dev/null')
end
describe '.config_file_name' do
@@ -40,10 +37,6 @@ RSpec.shared_examples "redis_shared_examples" do
FileUtils.mkdir_p(File.join(rails_root, 'config'))
end
- after do
- FileUtils.rm_rf(rails_root)
- end
-
context 'when there is no config file anywhere' do
it { expect(subject).to be_nil }
@@ -250,26 +243,6 @@ RSpec.shared_examples "redis_shared_examples" do
end
end
- describe '._raw_config' do
- subject { described_class._raw_config }
-
- let(:config_file_name) { '/var/empty/doesnotexist' }
-
- it 'is frozen' do
- expect(subject).to be_frozen
- end
-
- it 'returns false when the file does not exist' do
- expect(subject).to eq(false)
- end
-
- it "returns false when the filename can't be determined" do
- expect(described_class).to receive(:config_file_name).and_return(nil)
-
- expect(subject).to eq(false)
- end
- end
-
describe '.with' do
let(:config_file_name) { config_old_format_socket }
@@ -313,10 +286,6 @@ RSpec.shared_examples "redis_shared_examples" do
allow(described_class).to receive(:rails_root).and_return(rails_root)
end
- after do
- FileUtils.rm_rf(rails_root)
- end
-
it 'can run an empty block' do
expect { described_class.with { nil } }.not_to raise_error
end
@@ -408,9 +377,7 @@ RSpec.shared_examples "redis_shared_examples" do
context 'when sentinels are not defined' do
let(:config_file_name) { config_old_format_host }
- it 'returns false' do
- is_expected.to be_falsey
- end
+ it { expect(subject).to eq(nil) }
end
context 'when cluster is defined' do
@@ -435,22 +402,39 @@ RSpec.shared_examples "redis_shared_examples" do
end
describe '#fetch_config' do
- it 'returns false when no config file is present' do
- allow(described_class).to receive(:_raw_config) { false }
+ it 'raises an exception when the config file contains invalid yaml' do
+ Tempfile.open('bad.yml') do |file|
+ file.write('{"not":"yaml"')
+ file.flush
+ allow(described_class).to receive(:config_file_name) { file.path }
- expect(subject.send(:fetch_config)).to eq false
+ expect { subject.send(:fetch_config) }.to raise_error(Psych::SyntaxError)
+ end
end
- it 'returns false when config file is present but has invalid YAML' do
- allow(described_class).to receive(:_raw_config) { "# development: true" }
+ it 'has a value for the legacy default URL' do
+ allow(subject).to receive(:fetch_config) { nil }
- expect(subject.send(:fetch_config)).to eq false
+ expect(subject.send(:raw_config_hash)).to include(url: a_string_matching(%r{\Aredis://localhost:638[012]\Z}))
end
- it 'has a value for the legacy default URL' do
- allow(subject).to receive(:fetch_config) { false }
+ context 'when redis.yml exists' do
+ subject { described_class.new('test').send(:fetch_config) }
- expect(subject.send(:raw_config_hash)).to include(url: a_string_matching(%r{\Aredis://localhost:638[012]\Z}))
+ before do
+ allow(described_class).to receive(:config_file_name).and_call_original
+ allow(described_class).to receive(:redis_yml_path).and_call_original
+ allow(described_class).to receive(:rails_root).and_return(rails_root)
+ FileUtils.mkdir_p(File.join(rails_root, 'config'))
+ end
+
+ it 'uses config/redis.yml' do
+ File.write(File.join(rails_root, 'config/redis.yml'), {
+ 'test' => { described_class.store_name.underscore => { 'foobar' => 123 } }
+ }.to_json)
+
+ expect(subject).to eq({ 'foobar' => 123 })
+ end
end
end
diff --git a/spec/support/rspec_order_todo.yml b/spec/support/rspec_order_todo.yml
index 489ed89c048..2f3f0feb87e 100644
--- a/spec/support/rspec_order_todo.yml
+++ b/spec/support/rspec_order_todo.yml
@@ -30,11 +30,6 @@
- './ee/spec/controllers/admin/runners_controller_spec.rb'
- './ee/spec/controllers/admin/users_controller_spec.rb'
- './ee/spec/controllers/autocomplete_controller_spec.rb'
-- './ee/spec/controllers/boards/issues_controller_spec.rb'
-- './ee/spec/controllers/boards/lists_controller_spec.rb'
-- './ee/spec/controllers/boards/milestones_controller_spec.rb'
-- './ee/spec/controllers/boards/users_controller_spec.rb'
-- './ee/spec/controllers/concerns/boards_responses_spec.rb'
- './ee/spec/controllers/concerns/ee/routable_actions/sso_enforcement_redirect_spec.rb'
- './ee/spec/controllers/concerns/geo_instrumentation_spec.rb'
- './ee/spec/controllers/concerns/gitlab_subscriptions/seat_count_alert_spec.rb'
@@ -107,7 +102,6 @@
- './ee/spec/controllers/groups/security/vulnerabilities_controller_spec.rb'
- './ee/spec/controllers/groups/sso_controller_spec.rb'
- './ee/spec/controllers/groups/todos_controller_spec.rb'
-- './ee/spec/controllers/groups/usage_quotas_controller_spec.rb'
- './ee/spec/controllers/groups/wikis_controller_spec.rb'
- './ee/spec/controllers/ldap/omniauth_callbacks_controller_spec.rb'
- './ee/spec/controllers/oauth/applications_controller_spec.rb'
@@ -141,7 +135,6 @@
- './ee/spec/controllers/projects/issues_controller_spec.rb'
- './ee/spec/controllers/projects/iteration_cadences_controller_spec.rb'
- './ee/spec/controllers/projects/iterations_controller_spec.rb'
-- './ee/spec/controllers/projects/legacy_pipelines_controller_spec.rb'
- './ee/spec/controllers/projects/licenses_controller_spec.rb'
- './ee/spec/controllers/projects/merge_requests_controller_spec.rb'
- './ee/spec/controllers/projects/merge_requests/creations_controller_spec.rb'
@@ -171,11 +164,8 @@
- './ee/spec/controllers/projects/subscriptions_controller_spec.rb'
- './ee/spec/controllers/projects/vulnerability_feedback_controller_spec.rb'
- './ee/spec/controllers/registrations/company_controller_spec.rb'
-- './ee/spec/controllers/registrations/groups_controller_spec.rb'
- './ee/spec/controllers/registrations/groups_projects_controller_spec.rb'
-- './ee/spec/controllers/registrations/projects_controller_spec.rb'
- './ee/spec/controllers/registrations/verification_controller_spec.rb'
-- './ee/spec/controllers/registrations/welcome_controller_spec.rb'
- './ee/spec/controllers/repositories/git_http_controller_spec.rb'
- './ee/spec/controllers/security/dashboard_controller_spec.rb'
- './ee/spec/controllers/security/projects_controller_spec.rb'
@@ -210,7 +200,6 @@
- './ee/spec/elastic/migrate/20220512150000_pause_indexing_for_unsupported_es_versions_spec.rb'
- './ee/spec/elastic/migrate/20220613120500_migrate_commits_to_separate_index_spec.rb'
- './ee/spec/elastic/migrate/20220713103500_delete_commits_from_original_index_spec.rb'
-- './ee/spec/factories/lfs_object_spec.rb'
- './ee/spec/features/account_recovery_regular_check_spec.rb'
- './ee/spec/features/admin/admin_credentials_inventory_spec.rb'
- './ee/spec/features/admin/admin_dashboard_spec.rb'
@@ -260,7 +249,6 @@
- './ee/spec/features/ci_shared_runner_settings_spec.rb'
- './ee/spec/features/ci_shared_runner_warnings_spec.rb'
- './ee/spec/features/clusters/cluster_detail_page_spec.rb'
-- './ee/spec/features/contextual_sidebar_spec.rb'
- './ee/spec/features/dashboards/activity_spec.rb'
- './ee/spec/features/dashboards/groups_spec.rb'
- './ee/spec/features/dashboards/issues_spec.rb'
@@ -312,8 +300,6 @@
- './ee/spec/features/groups/hooks/user_views_hooks_spec.rb'
- './ee/spec/features/groups/insights_spec.rb'
- './ee/spec/features/groups/issues_spec.rb'
-- './ee/spec/features/groups/iterations/iterations_list_spec.rb'
-- './ee/spec/features/groups/iteration_spec.rb'
- './ee/spec/features/groups/iterations/user_creates_iteration_in_cadence_spec.rb'
- './ee/spec/features/groups/iterations/user_edits_iteration_cadence_spec.rb'
- './ee/spec/features/groups/iterations/user_edits_iteration_spec.rb'
@@ -333,7 +319,6 @@
- './ee/spec/features/groups/saml_group_links_spec.rb'
- './ee/spec/features/groups/saml_providers_spec.rb'
- './ee/spec/features/groups/scim_token_spec.rb'
-- './ee/spec/features/groups/seat_usage/seat_usage_spec.rb'
- './ee/spec/features/groups/security/compliance_dashboards_spec.rb'
- './ee/spec/features/groups/settings/ci_cd_spec.rb'
- './ee/spec/features/groups/settings/protected_environments_spec.rb'
@@ -342,10 +327,8 @@
- './ee/spec/features/groups/settings/user_searches_in_settings_spec.rb'
- './ee/spec/features/groups_spec.rb'
- './ee/spec/features/groups/sso_spec.rb'
-- './ee/spec/features/groups/usage_quotas_spec.rb'
- './ee/spec/features/groups/wikis_spec.rb'
- './ee/spec/features/groups/wiki/user_views_wiki_empty_spec.rb'
-- './ee/spec/features/ide/user_commits_changes_spec.rb'
- './ee/spec/features/ide/user_opens_ide_spec.rb'
- './ee/spec/features/integrations/jira/jira_issues_list_spec.rb'
- './ee/spec/features/invites_spec.rb'
@@ -432,7 +415,6 @@
- './ee/spec/features/projects/issues/user_creates_issue_spec.rb'
- './ee/spec/features/projects/issues/viewing_relocated_issues_spec.rb'
- './ee/spec/features/projects/iterations/iteration_cadences_list_spec.rb'
-- './ee/spec/features/projects/iterations/iterations_list_spec.rb'
- './ee/spec/features/projects/iterations/user_views_iteration_spec.rb'
- './ee/spec/features/projects/jobs/blocked_deployment_job_page_spec.rb'
- './ee/spec/features/projects/jobs_spec.rb'
@@ -450,7 +432,6 @@
- './ee/spec/features/projects/new_project_from_template_spec.rb'
- './ee/spec/features/projects/new_project_spec.rb'
- './ee/spec/features/projects/path_locks_spec.rb'
-- './ee/spec/features/projects/pipelines/legacy_pipeline_spec.rb'
- './ee/spec/features/projects/pipelines/pipeline_csp_spec.rb'
- './ee/spec/features/projects/pipelines/pipeline_spec.rb'
- './ee/spec/features/projects/pipelines/pipelines_spec.rb'
@@ -479,7 +460,6 @@
- './ee/spec/features/projects/settings/user_manages_approval_settings_spec.rb'
- './ee/spec/features/projects/settings/user_manages_issues_template_spec.rb'
- './ee/spec/features/projects/settings/user_manages_members_spec.rb'
-- './ee/spec/features/projects/settings/user_manages_merge_pipelines_spec.rb'
- './ee/spec/features/projects/settings/user_manages_merge_requests_template_spec.rb'
- './ee/spec/features/projects/settings/user_manages_merge_trains_spec.rb'
- './ee/spec/features/projects/show/developer_views_empty_project_instructions_spec.rb'
@@ -494,9 +474,6 @@
- './ee/spec/features/read_only_spec.rb'
- './ee/spec/features/registrations/combined_registration_spec.rb'
- './ee/spec/features/registrations/one_trust_spec.rb'
-- './ee/spec/features/registrations/saas_user_registration_spec.rb'
-- './ee/spec/features/registrations/trial_during_signup_flow_spec.rb'
-- './ee/spec/features/registrations/user_sees_new_onboarding_flow_spec.rb'
- './ee/spec/features/registrations/welcome_spec.rb'
- './ee/spec/features/search/elastic/global_search_spec.rb'
- './ee/spec/features/search/elastic/group_search_spec.rb'
@@ -595,7 +572,6 @@
- './ee/spec/finders/productivity_analytics_finder_spec.rb'
- './ee/spec/finders/projects/integrations/jira/by_ids_finder_spec.rb'
- './ee/spec/finders/projects/integrations/jira/issues_finder_spec.rb'
-- './ee/spec/finders/requirements_management/requirements_finder_spec.rb'
- './ee/spec/finders/scim_finder_spec.rb'
- './ee/spec/finders/security/findings_finder_spec.rb'
- './ee/spec/finders/security/pipeline_vulnerabilities_finder_spec.rb'
@@ -641,7 +617,6 @@
- './ee/spec/graphql/ee/mutations/concerns/mutations/resolves_issuable_spec.rb'
- './ee/spec/graphql/ee/resolvers/board_list_issues_resolver_spec.rb'
- './ee/spec/graphql/ee/resolvers/board_lists_resolver_spec.rb'
-- './ee/spec/graphql/ee/resolvers/issues_resolver_spec.rb'
- './ee/spec/graphql/ee/resolvers/namespace_projects_resolver_spec.rb'
- './ee/spec/graphql/ee/types/alert_management/http_integration_type_spec.rb'
- './ee/spec/graphql/ee/types/board_list_type_spec.rb'
@@ -718,7 +693,6 @@
- './ee/spec/graphql/mutations/requirements_management/update_requirement_spec.rb'
- './ee/spec/graphql/mutations/security/ci_configuration/configure_container_scanning_spec.rb'
- './ee/spec/graphql/mutations/security/ci_configuration/configure_dependency_scanning_spec.rb'
-- './ee/spec/graphql/mutations/security_finding/dismiss_spec.rb'
- './ee/spec/graphql/mutations/security_policy/assign_security_policy_project_spec.rb'
- './ee/spec/graphql/mutations/security_policy/commit_scan_execution_policy_spec.rb'
- './ee/spec/graphql/mutations/security_policy/create_security_policy_project_spec.rb'
@@ -958,7 +932,6 @@
- './ee/spec/graphql/types/vulnerable_projects_by_grade_type_spec.rb'
- './ee/spec/graphql/types/work_items/type_spec.rb'
- './ee/spec/graphql/types/work_items/widget_interface_spec.rb'
-- './ee/spec/graphql/types/work_items/widgets/verification_status_type_spec.rb'
- './ee/spec/helpers/admin/emails_helper_spec.rb'
- './ee/spec/helpers/admin/ip_restriction_helper_spec.rb'
- './ee/spec/helpers/admin/repo_size_limit_helper_spec.rb'
@@ -1020,7 +993,6 @@
- './ee/spec/helpers/ee/trial_registration_helper_spec.rb'
- './ee/spec/helpers/ee/users/callouts_helper_spec.rb'
- './ee/spec/helpers/ee/version_check_helper_spec.rb'
-- './ee/spec/helpers/ee/welcome_helper_spec.rb'
- './ee/spec/helpers/ee/wiki_helper_spec.rb'
- './ee/spec/helpers/epics_helper_spec.rb'
- './ee/spec/helpers/gitlab_subscriptions/upcoming_reconciliation_helper_spec.rb'
@@ -1132,13 +1104,6 @@
- './ee/spec/lib/ee/api/helpers/scim_pagination_spec.rb'
- './ee/spec/lib/ee/api/helpers_spec.rb'
- './ee/spec/lib/ee/api/helpers/variables_helpers_spec.rb'
-- './ee/spec/lib/ee/audit/compliance_framework_changes_auditor_spec.rb'
-- './ee/spec/lib/ee/audit/group_changes_auditor_spec.rb'
-- './ee/spec/lib/ee/audit/project_changes_auditor_spec.rb'
-- './ee/spec/lib/ee/audit/project_ci_cd_setting_changes_auditor_spec.rb'
-- './ee/spec/lib/ee/audit/project_feature_changes_auditor_spec.rb'
-- './ee/spec/lib/ee/audit/project_setting_changes_auditor_spec.rb'
-- './ee/spec/lib/ee/audit/protected_branches_changes_auditor_spec.rb'
- './ee/spec/lib/ee/backup/repositories_spec.rb'
- './ee/spec/lib/ee/banzai/filter/sanitization_filter_spec.rb'
- './ee/spec/lib/ee/bulk_imports/groups/stage_spec.rb'
@@ -1170,7 +1135,6 @@
- './ee/spec/lib/ee/gitlab/background_migration/migrate_approver_to_approval_rules_check_progress_spec.rb'
- './ee/spec/lib/ee/gitlab/background_migration/migrate_approver_to_approval_rules_in_batch_spec.rb'
- './ee/spec/lib/ee/gitlab/background_migration/migrate_approver_to_approval_rules_spec.rb'
-- './ee/spec/lib/ee/gitlab/background_migration/migrate_job_artifact_registry_to_ssf_spec.rb'
- './ee/spec/lib/ee/gitlab/background_migration/migrate_shared_vulnerability_scanners_spec.rb'
- './ee/spec/lib/ee/gitlab/background_migration/populate_latest_pipeline_ids_spec.rb'
- './ee/spec/lib/ee/gitlab/background_migration/populate_namespace_statistics_spec.rb'
@@ -1197,7 +1161,6 @@
- './ee/spec/lib/ee/gitlab/ci/pipeline/chain/validate/external_spec.rb'
- './ee/spec/lib/ee/gitlab/ci/pipeline/chain/validate/security_orchestration_policy_spec.rb'
- './ee/spec/lib/ee/gitlab/ci/pipeline/quota/activity_spec.rb'
-- './ee/spec/lib/ee/gitlab/ci/pipeline/quota/job_activity_spec.rb'
- './ee/spec/lib/ee/gitlab/ci/pipeline/quota/size_spec.rb'
- './ee/spec/lib/ee/gitlab/ci/reports/security/reports_spec.rb'
- './ee/spec/lib/ee/gitlab/ci/status/build/manual_spec.rb'
@@ -1219,8 +1182,6 @@
- './ee/spec/lib/ee/gitlab/hook_data/issue_builder_spec.rb'
- './ee/spec/lib/ee/gitlab/hook_data/user_builder_spec.rb'
- './ee/spec/lib/ee/gitlab/import_export/after_export_strategies/custom_template_export_import_strategy_spec.rb'
-- './ee/spec/lib/ee/gitlab/import_export/group/legacy_tree_restorer_spec.rb'
-- './ee/spec/lib/ee/gitlab/import_export/group/legacy_tree_saver_spec.rb'
- './ee/spec/lib/ee/gitlab/import_export/group/tree_restorer_spec.rb'
- './ee/spec/lib/ee/gitlab/import_export/group/tree_saver_spec.rb'
- './ee/spec/lib/ee/gitlab/import_export/project/tree_restorer_spec.rb'
@@ -1241,11 +1202,9 @@
- './ee/spec/lib/ee/gitlab/repo_path_spec.rb'
- './ee/spec/lib/ee/gitlab/repository_size_checker_spec.rb'
- './ee/spec/lib/ee/gitlab/scim/attribute_transform_spec.rb'
-- './ee/spec/lib/ee/gitlab/scim/deprovision_service_spec.rb'
- './ee/spec/lib/ee/gitlab/scim/filter_parser_spec.rb'
- './ee/spec/lib/ee/gitlab/scim/params_parser_spec.rb'
- './ee/spec/lib/ee/gitlab/scim/provisioning_service_spec.rb'
-- './ee/spec/lib/ee/gitlab/scim/reprovision_service_spec.rb'
- './ee/spec/lib/ee/gitlab/scim/value_parser_spec.rb'
- './ee/spec/lib/ee/gitlab/search_results_spec.rb'
- './ee/spec/lib/ee/gitlab/security/scan_configuration_spec.rb'
@@ -1385,13 +1344,11 @@
- './ee/spec/lib/gitlab/ci/parsers/security/dast_spec.rb'
- './ee/spec/lib/gitlab/ci/parsers/security/dependency_list_spec.rb'
- './ee/spec/lib/gitlab/ci/parsers/security/dependency_scanning_spec.rb'
-- './ee/spec/lib/gitlab/ci/parsers/security/formatters/dast_spec.rb'
- './ee/spec/lib/gitlab/ci/parsers/security/formatters/dependency_list_spec.rb'
- './ee/spec/lib/gitlab/ci/parsers/security/validators/default_branch_image_validator_spec.rb'
- './ee/spec/lib/gitlab/ci/pipeline/chain/config/content_spec.rb'
- './ee/spec/lib/gitlab/ci/pipeline/chain/create_cross_database_associations_spec.rb'
- './ee/spec/lib/gitlab/ci/pipeline/chain/limit/activity_spec.rb'
-- './ee/spec/lib/gitlab/ci/pipeline/chain/limit/job_activity_spec.rb'
- './ee/spec/lib/gitlab/ci/pipeline/chain/limit/size_spec.rb'
- './ee/spec/lib/gitlab/ci/reports/coverage_fuzzing/report_spec.rb'
- './ee/spec/lib/gitlab/ci/reports/dependency_list/dependency_spec.rb'
@@ -1533,7 +1490,6 @@
- './ee/spec/lib/gitlab/insights/reducers/base_reducer_spec.rb'
- './ee/spec/lib/gitlab/insights/reducers/count_per_label_reducer_spec.rb'
- './ee/spec/lib/gitlab/insights/reducers/count_per_period_reducer_spec.rb'
-- './ee/spec/lib/gitlab/insights/reducers/dora_reducer_spec.rb'
- './ee/spec/lib/gitlab/insights/reducers/label_count_per_period_reducer_spec.rb'
- './ee/spec/lib/gitlab/insights/serializers/chartjs/bar_serializer_spec.rb'
- './ee/spec/lib/gitlab/insights/serializers/chartjs/bar_time_series_serializer_spec.rb'
@@ -1552,7 +1508,6 @@
- './ee/spec/lib/gitlab/middleware/ip_restrictor_spec.rb'
- './ee/spec/lib/gitlab/mirror_spec.rb'
- './ee/spec/lib/gitlab/object_hierarchy_spec.rb'
-- './ee/spec/lib/gitlab/pagination_delegate_spec.rb'
- './ee/spec/lib/gitlab/pagination/keyset/simple_order_builder_spec.rb'
- './ee/spec/lib/gitlab/patch/database_config_spec.rb'
- './ee/spec/lib/gitlab/patch/draw_route_spec.rb'
@@ -1590,7 +1545,6 @@
- './ee/spec/lib/gitlab/usage_data_counters/epic_activity_unique_counter_spec.rb'
- './ee/spec/lib/gitlab/usage_data_counters/licenses_list_spec.rb'
- './ee/spec/lib/gitlab/usage_data_metrics_spec.rb'
-- './ee/spec/lib/gitlab/usage/metrics/aggregates/aggregate_spec.rb'
- './ee/spec/lib/gitlab/usage/metrics/instrumentations/advanced_search/build_type_metric_spec.rb'
- './ee/spec/lib/gitlab/usage/metrics/instrumentations/advanced_search/distribution_metric_spec.rb'
- './ee/spec/lib/gitlab/usage/metrics/instrumentations/advanced_search/lucene_version_metric_spec.rb'
@@ -1670,12 +1624,10 @@
- './ee/spec/migrations/add_non_null_constraint_for_escalation_rule_on_pending_alert_escalations_spec.rb'
- './ee/spec/migrations/async_build_trace_expire_at_index_spec.rb'
- './ee/spec/migrations/backfill_delayed_group_deletion_spec.rb'
-- './ee/spec/migrations/backfill_namespace_statistics_with_wiki_size_spec.rb'
- './ee/spec/migrations/drop_invalid_remediations_spec.rb'
- './ee/spec/migrations/geo/fix_state_column_in_file_registry_spec.rb'
- './ee/spec/migrations/geo/fix_state_column_in_lfs_object_registry_spec.rb'
- './ee/spec/migrations/geo/migrate_ci_job_artifacts_to_separate_registry_spec.rb'
-- './ee/spec/migrations/geo/migrate_job_artifact_registry_spec.rb'
- './ee/spec/migrations/geo/migrate_lfs_objects_to_separate_registry_spec.rb'
- './ee/spec/migrations/geo/set_resync_flag_for_retried_projects_spec.rb'
- './ee/spec/migrations/remove_schedule_and_status_null_constraints_from_pending_escalations_alert_spec.rb'
@@ -1683,14 +1635,11 @@
- './ee/spec/migrations/schedule_populate_test_reports_issue_id_spec.rb'
- './ee/spec/migrations/schedule_requirements_migration_spec.rb'
- './ee/spec/migrations/schedule_trace_expiry_removal_spec.rb'
-- './ee/spec/migrations/update_gitlab_subscriptions_start_at_post_eoa_spec.rb'
- './ee/spec/migrations/update_vulnerability_occurrences_location_spec.rb'
- './ee/spec/models/alert_management/alert_payload_field_spec.rb'
- './ee/spec/models/allowed_email_domain_spec.rb'
- './ee/spec/models/analytics/cycle_analytics/aggregation_context_spec.rb'
- './ee/spec/models/analytics/cycle_analytics/group_level_spec.rb'
-- './ee/spec/models/analytics/cycle_analytics/group_stage_spec.rb'
-- './ee/spec/models/analytics/cycle_analytics/group_value_stream_spec.rb'
- './ee/spec/models/analytics/cycle_analytics/project_stage_spec.rb'
- './ee/spec/models/analytics/cycle_analytics/runtime_limiter_spec.rb'
- './ee/spec/models/analytics/devops_adoption/enabled_namespace_spec.rb'
@@ -1698,7 +1647,6 @@
- './ee/spec/models/analytics/issues_analytics_spec.rb'
- './ee/spec/models/analytics/language_trend/repository_language_spec.rb'
- './ee/spec/models/application_setting_spec.rb'
-- './ee/spec/models/approvable_spec.rb'
- './ee/spec/models/approval_merge_request_rule_spec.rb'
- './ee/spec/models/approval_project_rule_spec.rb'
- './ee/spec/models/approvals/scan_finding_wrapped_rule_set_spec.rb'
@@ -1730,7 +1678,6 @@
- './ee/spec/models/ci/daily_build_group_report_result_spec.rb'
- './ee/spec/models/ci/minutes/additional_pack_spec.rb'
- './ee/spec/models/ci/minutes/context_spec.rb'
-- './ee/spec/models/ci/minutes/limit_spec.rb'
- './ee/spec/models/ci/minutes/namespace_monthly_usage_spec.rb'
- './ee/spec/models/ci/minutes/notification_spec.rb'
- './ee/spec/models/ci/minutes/project_monthly_usage_spec.rb'
@@ -1942,7 +1889,6 @@
- './ee/spec/models/namespace_limit_spec.rb'
- './ee/spec/models/namespace_setting_spec.rb'
- './ee/spec/models/namespaces/free_user_cap_spec.rb'
-- './ee/spec/models/namespaces/free_user_cap/standard_spec.rb'
- './ee/spec/models/namespaces/storage/root_excess_size_spec.rb'
- './ee/spec/models/namespaces/storage/root_size_spec.rb'
- './ee/spec/models/note_spec.rb'
@@ -1963,9 +1909,7 @@
- './ee/spec/models/project_team_spec.rb'
- './ee/spec/models/protected_branch/required_code_owners_section_spec.rb'
- './ee/spec/models/protected_branch/unprotect_access_level_spec.rb'
-- './ee/spec/models/protected_environment/deploy_access_level_spec.rb'
- './ee/spec/models/protected_environments/approval_rule_spec.rb'
-- './ee/spec/models/protected_environments/approval_summary_spec.rb'
- './ee/spec/models/protected_environment_spec.rb'
- './ee/spec/models/push_rule_spec.rb'
- './ee/spec/models/release_highlight_spec.rb'
@@ -2030,7 +1974,6 @@
- './ee/spec/models/vulnerability_user_mention_spec.rb'
- './ee/spec/models/weight_note_spec.rb'
- './ee/spec/models/work_item_spec.rb'
-- './ee/spec/models/work_items/widgets/verification_status_spec.rb'
- './ee/spec/policies/approval_merge_request_rule_policy_spec.rb'
- './ee/spec/policies/approval_project_rule_policy_spec.rb'
- './ee/spec/policies/approval_state_policy_spec.rb'
@@ -2265,7 +2208,6 @@
- './ee/spec/requests/api/graphql/mutations/requirements_management/create_requirement_spec.rb'
- './ee/spec/requests/api/graphql/mutations/requirements_management/export_requirements_spec.rb'
- './ee/spec/requests/api/graphql/mutations/requirements_management/update_requirement_spec.rb'
-- './ee/spec/requests/api/graphql/mutations/security_finding/create_issue_spec.rb'
- './ee/spec/requests/api/graphql/mutations/security_policy/assign_security_policy_project_spec.rb'
- './ee/spec/requests/api/graphql/mutations/security_policy/commit_scan_execution_policy_spec.rb'
- './ee/spec/requests/api/graphql/mutations/security_policy/create_security_policy_project_spec.rb'
@@ -2337,7 +2279,6 @@
- './ee/spec/requests/api/ldap_spec.rb'
- './ee/spec/requests/api/license_spec.rb'
- './ee/spec/requests/api/managed_licenses_spec.rb'
-- './ee/spec/requests/api/markdown_golden_master_spec.rb'
- './ee/spec/requests/api/members_spec.rb'
- './ee/spec/requests/api/merge_request_approval_rules_spec.rb'
- './ee/spec/requests/api/merge_request_approval_settings_spec.rb'
@@ -2367,7 +2308,6 @@
- './ee/spec/requests/api/resource_label_events_spec.rb'
- './ee/spec/requests/api/resource_weight_events_spec.rb'
- './ee/spec/requests/api/saml_group_links_spec.rb'
-- './ee/spec/requests/api/scim_spec.rb'
- './ee/spec/requests/api/search_spec.rb'
- './ee/spec/requests/api/settings_spec.rb'
- './ee/spec/requests/api/status_checks_spec.rb'
@@ -2449,14 +2389,12 @@
- './ee/spec/routing/user_routing_spec.rb'
- './ee/spec/routing/webhook_routes_spec.rb'
- './ee/spec/serializers/analytics/cycle_analytics/event_entity_spec.rb'
-- './ee/spec/serializers/analytics/cycle_analytics/stage_entity_spec.rb'
- './ee/spec/serializers/analytics/cycle_analytics/value_stream_errors_serializer_spec.rb'
- './ee/spec/serializers/audit_event_entity_spec.rb'
- './ee/spec/serializers/audit_event_serializer_spec.rb'
- './ee/spec/serializers/autocomplete/group_entity_spec.rb'
- './ee/spec/serializers/autocomplete/group_serializer_spec.rb'
- './ee/spec/serializers/blocking_merge_request_entity_spec.rb'
-- './ee/spec/serializers/board_serializer_spec.rb'
- './ee/spec/serializers/clusters/deployment_entity_spec.rb'
- './ee/spec/serializers/clusters/environment_entity_spec.rb'
- './ee/spec/serializers/clusters/environment_serializer_spec.rb'
@@ -2469,7 +2407,6 @@
- './ee/spec/serializers/dependency_list_serializer_spec.rb'
- './ee/spec/serializers/ee/admin/user_entity_spec.rb'
- './ee/spec/serializers/ee/blob_entity_spec.rb'
-- './ee/spec/serializers/ee/board_simple_entity_spec.rb'
- './ee/spec/serializers/ee/build_details_entity_spec.rb'
- './ee/spec/serializers/ee/ci/job_entity_spec.rb'
- './ee/spec/serializers/ee/ci/pipeline_entity_spec.rb'
@@ -2596,7 +2533,6 @@
- './ee/spec/services/app_sec/fuzzing/api/ci_configuration_create_service_spec.rb'
- './ee/spec/services/app_sec/fuzzing/coverage/corpuses/create_service_spec.rb'
- './ee/spec/services/arkose/blocked_users_report_service_spec.rb'
-- './ee/spec/services/arkose/user_verification_service_spec.rb'
- './ee/spec/services/audit_events/build_service_spec.rb'
- './ee/spec/services/audit_events/custom_audit_event_service_spec.rb'
- './ee/spec/services/audit_event_service_spec.rb'
@@ -2744,11 +2680,9 @@
- './ee/spec/services/ee/issues/after_create_service_spec.rb'
- './ee/spec/services/ee/issues/build_from_vulnerability_service_spec.rb'
- './ee/spec/services/ee/issues/clone_service_spec.rb'
-- './ee/spec/services/ee/issues/close_service_spec.rb'
- './ee/spec/services/ee/issues/create_from_vulnerability_data_service_spec.rb'
- './ee/spec/services/ee/issues/create_service_spec.rb'
- './ee/spec/services/ee/issues/move_service_spec.rb'
-- './ee/spec/services/ee/issues/reopen_service_spec.rb'
- './ee/spec/services/ee/issues/update_service_spec.rb'
- './ee/spec/services/ee/keys/destroy_service_spec.rb'
- './ee/spec/services/ee/labels/create_service_spec.rb'
@@ -2806,7 +2740,6 @@
- './ee/spec/services/ee/users/build_service_spec.rb'
- './ee/spec/services/ee/users/create_service_spec.rb'
- './ee/spec/services/ee/users/destroy_service_spec.rb'
-- './ee/spec/services/ee/users/migrate_to_ghost_user_service_spec.rb'
- './ee/spec/services/ee/users/reject_service_spec.rb'
- './ee/spec/services/ee/users/update_service_spec.rb'
- './ee/spec/services/ee/vulnerability_feedback_module/update_service_spec.rb'
@@ -2887,9 +2820,7 @@
- './ee/spec/services/geo/repository_verification_secondary_service_spec.rb'
- './ee/spec/services/geo/reset_checksum_event_store_spec.rb'
- './ee/spec/services/geo/wiki_sync_service_spec.rb'
-- './ee/spec/services/gitlab_subscriptions/activate_awaiting_users_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/activate_service_spec.rb'
-- './ee/spec/services/gitlab_subscriptions/apply_trial_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/check_future_renewal_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/create_hand_raise_lead_service_spec.rb'
- './ee/spec/services/gitlab_subscriptions/create_service_spec.rb'
@@ -2972,10 +2903,6 @@
- './ee/spec/services/milestones/destroy_service_spec.rb'
- './ee/spec/services/milestones/promote_service_spec.rb'
- './ee/spec/services/milestones/update_service_spec.rb'
-- './ee/spec/services/namespaces/free_user_cap/deactivate_members_over_limit_service_spec.rb'
-- './ee/spec/services/namespaces/free_user_cap/remove_group_group_links_outside_hierarchy_service_spec.rb'
-- './ee/spec/services/namespaces/free_user_cap/remove_project_group_links_outside_hierarchy_service_spec.rb'
-- './ee/spec/services/namespaces/free_user_cap/update_prevent_sharing_outside_hierarchy_service_spec.rb'
- './ee/spec/services/namespaces/in_product_marketing_emails_service_spec.rb'
- './ee/spec/services/namespaces/storage/email_notification_service_spec.rb'
- './ee/spec/services/path_locks/lock_service_spec.rb'
@@ -3004,8 +2931,6 @@
- './ee/spec/services/projects/hashed_storage/migrate_repository_service_spec.rb'
- './ee/spec/services/projects/import_export/export_service_spec.rb'
- './ee/spec/services/projects/import_service_spec.rb'
-- './ee/spec/services/projects/licenses/create_policy_service_spec.rb'
-- './ee/spec/services/projects/licenses/update_policy_service_spec.rb'
- './ee/spec/services/projects/mark_for_deletion_service_spec.rb'
- './ee/spec/services/projects/open_issues_count_service_spec.rb'
- './ee/spec/services/projects/operations/update_service_spec.rb'
@@ -3034,7 +2959,6 @@
- './ee/spec/services/requirements_management/map_export_fields_service_spec.rb'
- './ee/spec/services/requirements_management/prepare_import_csv_service_spec.rb'
- './ee/spec/services/requirements_management/process_test_reports_service_spec.rb'
-- './ee/spec/services/requirements_management/update_requirement_service_spec.rb'
- './ee/spec/services/resource_access_tokens/create_service_spec.rb'
- './ee/spec/services/resource_access_tokens/revoke_service_spec.rb'
- './ee/spec/services/resource_events/change_weight_service_spec.rb'
@@ -3047,7 +2971,6 @@
- './ee/spec/services/security/auto_fix_service_spec.rb'
- './ee/spec/services/security/configuration/save_auto_fix_service_spec.rb'
- './ee/spec/services/security/dependency_list_service_spec.rb'
-- './ee/spec/services/security/findings/cleanup_service_spec.rb'
- './ee/spec/services/security/ingestion/finding_map_collection_spec.rb'
- './ee/spec/services/security/ingestion/finding_map_spec.rb'
- './ee/spec/services/security/ingestion/ingest_report_service_spec.rb'
@@ -3120,7 +3043,6 @@
- './ee/spec/services/todo_service_spec.rb'
- './ee/spec/services/upcoming_reconciliations/update_service_spec.rb'
- './ee/spec/services/user_permissions/export_service_spec.rb'
-- './ee/spec/services/users/abuse/excessive_projects_download_ban_service_spec.rb'
- './ee/spec/services/users/abuse/git_abuse/namespace_throttle_service_spec.rb'
- './ee/spec/services/users/abuse/namespace_bans/create_service_spec.rb'
- './ee/spec/services/users/abuse/namespace_bans/destroy_service_spec.rb'
@@ -3169,7 +3091,6 @@
- './ee/spec/tasks/gitlab/license_rake_spec.rb'
- './ee/spec/tasks/gitlab/seed/group_seed_rake_spec.rb'
- './ee/spec/tasks/gitlab/spdx_rake_spec.rb'
-- './ee/spec/tasks/gitlab/uploads/migrate_rake_spec.rb'
- './ee/spec/validators/json_schema_validator_spec.rb'
- './ee/spec/validators/ldap_filter_validator_spec.rb'
- './ee/spec/validators/password/complexity_validator_spec.rb'
@@ -3202,8 +3123,6 @@
- './ee/spec/views/groups/security/discover/show.html.haml_spec.rb'
- './ee/spec/views/groups/settings/_remove.html.haml_spec.rb'
- './ee/spec/views/groups/settings/reporting/show.html.haml_spec.rb'
-- './ee/spec/views/groups/show.html.haml_spec.rb'
-- './ee/spec/views/groups/usage_quotas/index.html.haml_spec.rb'
- './ee/spec/views/layouts/application.html.haml_spec.rb'
- './ee/spec/views/layouts/checkout.html.haml_spec.rb'
- './ee/spec/views/layouts/header/_current_user_dropdown.html.haml_spec.rb'
@@ -3220,12 +3139,9 @@
- './ee/spec/views/operations/index.html.haml_spec.rb'
- './ee/spec/views/profiles/preferences/show.html.haml_spec.rb'
- './ee/spec/views/projects/edit.html.haml_spec.rb'
-- './ee/spec/views/projects/empty.html.haml_spec.rb'
- './ee/spec/views/projects/issues/show.html.haml_spec.rb'
-- './ee/spec/views/projects/merge_requests/_merge_request_approvals.html.haml_spec.rb'
- './ee/spec/views/projects/_merge_request_status_checks_settings.html.haml_spec.rb'
- './ee/spec/views/projects/on_demand_scans/index.html.haml_spec.rb'
-- './ee/spec/views/projects/pipelines/_tabs_content.html.haml_spec.rb'
- './ee/spec/views/projects/project_members/index.html.haml_spec.rb'
- './ee/spec/views/projects/security/corpus_management/show.html.haml_spec.rb'
- './ee/spec/views/projects/security/dast_profiles/show.html.haml_spec.rb'
@@ -3237,12 +3153,8 @@
- './ee/spec/views/projects/security/policies/index.html.haml_spec.rb'
- './ee/spec/views/projects/security/sast_configuration/show.html.haml_spec.rb'
- './ee/spec/views/projects/settings/subscriptions/_index.html.haml_spec.rb'
-- './ee/spec/views/projects/show.html.haml_spec.rb'
-- './ee/spec/views/registrations/groups/new.html.haml_spec.rb'
- './ee/spec/views/registrations/groups_projects/new.html.haml_spec.rb'
-- './ee/spec/views/registrations/projects/new.html.haml_spec.rb'
- './ee/spec/views/registrations/welcome/continuous_onboarding_getting_started.html.haml_spec.rb'
-- './ee/spec/views/registrations/welcome/show.html.haml_spec.rb'
- './ee/spec/views/search/_category.html.haml_spec.rb'
- './ee/spec/views/shared/billings/_billing_plan_actions.html.haml_spec.rb'
- './ee/spec/views/shared/billings/_billing_plan.html.haml_spec.rb'
@@ -3253,7 +3165,6 @@
- './ee/spec/views/shared/credentials_inventory/_expiry_date.html.haml_spec.rb'
- './ee/spec/views/shared/credentials_inventory/gpg_keys/_gpg_key.html.haml_spec.rb'
- './ee/spec/views/shared/credentials_inventory/personal_access_tokens/_personal_access_token.html.haml_spec.rb'
-- './ee/spec/views/shared/credentials_inventory/project_access_tokens/_project_access_token.html.haml_spec.rb'
- './ee/spec/views/shared/credentials_inventory/ssh_keys/_ssh_key.html.haml_spec.rb'
- './ee/spec/views/shared/issuable/_approver_suggestion.html.haml_spec.rb'
- './ee/spec/views/shared/issuable/_epic_dropdown.html.haml_spec.rb'
@@ -3271,8 +3182,6 @@
- './ee/spec/views/subscriptions/buy_storage.html.haml_spec.rb'
- './ee/spec/views/subscriptions/groups/edit.html.haml_spec.rb'
- './ee/spec/views/subscriptions/new.html.haml_spec.rb'
-- './ee/spec/views/trial_registrations/new.html.haml_spec.rb'
-- './ee/spec/views/trials/_skip_trial.html.haml_spec.rb'
- './ee/spec/workers/active_user_count_threshold_worker_spec.rb'
- './ee/spec/workers/adjourned_group_deletion_worker_spec.rb'
- './ee/spec/workers/adjourned_project_deletion_worker_spec.rb'
@@ -3336,7 +3245,6 @@
- './ee/spec/workers/geo/batch_event_create_worker_spec.rb'
- './ee/spec/workers/geo/batch/project_registry_scheduler_worker_spec.rb'
- './ee/spec/workers/geo/batch/project_registry_worker_spec.rb'
-- './ee/spec/workers/geo/container_repository_sync_dispatch_worker_spec.rb'
- './ee/spec/workers/geo/container_repository_sync_worker_spec.rb'
- './ee/spec/workers/geo/create_repository_updated_event_worker_spec.rb'
- './ee/spec/workers/geo/design_repository_shard_sync_worker_spec.rb'
@@ -3391,7 +3299,6 @@
- './ee/spec/workers/merge_request_reset_approvals_worker_spec.rb'
- './ee/spec/workers/merge_requests/stream_approval_audit_event_worker_spec.rb'
- './ee/spec/workers/merge_requests/sync_code_owner_approval_rules_worker_spec.rb'
-- './ee/spec/workers/namespaces/free_user_cap/remediation_worker_spec.rb'
- './ee/spec/workers/namespaces/sync_namespace_name_worker_spec.rb'
- './ee/spec/workers/new_epic_worker_spec.rb'
- './ee/spec/workers/personal_access_tokens/groups/policy_worker_spec.rb'
@@ -3410,8 +3317,6 @@
- './ee/spec/workers/scan_security_report_secrets_worker_spec.rb'
- './ee/spec/workers/security/auto_fix_worker_spec.rb'
- './ee/spec/workers/security/create_orchestration_policy_worker_spec.rb'
-- './ee/spec/workers/security/findings/cleanup_worker_spec.rb'
-- './ee/spec/workers/security/findings/delete_by_job_id_worker_spec.rb'
- './ee/spec/workers/security/orchestration_policy_rule_schedule_namespace_worker_spec.rb'
- './ee/spec/workers/security/orchestration_policy_rule_schedule_worker_spec.rb'
- './ee/spec/workers/security/store_scans_worker_spec.rb'
@@ -3454,7 +3359,6 @@
- './spec/config/application_spec.rb'
- './spec/config/inject_enterprise_edition_module_spec.rb'
- './spec/config/mail_room_spec.rb'
-- './spec/config/metrics/aggregates/aggregated_metrics_spec.rb'
- './spec/config/object_store_settings_spec.rb'
- './spec/config/settings_spec.rb'
- './spec/config/smime_signature_settings_spec.rb'
@@ -3488,10 +3392,7 @@
- './spec/controllers/admin/users_controller_spec.rb'
- './spec/controllers/application_controller_spec.rb'
- './spec/controllers/autocomplete_controller_spec.rb'
-- './spec/controllers/boards/issues_controller_spec.rb'
-- './spec/controllers/boards/lists_controller_spec.rb'
- './spec/controllers/chaos_controller_spec.rb'
-- './spec/controllers/concerns/boards_responses_spec.rb'
- './spec/controllers/concerns/check_rate_limit_spec.rb'
- './spec/controllers/concerns/checks_collaboration_spec.rb'
- './spec/controllers/concerns/confirm_email_warning_spec.rb'
@@ -3563,7 +3464,6 @@
- './spec/controllers/groups/variables_controller_spec.rb'
- './spec/controllers/health_check_controller_spec.rb'
- './spec/controllers/help_controller_spec.rb'
-- './spec/controllers/import/available_namespaces_controller_spec.rb'
- './spec/controllers/import/bitbucket_controller_spec.rb'
- './spec/controllers/import/bitbucket_server_controller_spec.rb'
- './spec/controllers/import/bulk_imports_controller_spec.rb'
@@ -3670,7 +3570,6 @@
- './spec/controllers/projects/pipelines_settings_controller_spec.rb'
- './spec/controllers/projects/pipelines/stages_controller_spec.rb'
- './spec/controllers/projects/pipelines/tests_controller_spec.rb'
-- './spec/controllers/projects/product_analytics_controller_spec.rb'
- './spec/controllers/projects/project_members_controller_spec.rb'
- './spec/controllers/projects/prometheus/alerts_controller_spec.rb'
- './spec/controllers/projects/prometheus/metrics_controller_spec.rb'
@@ -3707,7 +3606,6 @@
- './spec/controllers/projects/web_ide_terminals_controller_spec.rb'
- './spec/controllers/projects/wikis_controller_spec.rb'
- './spec/controllers/registrations_controller_spec.rb'
-- './spec/controllers/registrations/welcome_controller_spec.rb'
- './spec/controllers/repositories/git_http_controller_spec.rb'
- './spec/controllers/repositories/lfs_storage_controller_spec.rb'
- './spec/controllers/root_controller_spec.rb'
@@ -3935,7 +3833,6 @@
- './spec/features/ide/clientside_preview_csp_spec.rb'
- './spec/features/ide_spec.rb'
- './spec/features/ide/static_object_external_storage_csp_spec.rb'
-- './spec/features/ide/user_commits_changes_spec.rb'
- './spec/features/ide/user_opens_merge_request_spec.rb'
- './spec/features/import/manifest_import_spec.rb'
- './spec/features/invites_spec.rb'
@@ -4058,7 +3955,6 @@
- './spec/features/merge_request/user_edits_reviewers_sidebar_spec.rb'
- './spec/features/merge_request/user_expands_diff_spec.rb'
- './spec/features/merge_request/user_interacts_with_batched_mr_diffs_spec.rb'
-- './spec/features/merge_request/user_jumps_to_discussion_spec.rb'
- './spec/features/merge_request/user_locks_discussion_spec.rb'
- './spec/features/merge_request/user_manages_subscription_spec.rb'
- './spec/features/merge_request/user_marks_merge_request_as_draft_spec.rb'
@@ -4113,7 +4009,6 @@
- './spec/features/merge_request/user_views_diffs_spec.rb'
- './spec/features/merge_request/user_views_merge_request_from_deleted_fork_spec.rb'
- './spec/features/merge_request/user_views_open_merge_request_spec.rb'
-- './spec/features/merge_request/user_views_user_status_on_merge_request_spec.rb'
- './spec/features/milestone_spec.rb'
- './spec/features/milestones/user_creates_milestone_spec.rb'
- './spec/features/milestones/user_deletes_milestone_spec.rb'
@@ -4174,7 +4069,6 @@
- './spec/features/projects/blobs/blob_show_spec.rb'
- './spec/features/projects/blobs/edit_spec.rb'
- './spec/features/projects/blobs/shortcuts_blob_spec.rb'
-- './spec/features/projects/blobs/user_creates_new_blob_in_new_project_spec.rb'
- './spec/features/projects/blobs/user_follows_pipeline_suggest_nudge_spec.rb'
- './spec/features/projects/blobs/user_views_pipeline_editor_button_spec.rb'
- './spec/features/projects/branches/download_buttons_spec.rb'
@@ -4334,14 +4228,8 @@
- './spec/features/projects/pages/user_edits_lets_encrypt_settings_spec.rb'
- './spec/features/projects/pages/user_edits_settings_spec.rb'
- './spec/features/projects/pipeline_schedules_spec.rb'
-- './spec/features/projects/pipelines/legacy_pipeline_spec.rb'
-- './spec/features/projects/pipelines/legacy_pipelines_spec.rb'
- './spec/features/projects/pipelines/pipeline_spec.rb'
- './spec/features/projects/pipelines/pipelines_spec.rb'
-- './spec/features/projects/product_analytics/events_spec.rb'
-- './spec/features/projects/product_analytics/graphs_spec.rb'
-- './spec/features/projects/product_analytics/setup_spec.rb'
-- './spec/features/projects/product_analytics/test_spec.rb'
- './spec/features/projects/raw/user_interacts_with_raw_endpoint_spec.rb'
- './spec/features/projects/releases/user_creates_release_spec.rb'
- './spec/features/projects/releases/user_views_edit_release_spec.rb'
@@ -4490,7 +4378,6 @@
- './spec/features/users/anonymous_sessions_spec.rb'
- './spec/features/users/bizible_csp_spec.rb'
- './spec/features/users/confirmation_spec.rb'
-- './spec/features/user_sees_marketing_header_spec.rb'
- './spec/features/user_sees_revert_modal_spec.rb'
- './spec/features/users/email_verification_on_login_spec.rb'
- './spec/features/users/google_analytics_csp_spec.rb'
@@ -4570,7 +4457,6 @@
- './spec/finders/feature_flags_user_lists_finder_spec.rb'
- './spec/finders/fork_projects_finder_spec.rb'
- './spec/finders/fork_targets_finder_spec.rb'
-- './spec/finders/freeze_periods_finder_spec.rb'
- './spec/finders/group_descendants_finder_spec.rb'
- './spec/finders/group_members_finder_spec.rb'
- './spec/finders/group_projects_finder_spec.rb'
@@ -4590,7 +4476,6 @@
- './spec/finders/members_finder_spec.rb'
- './spec/finders/merge_request/metrics_finder_spec.rb'
- './spec/finders/merge_requests/by_approvals_finder_spec.rb'
-- './spec/finders/merge_requests_finder/params_spec.rb'
- './spec/finders/merge_requests_finder_spec.rb'
- './spec/finders/merge_requests/oldest_per_commit_finder_spec.rb'
- './spec/finders/merge_request_target_project_finder_spec.rb'
@@ -4705,7 +4590,6 @@
- './spec/frontend/fixtures/u2f.rb'
- './spec/frontend/fixtures/webauthn.rb'
- './spec/graphql/features/authorization_spec.rb'
-- './spec/graphql/features/feature_flag_spec.rb'
- './spec/graphql/gitlab_schema_spec.rb'
- './spec/graphql/graphql_triggers_spec.rb'
- './spec/graphql/mutations/alert_management/alerts/set_assignees_spec.rb'
@@ -4803,7 +4687,6 @@
- './spec/graphql/mutations/todos/restore_spec.rb'
- './spec/graphql/mutations/user_callouts/create_spec.rb'
- './spec/graphql/mutations/work_items/update_task_spec.rb'
-- './spec/graphql/mutations/work_items/update_widgets_spec.rb'
- './spec/graphql/resolvers/admin/analytics/usage_trends/measurements_resolver_spec.rb'
- './spec/graphql/resolvers/alert_management/alert_resolver_spec.rb'
- './spec/graphql/resolvers/alert_management/alert_status_counts_resolver_spec.rb'
@@ -4867,7 +4750,6 @@
- './spec/graphql/resolvers/group_packages_resolver_spec.rb'
- './spec/graphql/resolvers/group_resolver_spec.rb'
- './spec/graphql/resolvers/groups_resolver_spec.rb'
-- './spec/graphql/resolvers/issues_resolver_spec.rb'
- './spec/graphql/resolvers/issue_status_counts_resolver_spec.rb'
- './spec/graphql/resolvers/kas/agent_configurations_resolver_spec.rb'
- './spec/graphql/resolvers/kas/agent_connections_resolver_spec.rb'
@@ -5367,8 +5249,6 @@
- './spec/initializers/action_mailer_hooks_spec.rb'
- './spec/initializers/active_record_locking_spec.rb'
- './spec/initializers/asset_proxy_setting_spec.rb'
-- './spec/initializers/attr_encrypted_no_db_connection_spec.rb'
-- './spec/initializers/attr_encrypted_thread_safe_spec.rb'
- './spec/initializers/carrierwave_patch_spec.rb'
- './spec/initializers/cookies_serializer_spec.rb'
- './spec/initializers/database_config_spec.rb'
@@ -5436,7 +5316,6 @@
- './spec/lib/api/entities/nuget/search_result_spec.rb'
- './spec/lib/api/entities/package_spec.rb'
- './spec/lib/api/entities/personal_access_token_spec.rb'
-- './spec/lib/api/entities/personal_access_token_with_details_spec.rb'
- './spec/lib/api/entities/plan_limit_spec.rb'
- './spec/lib/api/entities/project_import_failed_relation_spec.rb'
- './spec/lib/api/entities/project_import_status_spec.rb'
@@ -5469,7 +5348,6 @@
- './spec/lib/api/helpers_spec.rb'
- './spec/lib/api/helpers/variables_helpers_spec.rb'
- './spec/lib/api/helpers/version_spec.rb'
-- './spec/lib/api/integrations/slack/events/url_verification_spec.rb'
- './spec/lib/api/support/git_access_actor_spec.rb'
- './spec/lib/api/validations/validators/absence_spec.rb'
- './spec/lib/api/validations/validators/array_none_any_spec.rb'
@@ -5819,7 +5697,6 @@
- './spec/lib/gitlab/auth/unique_ips_limiter_spec.rb'
- './spec/lib/gitlab/auth/user_access_denied_reason_spec.rb'
- './spec/lib/gitlab/avatar_cache_spec.rb'
-- './spec/lib/gitlab/background_migration/add_primary_email_to_emails_if_user_confirmed_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_ci_queuing_tables_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_draft_status_on_merge_requests_with_corrected_regex_spec.rb'
@@ -5841,7 +5718,6 @@
- './spec/lib/gitlab/background_migration/backfill_project_member_namespace_id_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_project_repositories_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_project_settings_spec.rb'
-- './spec/lib/gitlab/background_migration/backfill_projects_with_coverage_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_snippet_repositories_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_topics_title_spec.rb'
- './spec/lib/gitlab/background_migration/backfill_upvotes_count_on_issues_spec.rb'
@@ -5850,7 +5726,6 @@
- './spec/lib/gitlab/background_migration/backfill_work_item_type_id_for_issues_spec.rb'
- './spec/lib/gitlab/background_migration/base_job_spec.rb'
- './spec/lib/gitlab/background_migration/batched_migration_job_spec.rb'
-- './spec/lib/gitlab/background_migration/batching_strategies/backfill_issue_work_item_type_batching_strategy_spec.rb'
- './spec/lib/gitlab/background_migration/batching_strategies/backfill_project_namespace_per_group_batching_strategy_spec.rb'
- './spec/lib/gitlab/background_migration/batching_strategies/backfill_project_statistics_with_container_registry_size_batching_strategy_spec.rb'
- './spec/lib/gitlab/background_migration/batching_strategies/base_strategy_spec.rb'
@@ -5919,7 +5794,6 @@
- './spec/lib/gitlab/backtrace_cleaner_spec.rb'
- './spec/lib/gitlab/bare_repository_import/importer_spec.rb'
- './spec/lib/gitlab/bare_repository_import/repository_spec.rb'
-- './spec/lib/gitlab/batch_pop_queueing_spec.rb'
- './spec/lib/gitlab/batch_worker_context_spec.rb'
- './spec/lib/gitlab/bitbucket_import/importer_spec.rb'
- './spec/lib/gitlab/bitbucket_import/project_creator_spec.rb'
@@ -5986,7 +5860,6 @@
- './spec/lib/gitlab/ci/badge/release/template_spec.rb'
- './spec/lib/gitlab/ci/build/artifacts/adapters/gzip_stream_spec.rb'
- './spec/lib/gitlab/ci/build/artifacts/adapters/raw_stream_spec.rb'
-- './spec/lib/gitlab/ci/build/artifacts/adapters/zip_stream_spec.rb'
- './spec/lib/gitlab/ci/build/artifacts/metadata/entry_spec.rb'
- './spec/lib/gitlab/ci/build/artifacts/metadata_spec.rb'
- './spec/lib/gitlab/ci/build/artifacts/path_spec.rb'
@@ -6160,8 +6033,6 @@
- './spec/lib/gitlab/ci/pipeline/quota/deployments_spec.rb'
- './spec/lib/gitlab/ci/pipeline/seed/build/cache_spec.rb'
- './spec/lib/gitlab/ci/pipeline/seed/build_spec.rb'
-- './spec/lib/gitlab/ci/pipeline/seed/deployment_spec.rb'
-- './spec/lib/gitlab/ci/pipeline/seed/environment_spec.rb'
- './spec/lib/gitlab/ci/pipeline/seed/pipeline_spec.rb'
- './spec/lib/gitlab/ci/pipeline/seed/processable/resource_group_spec.rb'
- './spec/lib/gitlab/ci/pipeline/seed/stage_spec.rb'
@@ -6553,7 +6424,6 @@
- './spec/lib/gitlab/discussions_diff/file_collection_spec.rb'
- './spec/lib/gitlab/discussions_diff/highlight_cache_spec.rb'
- './spec/lib/gitlab/doctor/secrets_spec.rb'
-- './spec/lib/gitlab/doorkeeper_secret_storing/pbkdf2_sha512_spec.rb'
- './spec/lib/gitlab_edition_spec.rb'
- './spec/lib/gitlab/email/attachment_uploader_spec.rb'
- './spec/lib/gitlab/email/failure_handler_spec.rb'
@@ -6613,10 +6483,6 @@
- './spec/lib/gitlab/exclusive_lease_helpers/sleeping_lock_spec.rb'
- './spec/lib/gitlab/exclusive_lease_helpers_spec.rb'
- './spec/lib/gitlab/exclusive_lease_spec.rb'
-- './spec/lib/gitlab/experimentation/controller_concern_spec.rb'
-- './spec/lib/gitlab/experimentation/experiment_spec.rb'
-- './spec/lib/gitlab/experimentation/group_types_spec.rb'
-- './spec/lib/gitlab/experimentation_spec.rb'
- './spec/lib/gitlab/experiment/rollout/feature_spec.rb'
- './spec/lib/gitlab/external_authorization/access_spec.rb'
- './spec/lib/gitlab/external_authorization/cache_spec.rb'
@@ -6665,7 +6531,6 @@
- './spec/lib/gitlab/gitaly_client_spec.rb'
- './spec/lib/gitlab/gitaly_client/storage_settings_spec.rb'
- './spec/lib/gitlab/gitaly_client/util_spec.rb'
-- './spec/lib/gitlab/gitaly_client/wiki_service_spec.rb'
- './spec/lib/gitlab/git/attributes_at_ref_parser_spec.rb'
- './spec/lib/gitlab/git/attributes_parser_spec.rb'
- './spec/lib/gitlab/git/base_error_spec.rb'
@@ -6681,7 +6546,6 @@
- './spec/lib/gitlab/git/conflict/file_spec.rb'
- './spec/lib/gitlab/git/conflict/parser_spec.rb'
- './spec/lib/gitlab/git/conflict/resolver_spec.rb'
-- './spec/lib/gitlab/git/cross_repo_comparer_spec.rb'
- './spec/lib/gitlab/git/diff_collection_spec.rb'
- './spec/lib/gitlab/git/diff_spec.rb'
- './spec/lib/gitlab/git/diff_stats_collection_spec.rb'
@@ -6774,7 +6638,6 @@
- './spec/lib/gitlab/git/user_spec.rb'
- './spec/lib/gitlab/git/util_spec.rb'
- './spec/lib/gitlab/git/wiki_page_version_spec.rb'
-- './spec/lib/gitlab/git/wiki_spec.rb'
- './spec/lib/gitlab/git/wraps_gitaly_errors_spec.rb'
- './spec/lib/gitlab/global_id/deprecations_spec.rb'
- './spec/lib/gitlab/global_id_spec.rb'
@@ -6817,7 +6680,6 @@
- './spec/lib/gitlab/graphql/pagination/connections_spec.rb'
- './spec/lib/gitlab/graphql/pagination/externally_paginated_array_connection_spec.rb'
- './spec/lib/gitlab/graphql/pagination/keyset/connection_spec.rb'
-- './spec/lib/gitlab/graphql/pagination/keyset/last_items_spec.rb'
- './spec/lib/gitlab/graphql/pagination/offset_active_record_relation_connection_spec.rb'
- './spec/lib/gitlab/graphql/present/field_extension_spec.rb'
- './spec/lib/gitlab/graphql/queries_spec.rb'
@@ -6843,13 +6705,6 @@
- './spec/lib/gitlab/health_checks/middleware_spec.rb'
- './spec/lib/gitlab/health_checks/probes/collection_spec.rb'
- './spec/lib/gitlab/health_checks/puma_check_spec.rb'
-- './spec/lib/gitlab/health_checks/redis/cache_check_spec.rb'
-- './spec/lib/gitlab/health_checks/redis/queues_check_spec.rb'
-- './spec/lib/gitlab/health_checks/redis/rate_limiting_check_spec.rb'
-- './spec/lib/gitlab/health_checks/redis/redis_check_spec.rb'
-- './spec/lib/gitlab/health_checks/redis/sessions_check_spec.rb'
-- './spec/lib/gitlab/health_checks/redis/shared_state_check_spec.rb'
-- './spec/lib/gitlab/health_checks/redis/trace_chunks_check_spec.rb'
- './spec/lib/gitlab/health_checks/server_spec.rb'
- './spec/lib/gitlab/highlight_spec.rb'
- './spec/lib/gitlab/hook_data/base_builder_spec.rb'
@@ -6895,8 +6750,6 @@
- './spec/lib/gitlab/import_export/fast_hash_serializer_spec.rb'
- './spec/lib/gitlab/import_export/file_importer_spec.rb'
- './spec/lib/gitlab/import_export/fork_spec.rb'
-- './spec/lib/gitlab/import_export/group/legacy_tree_restorer_spec.rb'
-- './spec/lib/gitlab/import_export/group/legacy_tree_saver_spec.rb'
- './spec/lib/gitlab/import_export/group/object_builder_spec.rb'
- './spec/lib/gitlab/import_export/group/relation_factory_spec.rb'
- './spec/lib/gitlab/import_export/group/relation_tree_restorer_spec.rb'
@@ -7074,7 +6927,6 @@
- './spec/lib/gitlab/memory/reports_daemon_spec.rb'
- './spec/lib/gitlab/memory/reports/jemalloc_stats_spec.rb'
- './spec/lib/gitlab/memory/watchdog_spec.rb'
-- './spec/lib/gitlab/merge_requests/commit_message_generator_spec.rb'
- './spec/lib/gitlab/merge_requests/mergeability/check_result_spec.rb'
- './spec/lib/gitlab/merge_requests/mergeability/redis_interface_spec.rb'
- './spec/lib/gitlab/merge_requests/mergeability/results_store_spec.rb'
@@ -7353,7 +7205,6 @@
- './spec/lib/gitlab/sidekiq_middleware/duplicate_jobs/strategies/until_executing_spec.rb'
- './spec/lib/gitlab/sidekiq_middleware/extra_done_log_metadata_spec.rb'
- './spec/lib/gitlab/sidekiq_middleware/instrumentation_logger_spec.rb'
-- './spec/lib/gitlab/sidekiq_middleware/memory_killer_spec.rb'
- './spec/lib/gitlab/sidekiq_middleware/monitor_spec.rb'
- './spec/lib/gitlab/sidekiq_middleware/query_analyzer_spec.rb'
- './spec/lib/gitlab/sidekiq_middleware/server_metrics_spec.rb'
@@ -7513,13 +7364,12 @@
- './spec/lib/gitlab/usage/metrics/instrumentations/uuid_metric_spec.rb'
- './spec/lib/gitlab/usage/metrics/key_path_processor_spec.rb'
- './spec/lib/gitlab/usage/metrics/names_suggestions/generator_spec.rb'
-- './spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/constraints_spec.rb'
- './spec/lib/gitlab/usage/metrics/names_suggestions/relation_parsers/joins_spec.rb'
- './spec/lib/gitlab/usage/metrics/name_suggestion_spec.rb'
- './spec/lib/gitlab/usage/metric_spec.rb'
- './spec/lib/gitlab/usage/metrics/query_spec.rb'
- './spec/lib/gitlab/usage/service_ping/instrumented_payload_spec.rb'
-- './spec/lib/gitlab/usage/service_ping/legacy_metric_timing_decorator_spec.rb'
+- './spec/lib/gitlab/usage/service_ping/legacy_metric_metadata_decorator_spec.rb'
- './spec/lib/gitlab/usage/service_ping/payload_keys_processor_spec.rb'
- './spec/lib/gitlab/usage/service_ping_report_spec.rb'
- './spec/lib/gitlab/user_access_snippet_spec.rb'
@@ -7606,8 +7456,6 @@
- './spec/lib/security/ci_configuration/sast_iac_build_action_spec.rb'
- './spec/lib/security/ci_configuration/secret_detection_build_action_spec.rb'
- './spec/lib/security/report_schema_version_matcher_spec.rb'
-- './spec/lib/serializers/json_spec.rb'
-- './spec/lib/serializers/symbolized_json_spec.rb'
- './spec/lib/serializers/unsafe_json_spec.rb'
- './spec/lib/service_ping/build_payload_spec.rb'
- './spec/lib/service_ping/devops_report_spec.rb'
@@ -7686,15 +7534,6 @@
- './spec/mailers/notify_spec.rb'
- './spec/mailers/repository_check_mailer_spec.rb'
- './spec/metrics_server/metrics_server_spec.rb'
-- './spec/migrations/20210406144743_backfill_total_tuple_count_for_batched_migrations_spec.rb'
-- './spec/migrations/20210423160427_schedule_drop_invalid_vulnerabilities_spec.rb'
-- './spec/migrations/20210430134202_copy_adoption_snapshot_namespace_spec.rb'
-- './spec/migrations/20210430135954_copy_adoption_segments_namespace_spec.rb'
-- './spec/migrations/20210503105845_add_project_value_stream_id_to_project_stages_spec.rb'
-- './spec/migrations/20210511142748_schedule_drop_invalid_vulnerabilities2_spec.rb'
-- './spec/migrations/20210514063252_schedule_cleanup_orphaned_lfs_objects_projects_spec.rb'
-- './spec/migrations/20210601073400_fix_total_stage_in_vsa_spec.rb'
-- './spec/migrations/20210601080039_group_protected_environments_add_index_and_constraint_spec.rb'
- './spec/migrations/20210603222333_remove_builds_email_service_from_services_spec.rb'
- './spec/migrations/20210610153556_delete_legacy_operations_feature_flags_spec.rb'
- './spec/migrations/2021061716138_cascade_delete_freeze_periods_spec.rb'
@@ -7706,7 +7545,6 @@
- './spec/migrations/20210805192450_update_trial_plans_ci_daily_pipeline_schedule_triggers_spec.rb'
- './spec/migrations/20210811122206_update_external_project_bots_spec.rb'
- './spec/migrations/20210812013042_remove_duplicate_project_authorizations_spec.rb'
-- './spec/migrations/20210818185845_backfill_projects_with_coverage_spec.rb'
- './spec/migrations/20210819145000_drop_temporary_columns_and_triggers_for_ci_builds_runner_session_spec.rb'
- './spec/migrations/20210831203408_upsert_base_work_item_types_spec.rb'
- './spec/migrations/20210902144144_drop_temporary_columns_and_triggers_for_ci_build_needs_spec.rb'
@@ -7803,11 +7641,9 @@
- './spec/migrations/20220801155858_schedule_disable_legacy_open_source_licence_for_recent_public_projects_spec.rb'
- './spec/migrations/20220802114351_reschedule_backfill_container_registry_size_into_project_statistics_spec.rb'
- './spec/migrations/20220802204737_remove_deactivated_user_highest_role_stats_spec.rb'
-- './spec/migrations/20220809002011_schedule_destroy_invalid_group_members_spec.rb'
- './spec/migrations/active_record/schema_spec.rb'
- './spec/migrations/add_default_project_approval_rules_vuln_allowed_spec.rb'
- './spec/migrations/add_epics_relative_position_spec.rb'
-- './spec/migrations/add_new_trail_plans_spec.rb'
- './spec/migrations/add_open_source_plan_spec.rb'
- './spec/migrations/add_premium_and_ultimate_plan_limits_spec.rb'
- './spec/migrations/add_triggers_to_integrations_type_new_spec.rb'
@@ -7816,9 +7652,7 @@
- './spec/migrations/associate_existing_dast_builds_with_variables_spec.rb'
- './spec/migrations/backfill_all_project_namespaces_spec.rb'
- './spec/migrations/backfill_cadence_id_for_boards_scoped_to_iteration_spec.rb'
-- './spec/migrations/backfill_clusters_integration_prometheus_enabled_spec.rb'
- './spec/migrations/backfill_cycle_analytics_aggregations_spec.rb'
-- './spec/migrations/backfill_escalation_policies_for_oncall_schedules_spec.rb'
- './spec/migrations/backfill_group_features_spec.rb'
- './spec/migrations/backfill_integrations_enable_ssl_verification_spec.rb'
- './spec/migrations/backfill_integrations_type_new_spec.rb'
@@ -7826,26 +7660,19 @@
- './spec/migrations/backfill_member_namespace_id_for_group_members_spec.rb'
- './spec/migrations/backfill_namespace_id_for_namespace_routes_spec.rb'
- './spec/migrations/backfill_namespace_id_for_project_routes_spec.rb'
-- './spec/migrations/backfill_nuget_temporary_packages_to_processing_status_spec.rb'
- './spec/migrations/backfill_project_import_level_spec.rb'
- './spec/migrations/backfill_project_namespaces_for_group_spec.rb'
- './spec/migrations/backfill_stage_event_hash_spec.rb'
- './spec/migrations/backfill_user_namespace_spec.rb'
- './spec/migrations/bulk_insert_cluster_enabled_grants_spec.rb'
- './spec/migrations/change_public_projects_cost_factor_spec.rb'
-- './spec/migrations/change_web_hook_events_default_spec.rb'
-- './spec/migrations/cleanup_after_add_primary_email_to_emails_if_user_confirmed_spec.rb'
- './spec/migrations/cleanup_after_fixing_issue_when_admin_changed_primary_email_spec.rb'
- './spec/migrations/cleanup_after_fixing_regression_with_new_users_emails_spec.rb'
- './spec/migrations/cleanup_backfill_integrations_enable_ssl_verification_spec.rb'
-- './spec/migrations/cleanup_move_container_registry_enabled_to_project_feature_spec.rb'
- './spec/migrations/cleanup_mr_attention_request_todos_spec.rb'
- './spec/migrations/cleanup_orphaned_routes_spec.rb'
-- './spec/migrations/clean_up_pending_builds_table_spec.rb'
- './spec/migrations/cleanup_remaining_orphan_invites_spec.rb'
- './spec/migrations/confirm_security_bot_spec.rb'
-- './spec/migrations/confirm_support_bot_user_spec.rb'
-- './spec/migrations/delete_security_findings_without_uuid_spec.rb'
- './spec/migrations/disable_expiration_policies_linked_to_no_container_images_spec.rb'
- './spec/migrations/disable_job_token_scope_when_unused_spec.rb'
- './spec/migrations/finalize_orphaned_routes_cleanup_spec.rb'
@@ -7855,23 +7682,17 @@
- './spec/migrations/fix_and_backfill_project_namespaces_for_projects_with_duplicate_name_spec.rb'
- './spec/migrations/fix_batched_migrations_old_format_job_arguments_spec.rb'
- './spec/migrations/generate_customers_dot_jwt_signing_key_spec.rb'
-- './spec/migrations/insert_ci_daily_pipeline_schedule_triggers_plan_limits_spec.rb'
-- './spec/migrations/migrate_elastic_index_settings_spec.rb'
- './spec/migrations/migrate_protected_attribute_to_pending_builds_spec.rb'
-- './spec/migrations/move_container_registry_enabled_to_project_features3_spec.rb'
- './spec/migrations/orphaned_invite_tokens_cleanup_spec.rb'
- './spec/migrations/populate_audit_event_streaming_verification_token_spec.rb'
-- './spec/migrations/populate_dismissal_information_for_vulnerabilities_spec.rb'
- './spec/migrations/populate_operation_visibility_permissions_spec.rb'
- './spec/migrations/queue_backfill_project_feature_package_registry_access_level_spec.rb'
- './spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_features_spec.rb'
- './spec/migrations/recreate_index_security_ci_builds_on_name_and_id_parser_with_new_features_spec.rb'
- './spec/migrations/remove_duplicate_dast_site_tokens_spec.rb'
- './spec/migrations/remove_duplicate_dast_site_tokens_with_same_token_spec.rb'
-- './spec/migrations/remove_hipchat_service_records_spec.rb'
- './spec/migrations/remove_invalid_integrations_spec.rb'
- './spec/migrations/remove_not_null_contraint_on_title_from_sprints_spec.rb'
-- './spec/migrations/remove_records_without_group_from_webhooks_table_spec.rb'
- './spec/migrations/remove_schedule_and_status_from_pending_alert_escalations_spec.rb'
- './spec/migrations/remove_wiki_notes_spec.rb'
- './spec/migrations/rename_services_to_integrations_spec.rb'
@@ -7883,22 +7704,17 @@
- './spec/migrations/reset_job_token_scope_enabled_spec.rb'
- './spec/migrations/reset_severity_levels_to_new_default_spec.rb'
- './spec/migrations/retry_backfill_traversal_ids_spec.rb'
-- './spec/migrations/schedule_add_primary_email_to_emails_if_user_confirmed_spec.rb'
- './spec/migrations/schedule_backfill_draft_status_on_merge_requests_corrected_regex_spec.rb'
- './spec/migrations/schedule_backfilling_the_namespace_id_for_vulnerability_reads_spec.rb'
- './spec/migrations/schedule_copy_ci_builds_columns_to_security_scans2_spec.rb'
-- './spec/migrations/schedule_disable_expiration_policies_linked_to_no_container_images_spec.rb'
- './spec/migrations/schedule_fix_incorrect_max_seats_used2_spec.rb'
- './spec/migrations/schedule_fix_incorrect_max_seats_used_spec.rb'
-- './spec/migrations/schedule_migrate_shared_vulnerability_scanners_spec.rb'
- './spec/migrations/schedule_populate_requirements_issue_id_spec.rb'
- './spec/migrations/schedule_purging_stale_security_scans_spec.rb'
- './spec/migrations/schedule_recalculate_vulnerability_finding_signatures_for_findings_spec.rb'
- './spec/migrations/schedule_security_setting_creation_spec.rb'
- './spec/migrations/schedule_set_correct_vulnerability_state_spec.rb'
- './spec/migrations/schedule_update_timelogs_null_spent_at_spec.rb'
-- './spec/migrations/schedule_update_timelogs_project_id_spec.rb'
-- './spec/migrations/schedule_update_users_where_two_factor_auth_required_from_group_spec.rb'
- './spec/migrations/set_default_job_token_scope_true_spec.rb'
- './spec/migrations/slice_merge_request_diff_commit_migrations_spec.rb'
- './spec/migrations/start_backfill_ci_queuing_tables_spec.rb'
@@ -7909,7 +7725,6 @@
- './spec/migrations/update_default_scan_method_of_dast_site_profile_spec.rb'
- './spec/migrations/update_integrations_trigger_type_new_on_insert_spec.rb'
- './spec/migrations/update_invalid_member_states_spec.rb'
-- './spec/migrations/update_invalid_web_hooks_spec.rb'
- './spec/models/ability_spec.rb'
- './spec/models/abuse_report_spec.rb'
- './spec/models/active_session_spec.rb'
@@ -7994,7 +7809,6 @@
- './spec/models/ci/daily_build_group_report_result_spec.rb'
- './spec/models/ci/deleted_object_spec.rb'
- './spec/models/ci/freeze_period_spec.rb'
-- './spec/models/ci/freeze_period_status_spec.rb'
- './spec/models/ci/group_spec.rb'
- './spec/models/ci/group_variable_spec.rb'
- './spec/models/ci/instance_variable_spec.rb'
@@ -8066,7 +7880,6 @@
- './spec/models/compare_spec.rb'
- './spec/models/concerns/access_requestable_spec.rb'
- './spec/models/concerns/after_commit_queue_spec.rb'
-- './spec/models/concerns/approvable_base_spec.rb'
- './spec/models/concerns/as_cte_spec.rb'
- './spec/models/concerns/atomic_internal_id_spec.rb'
- './spec/models/concerns/avatarable_spec.rb'
@@ -8080,13 +7893,11 @@
- './spec/models/concerns/bulk_insert_safe_spec.rb'
- './spec/models/concerns/cacheable_attributes_spec.rb'
- './spec/models/concerns/cache_markdown_field_spec.rb'
-- './spec/models/concerns/cascading_namespace_setting_attribute_spec.rb'
- './spec/models/concerns/case_sensitivity_spec.rb'
- './spec/models/concerns/checksummable_spec.rb'
- './spec/models/concerns/chronic_duration_attribute_spec.rb'
- './spec/models/concerns/ci/artifactable_spec.rb'
- './spec/models/concerns/ci/bulk_insertable_tags_spec.rb'
-- './spec/models/concerns/ci/has_deployment_name_spec.rb'
- './spec/models/concerns/ci/has_ref_spec.rb'
- './spec/models/concerns/ci/has_status_spec.rb'
- './spec/models/concerns/ci/has_variable_spec.rb'
@@ -8223,9 +8034,6 @@
- './spec/models/error_tracking/error_spec.rb'
- './spec/models/error_tracking/project_error_tracking_setting_spec.rb'
- './spec/models/event_spec.rb'
-- './spec/models/experiment_spec.rb'
-- './spec/models/experiment_subject_spec.rb'
-- './spec/models/experiment_user_spec.rb'
- './spec/models/exported_protected_branch_spec.rb'
- './spec/models/external_issue_spec.rb'
- './spec/models/external_pull_request_spec.rb'
@@ -8318,7 +8126,6 @@
- './spec/models/internal_id_spec.rb'
- './spec/models/issuable_severity_spec.rb'
- './spec/models/issue_assignee_spec.rb'
-- './spec/models/issue_collection_spec.rb'
- './spec/models/issue_email_participant_spec.rb'
- './spec/models/issue/email_spec.rb'
- './spec/models/issue_link_spec.rb'
@@ -8390,7 +8197,6 @@
- './spec/models/notification_setting_spec.rb'
- './spec/models/oauth_access_grant_spec.rb'
- './spec/models/oauth_access_token_spec.rb'
-- './spec/models/onboarding_progress_spec.rb'
- './spec/models/operations/feature_flags_client_spec.rb'
- './spec/models/operations/feature_flag_spec.rb'
- './spec/models/operations/feature_flags/strategy_spec.rb'
@@ -8566,7 +8372,6 @@
- './spec/models/users/group_callout_spec.rb'
- './spec/models/users/in_product_marketing_email_spec.rb'
- './spec/models/users/merge_request_interaction_spec.rb'
-- './spec/models/users/namespace_callout_spec.rb'
- './spec/models/user_spec.rb'
- './spec/models/users/project_callout_spec.rb'
- './spec/models/users/saved_reply_spec.rb'
@@ -8612,7 +8417,6 @@
- './spec/policies/commit_policy_spec.rb'
- './spec/policies/concerns/crud_policy_helpers_spec.rb'
- './spec/policies/concerns/policy_actor_spec.rb'
-- './spec/policies/concerns/readonly_abilities_spec.rb'
- './spec/policies/container_expiration_policy_policy_spec.rb'
- './spec/policies/custom_emoji_policy_spec.rb'
- './spec/policies/deploy_key_policy_spec.rb'
@@ -8624,7 +8428,6 @@
- './spec/policies/group_deploy_key_policy_spec.rb'
- './spec/policies/group_deploy_keys_group_policy_spec.rb'
- './spec/policies/group_member_policy_spec.rb'
-- './spec/policies/group_policy_spec.rb'
- './spec/policies/identity_provider_policy_spec.rb'
- './spec/policies/instance_metadata_policy_spec.rb'
- './spec/policies/integration_policy_spec.rb'
@@ -8962,7 +8765,6 @@
- './spec/requests/api/graphql/mutations/work_items/delete_task_spec.rb'
- './spec/requests/api/graphql/mutations/work_items/update_spec.rb'
- './spec/requests/api/graphql/mutations/work_items/update_task_spec.rb'
-- './spec/requests/api/graphql/mutations/work_items/update_widgets_spec.rb'
- './spec/requests/api/graphql/namespace/package_settings_spec.rb'
- './spec/requests/api/graphql/namespace/projects_spec.rb'
- './spec/requests/api/graphql/namespace_query_spec.rb'
@@ -9058,7 +8860,6 @@
- './spec/requests/api/import_bitbucket_server_spec.rb'
- './spec/requests/api/import_github_spec.rb'
- './spec/requests/api/integrations/jira_connect/subscriptions_spec.rb'
-- './spec/requests/api/integrations/slack/events_spec.rb'
- './spec/requests/api/integrations_spec.rb'
- './spec/requests/api/internal/base_spec.rb'
- './spec/requests/api/internal/container_registry/migration_spec.rb'
@@ -9078,7 +8879,6 @@
- './spec/requests/api/keys_spec.rb'
- './spec/requests/api/labels_spec.rb'
- './spec/requests/api/lint_spec.rb'
-- './spec/requests/api/markdown_golden_master_spec.rb'
- './spec/requests/api/markdown_snapshot_spec.rb'
- './spec/requests/api/markdown_spec.rb'
- './spec/requests/api/maven_packages_spec.rb'
@@ -9157,9 +8957,7 @@
- './spec/requests/api/users_preferences_spec.rb'
- './spec/requests/api/users_spec.rb'
- './spec/requests/api/v3/github_spec.rb'
-- './spec/requests/api/version_spec.rb'
- './spec/requests/api/wikis_spec.rb'
-- './spec/requests/boards/lists_controller_spec.rb'
- './spec/requests/concerns/planning_hierarchy_spec.rb'
- './spec/requests/content_security_policy_spec.rb'
- './spec/requests/dashboard_controller_spec.rb'
@@ -9201,8 +8999,6 @@
- './spec/requests/oauth/tokens_controller_spec.rb'
- './spec/requests/oauth_tokens_spec.rb'
- './spec/requests/openid_connect_spec.rb'
-- './spec/requests/product_analytics/collector_app_attack_spec.rb'
-- './spec/requests/product_analytics/collector_app_spec.rb'
- './spec/requests/profiles/notifications_controller_spec.rb'
- './spec/requests/projects/ci/promeheus_metrics/histograms_controller_spec.rb'
- './spec/requests/projects/cluster_agents_controller_spec.rb'
@@ -9258,7 +9054,6 @@
- './spec/requests/users_controller_spec.rb'
- './spec/requests/user_sends_malformed_strings_spec.rb'
- './spec/requests/users/group_callouts_spec.rb'
-- './spec/requests/users/namespace_callouts_spec.rb'
- './spec/requests/user_spoofs_ip_spec.rb'
- './spec/requests/users/project_callouts_spec.rb'
- './spec/requests/verifies_with_email_spec.rb'
@@ -9288,8 +9083,6 @@
- './spec/serializers/analytics_summary_serializer_spec.rb'
- './spec/serializers/base_discussion_entity_spec.rb'
- './spec/serializers/blob_entity_spec.rb'
-- './spec/serializers/board_serializer_spec.rb'
-- './spec/serializers/board_simple_entity_spec.rb'
- './spec/serializers/build_action_entity_spec.rb'
- './spec/serializers/build_artifact_entity_spec.rb'
- './spec/serializers/build_details_entity_spec.rb'
@@ -9498,7 +9291,6 @@
- './spec/services/boards/issues/move_service_spec.rb'
- './spec/services/boards/lists/create_service_spec.rb'
- './spec/services/boards/lists/destroy_service_spec.rb'
-- './spec/services/boards/lists/generate_service_spec.rb'
- './spec/services/boards/lists/list_service_spec.rb'
- './spec/services/boards/lists/move_service_spec.rb'
- './spec/services/boards/lists/update_service_spec.rb'
@@ -9528,7 +9320,6 @@
- './spec/services/chat_names/authorize_user_service_spec.rb'
- './spec/services/chat_names/find_user_service_spec.rb'
- './spec/services/ci/abort_pipelines_service_spec.rb'
-- './spec/services/ci/after_requeue_job_service_spec.rb'
- './spec/services/ci/append_build_trace_service_spec.rb'
- './spec/services/ci/archive_trace_service_spec.rb'
- './spec/services/ci/build_cancel_service_spec.rb'
@@ -9641,37 +9432,14 @@
- './spec/services/clusters/agents/refresh_authorization_service_spec.rb'
- './spec/services/clusters/agent_tokens/create_service_spec.rb'
- './spec/services/clusters/agent_tokens/track_usage_service_spec.rb'
-- './spec/services/clusters/applications/check_ingress_ip_address_service_spec.rb'
-- './spec/services/clusters/applications/check_installation_progress_service_spec.rb'
-- './spec/services/clusters/applications/check_uninstall_progress_service_spec.rb'
-- './spec/services/clusters/applications/check_upgrade_progress_service_spec.rb'
-- './spec/services/clusters/applications/create_service_spec.rb'
-- './spec/services/clusters/applications/destroy_service_spec.rb'
-- './spec/services/clusters/applications/install_service_spec.rb'
-- './spec/services/clusters/applications/patch_service_spec.rb'
-- './spec/services/clusters/applications/prometheus_config_service_spec.rb'
-- './spec/services/clusters/applications/prometheus_update_service_spec.rb'
-- './spec/services/clusters/applications/uninstall_service_spec.rb'
-- './spec/services/clusters/applications/update_service_spec.rb'
-- './spec/services/clusters/applications/upgrade_service_spec.rb'
-- './spec/services/clusters/aws/authorize_role_service_spec.rb'
-- './spec/services/clusters/aws/fetch_credentials_service_spec.rb'
-- './spec/services/clusters/aws/finalize_creation_service_spec.rb'
-- './spec/services/clusters/aws/provision_service_spec.rb'
-- './spec/services/clusters/aws/verify_provision_status_service_spec.rb'
- './spec/services/clusters/build_kubernetes_namespace_service_spec.rb'
- './spec/services/clusters/build_service_spec.rb'
- './spec/services/clusters/cleanup/project_namespace_service_spec.rb'
- './spec/services/clusters/cleanup/service_account_service_spec.rb'
- './spec/services/clusters/create_service_spec.rb'
- './spec/services/clusters/destroy_service_spec.rb'
-- './spec/services/clusters/gcp/fetch_operation_service_spec.rb'
-- './spec/services/clusters/gcp/finalize_creation_service_spec.rb'
-- './spec/services/clusters/gcp/provision_service_spec.rb'
-- './spec/services/clusters/gcp/verify_provision_status_service_spec.rb'
- './spec/services/clusters/integrations/create_service_spec.rb'
- './spec/services/clusters/integrations/prometheus_health_check_service_spec.rb'
-- './spec/services/clusters/kubernetes/configure_istio_ingress_service_spec.rb'
- './spec/services/clusters/kubernetes/create_or_update_namespace_service_spec.rb'
- './spec/services/clusters/kubernetes/create_or_update_service_account_service_spec.rb'
- './spec/services/clusters/kubernetes/fetch_kubernetes_token_service_spec.rb'
@@ -10238,14 +10006,12 @@
- './spec/services/users/destroy_service_spec.rb'
- './spec/services/users/dismiss_callout_service_spec.rb'
- './spec/services/users/dismiss_group_callout_service_spec.rb'
-- './spec/services/users/dismiss_namespace_callout_service_spec.rb'
- './spec/services/users/dismiss_project_callout_service_spec.rb'
- './spec/services/users/email_verification/generate_token_service_spec.rb'
- './spec/services/users/email_verification/validate_token_service_spec.rb'
- './spec/services/users/in_product_marketing_email_records_spec.rb'
- './spec/services/users/keys_count_service_spec.rb'
- './spec/services/users/last_push_event_service_spec.rb'
-- './spec/services/users/migrate_to_ghost_user_service_spec.rb'
- './spec/services/users/refresh_authorized_projects_service_spec.rb'
- './spec/services/users/registrations_build_service_spec.rb'
- './spec/services/users/reject_service_spec.rb'
@@ -10296,7 +10062,6 @@
- './spec/sidekiq_cluster/sidekiq_cluster_spec.rb'
- './spec/sidekiq/cron/job_gem_dependency_spec.rb'
- './spec/spam/concerns/has_spam_action_response_fields_spec.rb'
-- './spec/support_specs/database/multiple_databases_spec.rb'
- './spec/support_specs/database/prevent_cross_joins_spec.rb'
- './spec/support_specs/graphql/arguments_spec.rb'
- './spec/support_specs/graphql/field_selection_spec.rb'
@@ -10416,7 +10181,6 @@
- './spec/validators/addressable_url_validator_spec.rb'
- './spec/validators/any_field_validator_spec.rb'
- './spec/validators/array_members_validator_spec.rb'
-- './spec/validators/branch_filter_validator_spec.rb'
- './spec/validators/color_validator_spec.rb'
- './spec/validators/cron_freeze_period_timezone_validator_spec.rb'
- './spec/validators/cron_validator_spec.rb'
@@ -10510,7 +10274,6 @@
- './spec/views/profiles/notifications/show.html.haml_spec.rb'
- './spec/views/profiles/preferences/show.html.haml_spec.rb'
- './spec/views/profiles/show.html.haml_spec.rb'
-- './spec/views/projects/artifacts/_artifact.html.haml_spec.rb'
- './spec/views/projects/blob/_viewer.html.haml_spec.rb'
- './spec/views/projects/branches/index.html.haml_spec.rb'
- './spec/views/projects/commit/branches.html.haml_spec.rb'
@@ -10555,7 +10318,6 @@
- './spec/views/registrations/welcome/show.html.haml_spec.rb'
- './spec/views/search/_results.html.haml_spec.rb'
- './spec/views/search/show.html.haml_spec.rb'
-- './spec/views/shared/deploy_tokens/_form.html.haml_spec.rb'
- './spec/views/shared/groups/_dropdown.html.haml_spec.rb'
- './spec/views/shared/issuable/_sidebar.html.haml_spec.rb'
- './spec/views/shared/_label_row.html.haml_spec.rb'
@@ -10570,7 +10332,6 @@
- './spec/views/shared/projects/_project.html.haml_spec.rb'
- './spec/views/shared/runners/_runner_details.html.haml_spec.rb'
- './spec/views/shared/snippets/_snippet.html.haml_spec.rb'
-- './spec/views/shared/ssh_keys/_key_details.html.haml_spec.rb'
- './spec/views/shared/wikis/_sidebar.html.haml_spec.rb'
- './spec/workers/admin_email_worker_spec.rb'
- './spec/workers/analytics/usage_trends/counter_job_worker_spec.rb'
@@ -10635,18 +10396,12 @@
- './spec/workers/ci/track_failed_build_worker_spec.rb'
- './spec/workers/ci/update_locked_unknown_artifacts_worker_spec.rb'
- './spec/workers/cleanup_container_repository_worker_spec.rb'
-- './spec/workers/cluster_configure_istio_worker_spec.rb'
-- './spec/workers/cluster_provision_worker_spec.rb'
- './spec/workers/clusters/agents/delete_expired_events_worker_spec.rb'
- './spec/workers/clusters/applications/activate_integration_worker_spec.rb'
- './spec/workers/clusters/applications/deactivate_integration_worker_spec.rb'
-- './spec/workers/clusters/applications/wait_for_uninstall_app_worker_spec.rb'
- './spec/workers/clusters/cleanup/project_namespace_worker_spec.rb'
- './spec/workers/clusters/cleanup/service_account_worker_spec.rb'
- './spec/workers/clusters/integrations/check_prometheus_health_worker_spec.rb'
-- './spec/workers/cluster_update_app_worker_spec.rb'
-- './spec/workers/cluster_wait_for_app_update_worker_spec.rb'
-- './spec/workers/cluster_wait_for_ingress_ip_address_worker_spec.rb'
- './spec/workers/concerns/application_worker_spec.rb'
- './spec/workers/concerns/cluster_agent_queue_spec.rb'
- './spec/workers/concerns/cluster_queue_spec.rb'
@@ -10799,10 +10554,6 @@
- './spec/workers/metrics/dashboard/sync_dashboards_worker_spec.rb'
- './spec/workers/migrate_external_diffs_worker_spec.rb'
- './spec/workers/namespaces/in_product_marketing_emails_worker_spec.rb'
-- './spec/workers/namespaces/onboarding_issue_created_worker_spec.rb'
-- './spec/workers/namespaces/onboarding_pipeline_created_worker_spec.rb'
-- './spec/workers/namespaces/onboarding_progress_worker_spec.rb'
-- './spec/workers/namespaces/onboarding_user_added_worker_spec.rb'
- './spec/workers/namespaces/process_sync_events_worker_spec.rb'
- './spec/workers/namespaces/prune_aggregation_schedules_worker_spec.rb'
- './spec/workers/namespaces/root_statistics_worker_spec.rb'
@@ -10918,7 +10669,6 @@
- './spec/workers/users/create_statistics_worker_spec.rb'
- './spec/workers/users/deactivate_dormant_users_worker_spec.rb'
- './spec/workers/user_status_cleanup/batch_worker_spec.rb'
-- './spec/workers/wait_for_cluster_creation_worker_spec.rb'
- './spec/workers/web_hooks/log_destroy_worker_spec.rb'
- './spec/workers/web_hook_worker_spec.rb'
- './spec/workers/wikis/git_garbage_collect_worker_spec.rb'
diff --git a/spec/support/services/clusters/create_service_shared.rb b/spec/support/services/clusters/create_service_shared.rb
index f8a58a828ce..80fa7c58515 100644
--- a/spec/support/services/clusters/create_service_shared.rb
+++ b/spec/support/services/clusters/create_service_shared.rb
@@ -37,9 +37,7 @@ RSpec.shared_context 'invalid cluster create params' do
end
RSpec.shared_examples 'create cluster service success' do
- it 'creates a cluster object and performs a worker' do
- expect(ClusterProvisionWorker).to receive(:perform_async)
-
+ it 'creates a cluster object' do
expect { subject }
.to change { Clusters::Cluster.count }.by(1)
.and change { Clusters::Providers::Gcp.count }.by(1)
@@ -60,7 +58,6 @@ end
RSpec.shared_examples 'create cluster service error' do
it 'returns an error' do
- expect(ClusterProvisionWorker).not_to receive(:perform_async)
expect { subject }.to change { Clusters::Cluster.count }.by(0)
expect(subject.errors[:"provider_gcp.gcp_project_id"]).to be_present
end
diff --git a/spec/support/shared_contexts/bulk_imports_requests_shared_context.rb b/spec/support/shared_contexts/bulk_imports_requests_shared_context.rb
index 2f74d3131ab..e8fc498cbf7 100644
--- a/spec/support/shared_contexts/bulk_imports_requests_shared_context.rb
+++ b/spec/support/shared_contexts/bulk_imports_requests_shared_context.rb
@@ -15,7 +15,7 @@ RSpec.shared_context 'bulk imports requests context' do |url|
let(:request_headers) { { 'Content-Type' => 'application/json' } }
before do
- stub_request(:get, "#{url}/api/v4/version?page=1&per_page=20&private_token=demo-pat")
+ stub_request(:get, "#{url}/api/v4/version?private_token=demo-pat")
.with(headers: request_headers)
.to_return(
status: 200,
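The stub above now matches the version probe without pagination query parameters. A rough sketch of a request spec pulling in this shared context; the URL value and spec body are illustrative assumptions, not part of this patch:

    # Hypothetical consumer of the 'bulk imports requests context'.
    RSpec.describe 'Bulk import source instance check' do
      include_context 'bulk imports requests context', 'https://source.example.com'

      it 'hits the stubbed version endpoint' do
        # WebMock intercepts the underlying Net::HTTP call made by Gitlab::HTTP.
        response = Gitlab::HTTP.get(
          'https://source.example.com/api/v4/version?private_token=demo-pat',
          headers: request_headers
        )

        expect(response.code).to eq(200)
      end
    end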
diff --git a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
index ca2fe8a6c54..bf5158c9a92 100644
--- a/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
+++ b/spec/support/shared_contexts/features/integrations/integrations_shared_context.rb
@@ -40,7 +40,7 @@ Integration.available_integration_names.each do |integration|
let(:integration_attrs) do
integration_attrs_list.inject({}) do |hash, k|
- if k =~ /^(token*|.*_token|.*_key)/
+ if k =~ /^(token*|.*_token|.*_key)/ && k =~ /^[^app_store]/
hash.merge!(k => 'secrettoken')
elsif integration == 'confluence' && k == :confluence_url
hash.merge!(k => 'https://example.atlassian.net/wiki')
@@ -68,6 +68,12 @@ Integration.available_integration_names.each do |integration|
hash.merge!(k => "match_any")
elsif integration == 'campfire' && k == :room
hash.merge!(k => '1234')
+ elsif integration == 'apple_app_store' && k == :app_store_issuer_id
+ hash.merge!(k => 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee')
+ elsif integration == 'apple_app_store' && k == :app_store_private_key
+ hash.merge!(k => File.read('spec/fixtures/ssl_key.pem'))
+ elsif integration == 'apple_app_store' && k == :app_store_key_id
+ hash.merge!(k => 'ABC1')
else
hash.merge!(k => "someword")
end
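Note that `/^[^app_store]/` in the first hunk is a negated character class rather than a literal-prefix check; its effect here is to route the `app_store_*` keys past the generic 'secrettoken' branch so the new branches can assign realistic-looking credentials. A standalone sketch of the same inject pattern, reduced to the new keys:

    # Sketch of the inject pattern above, limited to the App Store attributes.
    app_store_attrs = [:app_store_issuer_id, :app_store_key_id, :app_store_private_key]

    attrs = app_store_attrs.inject({}) do |hash, k|
      case k
      when :app_store_issuer_id
        hash.merge!(k => 'aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee')
      when :app_store_key_id
        hash.merge!(k => 'ABC1')
      when :app_store_private_key
        # Same fixture path as the hunk above.
        hash.merge!(k => File.read('spec/fixtures/ssl_key.pem'))
      else
        hash.merge!(k => 'someword')
      end
    end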
diff --git a/spec/support/shared_contexts/markdown_golden_master_shared_examples.rb b/spec/support/shared_contexts/markdown_golden_master_shared_examples.rb
deleted file mode 100644
index 72e23e6d5fa..00000000000
--- a/spec/support/shared_contexts/markdown_golden_master_shared_examples.rb
+++ /dev/null
@@ -1,132 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-# See spec/fixtures/markdown/markdown_golden_master_examples.yml for documentation on how this spec works.
-RSpec.shared_context 'API::Markdown Golden Master shared context' do |markdown_yml_file_path|
- include ApiHelpers
- include WikiHelpers
-
- let_it_be(:user) { create(:user, username: 'gfm_user') }
-
- let_it_be(:group) { create(:group, :public) }
- let_it_be(:project) { create(:project, :public, :repository, group: group) }
-
- let_it_be(:label) { create(:label, project: project, title: 'bug') }
- let_it_be(:label2) { create(:label, project: project, title: 'UX bug') }
-
- let_it_be(:milestone) { create(:milestone, project: project, title: '1.1') }
- let_it_be(:issue) { create(:issue, project: project) }
- let_it_be(:merge_request) { create(:merge_request, source_project: project) }
-
- let_it_be(:project_wiki) { create(:project_wiki, project: project, user: user) }
-
- let_it_be(:project_wiki_page) { create(:wiki_page, wiki: project_wiki) }
-
- before(:all) do
- group.add_owner(user)
- project.add_maintainer(user)
- end
-
- before do
- sign_in(user)
- end
-
- markdown_examples = begin
- yaml = File.read(markdown_yml_file_path)
- YAML.safe_load(yaml, symbolize_names: true, aliases: true)
- end
-
- it "examples must be unique and alphabetized by name", :unlimited_max_formatted_output_length do
- names = markdown_examples.map { |example| example[:name] }
- expect(names).to eq(names.sort.uniq)
- end
-
- if focused_markdown_examples_string = ENV['FOCUSED_MARKDOWN_EXAMPLES']
- focused_markdown_examples = focused_markdown_examples_string.split(',').map(&:strip) || []
- markdown_examples.reject! { |markdown_example| !focused_markdown_examples.include?(markdown_example.fetch(:name)) }
- end
-
- markdown_examples.each do |markdown_example|
- name = markdown_example.fetch(:name)
- api_context = markdown_example[:api_context]
-
- if api_context && !name.end_with?("_for_#{api_context}")
- raise "Name must have suffix of '_for_#{api_context}' to the api_context"
- end
-
- context "for #{name}#{api_context ? " (api_context: #{api_context})" : ''}" do
- let(:pending_reason) do
- pending_value = markdown_example.fetch(:pending, nil)
- get_pending_reason(pending_value)
- end
-
- let(:example_markdown) { markdown_example.fetch(:markdown) }
- let(:example_html) { markdown_example.fetch(:html) }
- let(:substitutions) { markdown_example.fetch(:substitutions, {}) }
-
- it "verifies conversion of GFM to HTML", :unlimited_max_formatted_output_length do
- stub_application_setting(plantuml_enabled: true, plantuml_url: 'http://localhost:8080')
- stub_application_setting(kroki_enabled: true, kroki_url: 'http://localhost:8000')
-
- pending pending_reason if pending_reason
-
- normalized_example_html = normalize_html(example_html, substitutions)
-
- api_url = get_url_for_api_context(api_context)
-
- post api_url, params: { text: example_markdown, gfm: true }
- expect(response).to be_successful
- response_body = Gitlab::Json.parse(response.body)
- # Some requests have the HTML in the `html` key, others in the `body` key.
- response_html = response_body['body'] ? response_body.fetch('body') : response_body.fetch('html')
- normalized_response_html = normalize_html(response_html, substitutions)
-
- expect(normalized_response_html).to eq(normalized_example_html)
- end
-
- def get_pending_reason(pending_value)
- return false unless pending_value
-
- return pending_value if pending_value.is_a?(String)
-
- pending_value[:backend] || false
- end
-
- def normalize_html(html, substitutions)
- normalized_html = html.dup
- # Note: having the top level `substitutions` data structure be a hash of arrays
- # allows us to compose multiple substitutions via YAML anchors (YAML anchors
- # pointing to arrays can't be combined)
- substitutions.each_value do |substitution_entry|
- substitution_entry.each do |substitution|
- regex = substitution.fetch(:regex)
- replacement = substitution.fetch(:replacement)
- normalized_html.gsub!(%r{#{regex}}, replacement)
- end
- end
-
- normalized_html
- end
- end
- end
-
- def supported_api_contexts
- %w(project group project_wiki)
- end
-
- def get_url_for_api_context(api_context)
- case api_context
- when 'project'
- "/#{project.full_path}/preview_markdown"
- when 'group'
- "/groups/#{group.full_path}/preview_markdown"
- when 'project_wiki'
- "/#{project.full_path}/-/wikis/#{project_wiki_page.slug}/preview_markdown"
- when nil
- api "/markdown"
- else
- raise "Error: 'context' extension was '#{api_context}'. It must be one of: #{supported_api_contexts.join(',')}"
- end
- end
-end
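The deleted shared context above was driven by spec/fixtures/markdown/markdown_golden_master_examples.yml and was consumed by spec/requests/api/markdown_golden_master_spec.rb, which this patch also removes from the list further up. For reference, a sketch of how such a consumer pulled it in before the removal; only the context name and fixture path come from the deleted file, the describe label is illustrative:

    # Sketch of the former consumer of the removed shared context.
    RSpec.describe 'API Markdown Golden Master' do
      include_context 'API::Markdown Golden Master shared context',
                      Rails.root.join('spec/fixtures/markdown/markdown_golden_master_examples.yml').to_s
    end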
diff --git a/spec/support/shared_contexts/navbar_structure_context.rb b/spec/support/shared_contexts/navbar_structure_context.rb
index af35a5ff068..9c7cf831241 100644
--- a/spec/support/shared_contexts/navbar_structure_context.rb
+++ b/spec/support/shared_contexts/navbar_structure_context.rb
@@ -221,3 +221,46 @@ RSpec.shared_context 'group navbar structure' do
]
end
end
+
+RSpec.shared_context 'dashboard navbar structure' do
+ let(:structure) do
+ [
+ {
+ nav_item: "Your work",
+ nav_sub_items: []
+ },
+ {
+ nav_item: _("Projects"),
+ nav_sub_items: []
+ },
+ {
+ nav_item: _("Groups"),
+ nav_sub_items: []
+ },
+ {
+ nav_item: _("Issues"),
+ nav_sub_items: []
+ },
+ {
+ nav_item: _("Merge requests"),
+ nav_sub_items: []
+ },
+ {
+ nav_item: _("To-Do List"),
+ nav_sub_items: []
+ },
+ {
+ nav_item: _("Milestones"),
+ nav_sub_items: []
+ },
+ {
+ nav_item: _("Snippets"),
+ nav_sub_items: []
+ },
+ {
+ nav_item: _("Activity"),
+ nav_sub_items: []
+ }
+ ]
+ end
+end
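The new 'dashboard navbar structure' context only defines the expected `structure`; asserting against it is left to the including spec. A rough feature-spec sketch, assuming a plain Capybara content check rather than whatever matcher the real suite uses:

    # Hypothetical feature spec; the assertion style is an assumption.
    RSpec.describe 'Dashboard navigation', :js do
      include_context 'dashboard navbar structure'

      let(:user) { create(:user) }

      before do
        sign_in(user)
        visit root_path
      end

      it 'renders every top-level item' do
        structure.each do |item|
          expect(page).to have_content(item[:nav_item])
        end
      end
    end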
diff --git a/spec/support/shared_contexts/policies/group_policy_shared_context.rb b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
index f6ac98c7669..fddcecbe125 100644
--- a/spec/support/shared_contexts/policies/group_policy_shared_context.rb
+++ b/spec/support/shared_contexts/policies/group_policy_shared_context.rb
@@ -12,7 +12,7 @@ RSpec.shared_context 'GroupPolicy context' do
let(:public_permissions) do
%i[
- read_group read_counts
+ read_group read_counts read_achievement
read_label read_issue_board_list read_milestone read_issue_board
]
end
@@ -57,6 +57,7 @@ RSpec.shared_context 'GroupPolicy context' do
create_projects
create_cluster update_cluster admin_cluster add_cluster
destroy_upload
+ admin_achievement
]
end
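The first hunk adds :read_achievement to the public permission set and the second adds :admin_achievement to a higher-privilege set (alongside create_projects, admin_cluster, destroy_upload). A sketch of the assertions a GroupPolicy spec could build on top of this context; the role lets are assumed to come from the shared context itself:

    # Sketch of assertions built on the 'GroupPolicy context' shared context.
    RSpec.describe GroupPolicy do
      include_context 'GroupPolicy context'

      subject { described_class.new(current_user, group) }

      context 'for a user with no membership' do
        # The shared context's group is assumed to be publicly readable, so
        # the public permission set should apply here.
        let(:current_user) { create(:user) }

        it { is_expected.to be_allowed(:read_achievement) }
      end

      context 'for a maintainer' do
        # `maintainer` is assumed to be a let defined by the shared context.
        let(:current_user) { maintainer }

        it { is_expected.to be_allowed(:admin_achievement) }
      end
    end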
diff --git a/spec/support/shared_examples/analytics/cycle_analytics/parentable_examples.rb b/spec/support/shared_examples/analytics/cycle_analytics/parentable_examples.rb
new file mode 100644
index 00000000000..5fd0e685c67
--- /dev/null
+++ b/spec/support/shared_examples/analytics/cycle_analytics/parentable_examples.rb
@@ -0,0 +1,28 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'value stream analytics namespace models' do
+ let(:factory_name) { nil }
+
+ context 'when ProjectNamespace is given' do
+ it 'is valid' do
+ project_namespace = create(:project_namespace)
+ model = build(factory_name, namespace: project_namespace)
+
+ expect(model).to be_valid
+ expect(model.save).to be(true)
+ expect(model.namespace).to eq(project_namespace)
+ end
+ end
+
+ context 'when Namespace is given' do
+ it 'fails' do
+ namespace = create(:namespace)
+ model = build(factory_name, namespace: namespace)
+
+ expect(model).to be_invalid
+
+ error_message = s_('CycleAnalytics|the assigned object is not supported')
+ expect(model.errors.messages_for(:namespace)).to eq([error_message])
+ end
+ end
+end
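The new shared examples expect the including spec to set `factory_name` to a factory whose model carries a `namespace` association. A hypothetical consumer is sketched below; `SomeVsaModel` and `:some_vsa_record` are placeholders, not names taken from this patch:

    # Hypothetical consumer; the class and factory names are placeholders.
    RSpec.describe SomeVsaModel, type: :model do
      it_behaves_like 'value stream analytics namespace models' do
        let(:factory_name) { :some_vsa_record }
      end
    end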
diff --git a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
index 5506b05ca55..de38d1ff9f8 100644
--- a/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/githubish_import_controller_shared_examples.rb
@@ -258,7 +258,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
.to receive(:new).with(provider_repo, provider_repo[:name], user.namespace, user, type: provider, **access_params)
.and_return(double(execute: project))
- post :create, format: :json
+ post :create, params: { target_namespace: user.namespace }, format: :json
expect(response).to have_gitlab_http_status(:ok)
end
@@ -272,7 +272,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
.to receive(:new).with(provider_repo, provider_repo[:name], user.namespace, user, type: provider, **access_params)
.and_return(double(execute: project))
- post :create, format: :json
+ post :create, params: { target_namespace: user.namespace_path }, format: :json
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response['errors']).to eq('Name is invalid, Path is old')
@@ -286,7 +286,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
expect(store).to receive(:touch) { "realtime_changes_import_#{provider}_path" }
end
- post :create, format: :json
+ post :create, params: { target_namespace: user.namespace_path }, format: :json
end
context "when the repository owner is the provider user" do
@@ -296,7 +296,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
.to receive(:new).with(provider_repo, provider_repo[:name], user.namespace, user, type: provider, **access_params)
.and_return(double(execute: project))
- post :create, format: :json
+ post :create, params: { target_namespace: user.namespace_path }, format: :json
end
end
@@ -308,7 +308,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
.to receive(:new).with(provider_repo, provider_repo[:name], user.namespace, user, type: provider, **access_params)
.and_return(double(execute: project))
- post :create, format: :json
+ post :create, params: { target_namespace: user.namespace_path }, format: :json
end
end
end
@@ -333,7 +333,7 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
.to receive(:new).with(provider_repo, provider_repo[:name], existing_namespace, user, type: provider, **access_params)
.and_return(double(execute: project))
- post :create, format: :json
+ post :create, params: { target_namespace: user.namespace_path }, format: :json
end
end
@@ -345,47 +345,17 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
.to receive(:new).with(provider_repo, provider_repo[:name], user.namespace, user, type: provider, **access_params)
.and_return(double(execute: project))
- post :create, format: :json
+ post :create, params: { target_namespace: user.namespace_path }, format: :json
end
end
end
context "when a namespace with the provider user's username doesn't exist" do
context "when current user can create namespaces" do
- it "creates the namespace" do
- expect(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).and_return(double(execute: project))
-
- expect { post :create, params: { target_namespace: provider_repo[:name] }, format: :json }.to change { Namespace.count }.by(1)
- end
-
- it "takes the new namespace" do
- expect(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).with(provider_repo, provider_repo[:name], an_instance_of(Group), user, type: provider, **access_params)
- .and_return(double(execute: project))
-
- post :create, params: { target_namespace: provider_repo[:name] }, format: :json
- end
- end
-
- context "when current user can't create namespaces" do
- before do
- user.update_attribute(:can_create_group, false)
- end
-
- it "doesn't create the namespace" do
- expect(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).and_return(double(execute: project))
-
- expect { post :create, format: :json }.not_to change { Namespace.count }
- end
+ it "does not create the namespace" do
+ expect(Gitlab::LegacyGithubImport::ProjectCreator).not_to receive(:new)
- it "takes the current user's namespace" do
- expect(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).with(provider_repo, provider_repo[:name], user.namespace, user, type: provider, **access_params)
- .and_return(double(execute: project))
-
- post :create, format: :json
+ expect { post :create, params: { target_namespace: provider_repo[:name] }, format: :json }.not_to change { Namespace.count }
end
end
end
@@ -405,14 +375,6 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
post :create, params: { target_namespace: test_namespace.name, new_name: test_name }, format: :json
end
-
- it 'takes the selected name and default namespace' do
- expect(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).with(provider_repo, test_name, user.namespace, user, type: provider, **access_params)
- .and_return(double(execute: project))
-
- post :create, params: { new_name: test_name }, format: :json
- end
end
context 'user has chosen an existing nested namespace and name for the project' do
@@ -437,31 +399,16 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
context 'user has chosen a non-existent nested namespaces and name for the project' do
let(:test_name) { 'test_name' }
- it 'takes the selected namespace and name' do
+ it 'does not take the selected namespace and name' do
expect(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, type: provider, **access_params)
- .and_return(double(execute: project))
+ .not_to receive(:new)
post :create, params: { target_namespace: 'foo/bar', new_name: test_name }, format: :json
end
- it 'creates the namespaces' do
- allow(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, type: provider, **access_params)
- .and_return(double(execute: project))
-
+ it 'does not create namespaces' do
expect { post :create, params: { target_namespace: 'foo/bar', new_name: test_name }, format: :json }
- .to change { Namespace.count }.by(2)
- end
-
- it 'new namespace has the right parent' do
- allow(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, type: provider, **access_params)
- .and_return(double(execute: project))
-
- post :create, params: { target_namespace: 'foo/bar', new_name: test_name }, format: :json
-
- expect(Namespace.find_by_path_or_name('bar').parent.path).to eq('foo')
+ .not_to change { Namespace.count }
end
end
@@ -473,55 +420,25 @@ RSpec.shared_examples 'a GitHub-ish import controller: POST create' do
parent_namespace.add_owner(user)
end
- it 'takes the selected namespace and name' do
- expect(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, type: provider, **access_params)
- .and_return(double(execute: project))
+ it 'does not take the selected namespace and name' do
+ expect(Gitlab::LegacyGithubImport::ProjectCreator).not_to receive(:new)
post :create, params: { target_namespace: 'foo/foobar/bar', new_name: test_name }, format: :json
end
- it 'creates the namespaces' do
- allow(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, type: provider, **access_params)
- .and_return(double(execute: project))
-
+ it 'does not create the namespaces' do
expect { post :create, params: { target_namespace: 'foo/foobar/bar', new_name: test_name }, format: :json }
- .to change { Namespace.count }.by(2)
+ .not_to change { Namespace.count }
end
it 'does not create a new namespace under the user namespace' do
- expect(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).with(provider_repo, test_name, user.namespace, user, type: provider, **access_params)
- .and_return(double(execute: project))
+ expect(Gitlab::LegacyGithubImport::ProjectCreator).not_to receive(:new)
expect { post :create, params: { target_namespace: "#{user.namespace_path}/test_group", new_name: test_name }, format: :js }
.not_to change { Namespace.count }
end
end
- context 'user cannot create a subgroup inside a group is not a member of' do
- let(:test_name) { 'test_name' }
- let!(:parent_namespace) { create(:group, name: 'foo') }
-
- it 'does not take the selected namespace and name' do
- expect(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).with(provider_repo, test_name, user.namespace, user, type: provider, **access_params)
- .and_return(double(execute: project))
-
- post :create, params: { target_namespace: 'foo/foobar/bar', new_name: test_name }, format: :js
- end
-
- it 'does not create the namespaces' do
- allow(Gitlab::LegacyGithubImport::ProjectCreator)
- .to receive(:new).with(provider_repo, test_name, kind_of(Namespace), user, type: provider, **access_params)
- .and_return(double(execute: project))
-
- expect { post :create, params: { target_namespace: 'foo/foobar/bar', new_name: test_name }, format: :js }
- .not_to change { Namespace.count }
- end
- end
-
context 'user can use a group without having permissions to create a group' do
let(:test_name) { 'test_name' }
let!(:group) { create(:group, name: 'foo') }
diff --git a/spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb b/spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb
index 02915206cc5..446bc4cd92f 100644
--- a/spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/issuables_list_metadata_shared_examples.rb
@@ -42,6 +42,10 @@ RSpec.shared_examples 'issuables list meta-data' do |issuable_type, action = nil
let(:result_issuable) { issuables.first }
let(:search) { result_issuable.title }
+ before do
+ stub_application_setting(search_rate_limit: 0, search_rate_limit_unauthenticated: 0)
+ end
+
# .simple_sorts is the same across all Sortable classes
sorts = ::Issue.simple_sorts.keys + %w[popularity priority label_priority]
sorts.each do |sort|
diff --git a/spec/support/shared_examples/controllers/rate_limited_endpoint_shared_examples.rb b/spec/support/shared_examples/controllers/rate_limited_endpoint_shared_examples.rb
index 20edca1ee9f..b34038ca0e4 100644
--- a/spec/support/shared_examples/controllers/rate_limited_endpoint_shared_examples.rb
+++ b/spec/support/shared_examples/controllers/rate_limited_endpoint_shared_examples.rb
@@ -5,7 +5,9 @@
# - current_user
# - error_message # optional
-RSpec.shared_examples 'rate limited endpoint' do |rate_limit_key:|
+RSpec.shared_examples 'rate limited endpoint' do |rate_limit_key:, graphql: false|
+ let(:error_message) { _('This endpoint has been requested too many times. Try again later.') }
+
context 'when rate limiter enabled', :freeze_time, :clean_gitlab_redis_rate_limiting do
let(:expected_logger_attributes) do
{
@@ -25,8 +27,6 @@ RSpec.shared_examples 'rate limited endpoint' do |rate_limit_key:|
end
end
- let(:error_message) { _('This endpoint has been requested too many times. Try again later.') }
-
before do
allow(Gitlab::ApplicationRateLimiter).to receive(:threshold).with(rate_limit_key).and_return(1)
end
@@ -37,12 +37,16 @@ RSpec.shared_examples 'rate limited endpoint' do |rate_limit_key:|
request
request
- expect(response).to have_gitlab_http_status(:too_many_requests)
+ if graphql
+ expect_graphql_errors_to_include(error_message)
+ else
+ expect(response).to have_gitlab_http_status(:too_many_requests)
- if example.metadata[:type] == :controller
- expect(response.body).to eq(error_message)
- else # it is API spec
- expect(response.body).to eq({ message: { error: error_message } }.to_json)
+ if response.content_type == 'application/json' # it is API spec
+ expect(response.body).to eq({ message: { error: error_message } }.to_json)
+ else
+ expect(response.body).to eq(error_message)
+ end
end
end
end
@@ -57,7 +61,11 @@ RSpec.shared_examples 'rate limited endpoint' do |rate_limit_key:|
request
- expect(response).not_to have_gitlab_http_status(:too_many_requests)
+ if graphql
+ expect_graphql_errors_to_be_empty
+ else
+ expect(response).not_to have_gitlab_http_status(:too_many_requests)
+ end
end
end
end
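For reference (not part of the diff): the shared example above can now be driven from GraphQL specs via the new graphql: flag. A minimal sketch of how a caller might opt in — the query, user, and rate-limit key are illustrative, and post_graphql is assumed to be available as in the GraphQL request specs elsewhere in this diff:

    it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit, graphql: true do
      let(:current_user) { user }

      def request
        # The shared example calls this twice and, with graphql: true, expects a
        # rate-limit error in the GraphQL response instead of a 429 status.
        post_graphql(query, current_user: current_user)
      end
    end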
diff --git a/spec/support/shared_examples/features/code_highlight_shared_examples.rb b/spec/support/shared_examples/features/code_highlight_shared_examples.rb
new file mode 100644
index 00000000000..3917ac9b489
--- /dev/null
+++ b/spec/support/shared_examples/features/code_highlight_shared_examples.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'code highlight' do
+ include PreferencesHelper
+
+ let_it_be(:current_user) { user }
+ let_it_be(:scheme_class) { user_color_scheme }
+
+ it 'has highlighted code', :js do
+ wait_for_requests
+ expect(subject).to have_selector(".js-syntax-highlight.#{scheme_class}")
+ end
+end
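A hedged sketch of how a :js feature spec might include the new 'code highlight' example (illustrative only; user and the visited page come from the including spec):

    it_behaves_like 'code highlight' do
      subject { page }
    end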
diff --git a/spec/support/shared_examples/features/content_editor_shared_examples.rb b/spec/support/shared_examples/features/content_editor_shared_examples.rb
index efdf7513b2d..6cd9c4ce1c4 100644
--- a/spec/support/shared_examples/features/content_editor_shared_examples.rb
+++ b/spec/support/shared_examples/features/content_editor_shared_examples.rb
@@ -4,7 +4,8 @@ RSpec.shared_examples 'edits content using the content editor' do
let(:content_editor_testid) { '[data-testid="content-editor"] [contenteditable].ProseMirror' }
def switch_to_content_editor
- find('[data-testid="toggle-editing-mode-button"] label', text: 'Rich text').click
+ click_button _('View rich text')
+ click_button _('Rich text')
end
def type_in_content_editor(keys)
diff --git a/spec/support/shared_examples/features/dashboard/sidebar_shared_examples.rb b/spec/support/shared_examples/features/dashboard/sidebar_shared_examples.rb
new file mode 100644
index 00000000000..efbd735c451
--- /dev/null
+++ b/spec/support/shared_examples/features/dashboard/sidebar_shared_examples.rb
@@ -0,0 +1,21 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples "a dashboard page with sidebar" do |page_path, menu_label|
+ before do
+ sign_in(user)
+ visit send(page_path)
+ end
+
+ let(:sidebar_css) { "aside.nav-sidebar[aria-label=\"Your work\"]" }
+ let(:active_menu_item_css) { "li.active[data-track-label=\"#{menu_label}_menu\"]" }
+
+ it "shows the \"Your work\" sidebar" do
+ expect(page).to have_css(sidebar_css)
+ end
+
+ it "shows the correct sidebar menu item as active" do
+ within(sidebar_css) do
+ expect(page).to have_css(active_menu_item_css)
+ end
+ end
+end
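As an illustration of how this new shared example might be consumed (the path helper symbol and menu label below are assumptions, not taken from the diff):

    let_it_be(:user) { create(:user) }

    it_behaves_like 'a dashboard page with sidebar', :dashboard_projects_path, 'projects'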
diff --git a/spec/support/shared_examples/features/reportable_note_shared_examples.rb b/spec/support/shared_examples/features/reportable_note_shared_examples.rb
index c35f711111b..9d859403465 100644
--- a/spec/support/shared_examples/features/reportable_note_shared_examples.rb
+++ b/spec/support/shared_examples/features/reportable_note_shared_examples.rb
@@ -36,7 +36,7 @@ RSpec.shared_examples 'reportable note' do |type|
dropdown.click_link('Report abuse to administrator')
expect(find('#user_name')['value']).to match(note.author.username)
- expect(find('#abuse_report_message')['value']).to match(noteable_note_url(note))
+ expect(find('#abuse_report_reported_from_url')['value']).to match(noteable_note_url(note))
end
def open_dropdown(dropdown)
diff --git a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
index 9d1f05d5543..6f4072ba762 100644
--- a/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
+++ b/spec/support/shared_examples/finders/issues_finder_shared_examples.rb
@@ -550,6 +550,24 @@ RSpec.shared_examples 'issues or work items finder' do |factory, execute_context
expect(items).to contain_exactly(item1, item4, item5)
end
end
+
+ context 'using OR' do
+ let(:params) { { or: { label_name: [label.title, label2.title].join(',') } } }
+
+ it 'returns items that have at least one of the given labels' do
+ expect(items).to contain_exactly(item2, item3)
+ end
+
+ context 'when feature flag is disabled' do
+ before do
+ stub_feature_flags(or_issuable_queries: false)
+ end
+
+ it 'does not add any filter' do
+ expect(items).to contain_exactly(item1, item2, item3, item4, item5)
+ end
+ end
+ end
end
context 'filtering by a label that includes any or none in the title' do
diff --git a/spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb b/spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb
index 0d0dbb112de..19ceb465383 100644
--- a/spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb
+++ b/spec/support/shared_examples/graphql/types/merge_request_interactions_type_shared_examples.rb
@@ -14,7 +14,10 @@ RSpec.shared_examples "a user type with merge request interaction type" do
name
username
email
+ emails
publicEmail
+ commitEmail
+ namespaceCommitEmails
avatarUrl
webUrl
webPath
diff --git a/spec/support/shared_examples/lib/sidebars/your_work/menus/menu_item_examples.rb b/spec/support/shared_examples/lib/sidebars/your_work/menus/menu_item_examples.rb
new file mode 100644
index 00000000000..19c94a3ba5b
--- /dev/null
+++ b/spec/support/shared_examples/lib/sidebars/your_work/menus/menu_item_examples.rb
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.shared_examples 'menu item shows pill based on count' do |count|
+ describe '#has_pill?' do
+ context 'when count is zero' do
+ it 'returns false' do
+ allow(user).to receive(count).and_return(0)
+ expect(subject.has_pill?).to eq false
+ end
+ end
+
+ context 'when count is larger than zero' do
+ it 'returns true' do
+ allow(user).to receive(count).and_return(3)
+ expect(subject.has_pill?).to eq true
+ end
+ end
+ end
+
+ describe '#pill_count' do
+ it "returns the #{count} of the user" do
+ allow(user).to receive(count).and_return(123)
+ expect(subject.pill_count).to eq 123
+ end
+
+ it 'memoizes the query' do
+ subject.pill_count
+
+ control = ActiveRecord::QueryRecorder.new do
+ subject.pill_count
+ end
+
+ expect(control.count).to eq 0
+ end
+ end
+end
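A sketch of how a sidebar menu spec might use the pill examples above (illustrative; the counter method name is an assumption, and the shared example only requires that user respond to it):

    # user and subject (the menu item under test) must be provided by the including spec.
    it_behaves_like 'menu item shows pill based on count', :assigned_open_issues_count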
diff --git a/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb b/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb
index a20bb794095..f98be12523d 100644
--- a/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/counter_attribute_shared_examples.rb
@@ -14,13 +14,14 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
counter_attributes.each do |attribute|
describe attribute do
describe '#increment_counter', :redis do
- let(:increment) { 10 }
+ let(:amount) { 10 }
+ let(:increment) { Gitlab::Counters::Increment.new(amount: amount) }
let(:counter_key) { model.counter(attribute).key }
subject { model.increment_counter(attribute, increment) }
context 'when attribute is a counter attribute' do
- where(:increment) { [10, -3] }
+ where(:amount) { [10, -3] }
with_them do
it 'increments the counter in Redis and logs it' do
@@ -29,8 +30,8 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
message: 'Increment counter attribute',
attribute: attribute,
project_id: model.project_id,
- increment: increment,
- new_counter_value: 0 + increment,
+ increment: amount,
+ new_counter_value: 0 + amount,
current_db_value: model.read_attribute(attribute),
'correlation_id' => an_instance_of(String),
'meta.feature_category' => 'test',
@@ -42,7 +43,7 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
Gitlab::Redis::SharedState.with do |redis|
counter = redis.get(counter_key)
- expect(counter).to eq(increment.to_s)
+ expect(counter).to eq(amount.to_s)
end
end
@@ -59,8 +60,8 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
end
end
- context 'when increment is 0' do
- let(:increment) { 0 }
+ context 'when increment amount is 0' do
+ let(:amount) { 0 }
it 'does nothing' do
expect(FlushCounterIncrementsWorker).not_to receive(:perform_in)
@@ -71,37 +72,49 @@ RSpec.shared_examples_for CounterAttribute do |counter_attributes|
end
end
end
- end
- end
- describe '#reset_counter!' do
- let(:attribute) { counter_attributes.first }
- let(:counter_key) { model.counter(attribute).key }
+ describe '#bulk_increment_counter', :redis do
+ let(:increments) { [Gitlab::Counters::Increment.new(amount: 10), Gitlab::Counters::Increment.new(amount: 5)] }
+ let(:total_amount) { increments.sum(&:amount) }
+ let(:counter_key) { model.counter(attribute).key }
- before do
- model.update!(attribute => 123)
- model.increment_counter(attribute, 10)
- end
+ subject { model.bulk_increment_counter(attribute, increments) }
- subject { model.reset_counter!(attribute) }
+ context 'when attribute is a counter attribute' do
+ it 'increments the counter in Redis and logs it' do
+ expect(Gitlab::AppLogger).to receive(:info).with(
+ hash_including(
+ message: 'Increment counter attribute',
+ attribute: attribute,
+ project_id: model.project_id,
+ increment: total_amount,
+ new_counter_value: 0 + total_amount,
+ current_db_value: model.read_attribute(attribute),
+ 'correlation_id' => an_instance_of(String),
+ 'meta.feature_category' => 'test',
+ 'meta.caller_id' => 'caller'
+ )
+ )
- it 'resets the attribute value to 0 and clears existing counter', :aggregate_failures do
- expect { subject }.to change { model.reload.send(attribute) }.from(123).to(0)
+ subject
- Gitlab::Redis::SharedState.with do |redis|
- key_exists = redis.exists?(counter_key)
- expect(key_exists).to be_falsey
- end
- end
+ Gitlab::Redis::SharedState.with do |redis|
+ counter = redis.get(counter_key)
+ expect(counter).to eq(total_amount.to_s)
+ end
+ end
- it_behaves_like 'obtaining lease to update database' do
- context 'when the execution raises error' do
- before do
- allow(model).to receive(:update!).and_raise(StandardError, 'Something went wrong')
- end
+ it 'does not increment the counter for the record' do
+ expect { subject }.not_to change { model.reset.read_attribute(attribute) }
+ end
- it 'reraises error' do
- expect { subject }.to raise_error(StandardError, 'Something went wrong')
+ it 'schedules a worker to flush counter increments asynchronously' do
+ expect(FlushCounterIncrementsWorker).to receive(:perform_in)
+ .with(Gitlab::Counters::BufferedCounter::WORKER_DELAY, model.class.name, model.id, attribute)
+ .and_call_original
+
+ subject
+ end
end
end
end
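For orientation, the new examples exercise bulk counter increments roughly like this — a sketch based on the calls visible above, with a placeholder attribute name:

    increments = [
      Gitlab::Counters::Increment.new(amount: 10),
      Gitlab::Counters::Increment.new(amount: 5)
    ]

    # Buffers the total (15) in Redis and schedules FlushCounterIncrementsWorker
    # to flush it to the database column later.
    model.bulk_increment_counter(:some_counter_attribute, increments)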
diff --git a/spec/support/shared_examples/models/concerns/integrations/reset_secret_fields_shared_examples.rb b/spec/support/shared_examples/models/concerns/integrations/reset_secret_fields_shared_examples.rb
index 873f858e432..c51a6c4f6fd 100644
--- a/spec/support/shared_examples/models/concerns/integrations/reset_secret_fields_shared_examples.rb
+++ b/spec/support/shared_examples/models/concerns/integrations/reset_secret_fields_shared_examples.rb
@@ -42,7 +42,7 @@ RSpec.shared_examples Integrations::ResetSecretFields do
# Treat values as persisted
integration.reset_updated_properties
- integration.instance_variable_set('@old_data_fields', nil) if integration.supports_data_fields?
+ integration.instance_variable_set(:@old_data_fields, nil) if integration.supports_data_fields?
end
context 'when an exposing field has changed' do
diff --git a/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb b/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
index 457ee49938f..5eeefacdeb9 100644
--- a/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
+++ b/spec/support/shared_examples/models/cycle_analytics_stage_shared_examples.rb
@@ -25,7 +25,7 @@ RSpec.shared_examples 'value stream analytics stage' do
stage = described_class.new(valid_params.except(:parent))
expect(stage).to be_invalid
- expect(stage.errors[parent_name]).to include("can't be blank")
+ expect(stage.errors[parent_name]).to include('must exist')
end
it 'validates presence of start_event_identifier' do
diff --git a/spec/support/shared_examples/models/member_shared_examples.rb b/spec/support/shared_examples/models/member_shared_examples.rb
index f8cff5c5558..7159c55e303 100644
--- a/spec/support/shared_examples/models/member_shared_examples.rb
+++ b/spec/support/shared_examples/models/member_shared_examples.rb
@@ -207,16 +207,14 @@ RSpec.shared_examples_for "member creation" do
source.request_access(user)
end
- it 'does not add the requester as a regular member', :aggregate_failures do
+ it 'adds the requester as a member', :aggregate_failures do
expect(source.users).not_to include(user)
- expect(source.requesters.exists?(user_id: user)).to be_truthy
+ expect(source.requesters.exists?(user_id: user)).to eq(true)
- expect do
- described_class.add_member(source, user, :maintainer)
- end.to raise_error(Gitlab::Access::AccessDeniedError)
+ described_class.add_member(source, user, :maintainer)
- expect(source.users.reload).not_to include(user)
- expect(source.requesters.reload.exists?(user_id: user)).to be_truthy
+ expect(source.users.reload).to include(user)
+ expect(source.requesters.reload.exists?(user_id: user)).to eq(false)
end
end
diff --git a/spec/support/shared_examples/models/members_notifications_shared_example.rb b/spec/support/shared_examples/models/members_notifications_shared_example.rb
index e74aab95e46..e28220334ac 100644
--- a/spec/support/shared_examples/models/members_notifications_shared_example.rb
+++ b/spec/support/shared_examples/models/members_notifications_shared_example.rb
@@ -51,7 +51,7 @@ RSpec.shared_examples 'members notifications' do |entity_type|
it "calls NotificationService.new_#{entity_type}_member" do
expect(notification_service).to receive(:"new_#{entity_type}_member").with(member)
- member.accept_request
+ member.accept_request(create(:user))
end
end
diff --git a/spec/support/shared_examples/models/relative_positioning_shared_examples.rb b/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
index b8d12a6da59..2b46c8c8fb9 100644
--- a/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
+++ b/spec/support/shared_examples/models/relative_positioning_shared_examples.rb
@@ -20,6 +20,7 @@ RSpec.shared_examples 'a class that supports relative positioning' do
let(:new_item) { create_item(relative_position: nil) }
let(:set_size) { RelativePositioning.mover.context(item1).scoped_items.count }
+ let(:items_with_nil_position_sample_quantity) { 101 }
def create_item(params = {})
create(factory, params.merge(default_params))
@@ -163,7 +164,7 @@ RSpec.shared_examples 'a class that supports relative positioning' do
end
it 'can move many nulls' do
- nils = create_items_with_positions([nil] * 101)
+ nils = create_items_with_positions([nil] * items_with_nil_position_sample_quantity)
described_class.move_nulls_to_end(nils)
diff --git a/spec/support/shared_examples/models/resource_event_shared_examples.rb b/spec/support/shared_examples/models/resource_event_shared_examples.rb
index 80806ee768a..8cab2de076d 100644
--- a/spec/support/shared_examples/models/resource_event_shared_examples.rb
+++ b/spec/support/shared_examples/models/resource_event_shared_examples.rb
@@ -161,3 +161,15 @@ RSpec.shared_examples 'a resource event for merge requests' do
end
end
end
+
+RSpec.shared_examples 'a note for work item resource event' do
+ let_it_be(:user) { create(:user) }
+ let_it_be(:project) { create(:project) }
+ let_it_be(:work_item) { create(:work_item, :task, project: project, author: user) }
+
+ it 'builds synthetic note with correct synthetic_note_class' do
+ event = build(described_class.name.underscore.to_sym, issue: work_item)
+
+ expect(event.work_item_synthetic_system_note.class.name).to eq(event.synthetic_note_class.name)
+ end
+end
diff --git a/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb b/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
index eb742921d35..5aaa93aecef 100644
--- a/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
+++ b/spec/support/shared_examples/models/update_project_statistics_shared_examples.rb
@@ -60,8 +60,11 @@ RSpec.shared_examples 'UpdateProjectStatistics' do |with_counter_attribute|
end
it 'stores pending increments for async update' do
+ expected_increment = have_attributes(amount: delta, ref: subject.id)
+
expect(ProjectStatistics)
.to receive(:increment_statistic)
+ .with(project, project_statistics_name, expected_increment)
.and_call_original
subject.write_attribute(statistic_attribute, read_attribute + delta)
@@ -108,11 +111,8 @@ RSpec.shared_examples 'UpdateProjectStatistics' do |with_counter_attribute|
end
context 'when it is destroyed from the project level' do
- it 'does not update the project statistics' do
- expect(ProjectStatistics)
- .not_to receive(:increment_statistic)
-
- expect(Projects::DestroyService.new(project, project.first_owner).execute).to eq(true)
+ it 'does not store pending increments for async update' do
+ expect { Projects::DestroyService.new(project, project.first_owner).execute }.not_to change { read_pending_increment }
end
it 'does not schedule a namespace statistics worker' do
diff --git a/spec/support/shared_examples/namespaces/members.rb b/spec/support/shared_examples/namespaces/members.rb
new file mode 100644
index 00000000000..ed1ea23226c
--- /dev/null
+++ b/spec/support/shared_examples/namespaces/members.rb
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+RSpec.shared_examples 'query without source filters' do
+ it do
+ expect(subject.where_values_hash.keys).not_to include('source_id', 'source_type')
+ end
+end
+
+RSpec.shared_examples 'query with source filters' do
+ it do
+ expect(subject.where_values_hash.keys).to include('source_id', 'source_type')
+ end
+end
diff --git a/spec/support/shared_examples/observability/csp_shared_examples.rb b/spec/support/shared_examples/observability/csp_shared_examples.rb
index 868d7023d14..0cd211f69eb 100644
--- a/spec/support/shared_examples/observability/csp_shared_examples.rb
+++ b/spec/support/shared_examples/observability/csp_shared_examples.rb
@@ -2,9 +2,17 @@
# Verifies that the proper CSP rules for Observability UI are applied to a given controller/path
#
-# The path under test needs to be declared with `let(:tested_path) { .. }` in the context including this example
+# It requires the following variables declared in the context including this example:
+#
+# - `tested_path`: the path under test
+# - `user`: the test user
+# - `group`: the test group
+#
+# e.g.
#
# ```
+# let_it_be(:group) { create(:group) }
+# let_it_be(:user) { create(:user) }
# it_behaves_like "observability csp policy" do
# let(:tested_path) { ....the path under test }
# end
@@ -33,6 +41,9 @@ RSpec.shared_examples 'observability csp policy' do |controller_class = describe
before do
setup_csp_for_controller(controller_class, csp, any_time: true)
+ group.add_developer(user)
+ login_as(user)
+ allow(Gitlab::Observability).to receive(:observability_enabled?).and_return(true)
end
subject do
@@ -48,6 +59,40 @@ RSpec.shared_examples 'observability csp policy' do |controller_class = describe
end
end
+ context 'when observability is disabled' do
+ let(:csp) do
+ ActionDispatch::ContentSecurityPolicy.new do |p|
+ p.frame_src 'https://something.test'
+ end
+ end
+
+ before do
+ allow(Gitlab::Observability).to receive(:observability_enabled?).and_return(false)
+ end
+
+ it 'does not add observability urls to the csp header' do
+ expect(subject).to include("frame-src https://something.test")
+ expect(subject).not_to include("#{observability_url} #{signin_url} #{oauth_url}")
+ end
+ end
+
+ context 'when checking if observability is enabled' do
+ let(:csp) do
+ ActionDispatch::ContentSecurityPolicy.new do |p|
+ p.frame_src 'https://something.test'
+ end
+ end
+
+ it 'checks access for a given user and group' do
+ allow(Gitlab::Observability).to receive(:observability_enabled?)
+
+ get tested_path
+
+ expect(Gitlab::Observability).to have_received(:observability_enabled?)
+ .with(user, group).at_least(:once)
+ end
+ end
+
context 'when frame-src exists in the CSP config' do
let(:csp) do
ActionDispatch::ContentSecurityPolicy.new do |p|
diff --git a/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb
index ca6536444fd..d8690356f81 100644
--- a/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issuable/close_quick_action_shared_examples.rb
@@ -12,10 +12,10 @@ RSpec.shared_examples 'close quick action' do |issuable_type|
before do
case issuable_type
when :merge_request
- visit public_send('namespace_project_new_merge_request_path', project.namespace, project, new_url_opts)
+ visit public_send(:namespace_project_new_merge_request_path, project.namespace, project, new_url_opts)
wait_for_all_requests
when :issue
- visit public_send('new_namespace_project_issue_path', project.namespace, project, new_url_opts)
+ visit public_send(:new_namespace_project_issue_path, project.namespace, project, new_url_opts)
wait_for_all_requests
end
end
diff --git a/spec/support/shared_examples/quick_actions/issue/promote_to_incident_quick_action_shared_examples.rb b/spec/support/shared_examples/quick_actions/issue/promote_to_incident_quick_action_shared_examples.rb
index 5167d27f8b9..3f1a98ca08e 100644
--- a/spec/support/shared_examples/quick_actions/issue/promote_to_incident_quick_action_shared_examples.rb
+++ b/spec/support/shared_examples/quick_actions/issue/promote_to_incident_quick_action_shared_examples.rb
@@ -36,5 +36,33 @@ RSpec.shared_examples 'promote_to_incident quick action' do
expect(page).to have_content('Could not apply promote_to_incident command')
end
end
+
+ context 'on issue creation' do
+ it 'promotes issue to incident' do
+ visit new_project_issue_path(project)
+ fill_in('Title', with: 'Title')
+ fill_in('Description', with: '/promote_to_incident')
+ click_button('Create issue')
+
+ wait_for_all_requests
+
+ expect(page).to have_content("Incident created just now by #{user.name}")
+ end
+
+ context 'when incident is selected for issue type' do
+ it 'promotes issue to incident' do
+ visit new_project_issue_path(project)
+ fill_in('Title', with: 'Title')
+ find('.js-issuable-type-filter-dropdown-wrap').click
+ click_link('Incident')
+ fill_in('Description', with: '/promote_to_incident')
+ click_button('Create issue')
+
+ wait_for_all_requests
+
+ expect(page).to have_content("Incident created just now by #{user.name}")
+ end
+ end
+ end
end
end
diff --git a/spec/support/shared_examples/requests/api/graphql/issue_list_shared_examples.rb b/spec/support/shared_examples/requests/api/graphql/issue_list_shared_examples.rb
index d4479e462af..d4af9e570d1 100644
--- a/spec/support/shared_examples/requests/api/graphql/issue_list_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/graphql/issue_list_shared_examples.rb
@@ -17,6 +17,11 @@ RSpec.shared_examples 'graphql issue list request spec' do
end
describe 'filters' do
+ before_all do
+ issue_a.assignee_ids = current_user.id
+ issue_b.assignee_ids = another_user.id
+ end
+
context 'when filtering by assignees' do
context 'when both assignee_username filters are provided' do
let(:issue_filter_params) do
@@ -44,12 +49,30 @@ RSpec.shared_examples 'graphql issue list request spec' do
end
context 'when filtering by unioned arguments' do
- let(:issue_filter_params) { { or: { assignee_usernames: [current_user.username, another_user.username] } } }
+ context 'when filtering by assignees' do
+ let(:issue_filter_params) { { or: { assignee_usernames: [current_user.username, another_user.username] } } }
- it 'returns correctly filtered issues' do
- post_query
+ it 'returns correctly filtered issues' do
+ post_query
- expect(issue_ids).to match_array(expected_unioned_assignee_issues.map { |i| i.to_gid.to_s })
+ expect(issue_ids).to match_array([issue_a, issue_b].map { |i| i.to_gid.to_s })
+ end
+ end
+
+ context 'when filtering by labels' do
+ let_it_be(:label_a) { create(:label, project: issue_a.project) }
+ let_it_be(:label_b) { create(:label, project: issue_b.project) }
+
+ let(:issue_filter_params) { { or: { label_names: [label_a.title, label_b.title] } } }
+
+ it 'returns correctly filtered issues' do
+ issue_a.label_ids = label_a.id
+ issue_b.label_ids = label_b.id
+
+ post_graphql(query, current_user: current_user)
+
+ expect(issue_ids).to match_array([issue_a, issue_b].map { |i| i.to_gid.to_s })
+ end
end
context 'when argument is blank' do
@@ -63,6 +86,8 @@ RSpec.shared_examples 'graphql issue list request spec' do
end
context 'when feature flag is disabled' do
+ let(:issue_filter_params) { { or: { assignee_usernames: [current_user.username] } } }
+
it 'returns an error' do
stub_feature_flags(or_issuable_queries: false)
diff --git a/spec/support/shared_examples/requests/api/issuable_search_shared_examples.rb b/spec/support/shared_examples/requests/api/issuable_search_shared_examples.rb
index 9f67bd69560..fcde3b65b4f 100644
--- a/spec/support/shared_examples/requests/api/issuable_search_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/issuable_search_shared_examples.rb
@@ -34,3 +34,35 @@ RSpec.shared_examples 'issuable anonymous search' do
end
end
end
+
+RSpec.shared_examples 'issuable API rate-limited search' do
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit do
+ let(:current_user) { user }
+
+ def request
+ get api(url, current_user), params: { scope: 'all', search: issuable.title }
+ end
+ end
+
+ it_behaves_like 'rate limited endpoint', rate_limit_key: :search_rate_limit_unauthenticated do
+ def request
+ get api(url), params: { scope: 'all', search: issuable.title }
+ end
+ end
+
+ context 'when rate_limit_issuable_searches is disabled', :freeze_time, :clean_gitlab_redis_rate_limiting do
+ before do
+ stub_feature_flags(rate_limit_issuable_searches: false)
+
+ allow(Gitlab::ApplicationRateLimiter).to receive(:threshold)
+ .with(:search_rate_limit_unauthenticated).and_return(1)
+ end
+
+ it 'does not enforce the rate limit' do
+ get api(url), params: { scope: 'all', search: issuable.title }
+ get api(url), params: { scope: 'all', search: issuable.title }
+
+ expect(response).to have_gitlab_http_status(:ok)
+ end
+ end
+end
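A sketch of how an API spec might pull in the new rate-limited search example (illustrative; url, issuable, and user are expected to be defined by the including spec, and the endpoint shown is a placeholder):

    it_behaves_like 'issuable API rate-limited search' do
      let(:url) { '/issues' }
      let(:issuable) { issue }
      let(:user) { current_user }
    end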
diff --git a/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb
index 290bf58fb6b..17d8b9c7fab 100644
--- a/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_endpoints_shared_examples.rb
@@ -1,6 +1,11 @@
# frozen_string_literal: true
-RSpec.shared_examples 'handling nuget service requests' do |anonymous_requests_example_name: 'process nuget service index request', anonymous_requests_status: :success|
+RSpec.shared_examples 'handling nuget service requests' do |example_names_with_status: {}|
+ anonymous_requests_example_name = example_names_with_status.fetch(:anonymous_requests_example_name, 'process nuget service index request')
+ anonymous_requests_status = example_names_with_status.fetch(:anonymous_requests_status, :success)
+ guest_requests_example_name = example_names_with_status.fetch(:guest_requests_example_name, 'rejects nuget packages access')
+ guest_requests_status = example_names_with_status.fetch(:guest_requests_status, :forbidden)
+
subject { get api(url) }
context 'with valid target' do
@@ -18,7 +23,7 @@ RSpec.shared_examples 'handling nuget service requests' do |anonymous_requests_e
'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
'PUBLIC' | :anonymous | false | true | anonymous_requests_example_name | anonymous_requests_status
'PRIVATE' | :developer | true | true | 'process nuget service index request' | :success
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :guest | true | true | guest_requests_example_name | guest_requests_status
'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
@@ -54,7 +59,7 @@ RSpec.shared_examples 'handling nuget service requests' do |anonymous_requests_e
'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
'PUBLIC' | :anonymous | false | true | anonymous_requests_example_name | anonymous_requests_status
'PRIVATE' | :developer | true | true | 'process nuget service index request' | :success
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :guest | true | true | guest_requests_example_name | guest_requests_status
'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
@@ -90,9 +95,14 @@ RSpec.shared_examples 'handling nuget service requests' do |anonymous_requests_e
it_behaves_like 'rejects nuget access with invalid target id'
end
-RSpec.shared_examples 'handling nuget metadata requests with package name' do |anonymous_requests_example_name: 'process nuget metadata request at package name level', anonymous_requests_status: :success|
+RSpec.shared_examples 'handling nuget metadata requests with package name' do |example_names_with_status: {}|
include_context 'with expected presenters dependency groups'
+ anonymous_requests_example_name = example_names_with_status.fetch(:anonymous_requests_example_name, 'process nuget metadata request at package name level')
+ anonymous_requests_status = example_names_with_status.fetch(:anonymous_requests_status, :success)
+ guest_requests_example_name = example_names_with_status.fetch(:guest_requests_example_name, 'rejects nuget packages access')
+ guest_requests_status = example_names_with_status.fetch(:guest_requests_status, :forbidden)
+
let_it_be(:package_name) { 'Dummy.Package' }
let_it_be(:packages) { create_list(:nuget_package, 5, :with_metadatum, name: package_name, project: project) }
let_it_be(:tags) { packages.each { |pkg| create(:packages_tag, package: pkg, name: 'test') } }
@@ -117,7 +127,7 @@ RSpec.shared_examples 'handling nuget metadata requests with package name' do |a
'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
'PUBLIC' | :anonymous | false | true | anonymous_requests_example_name | anonymous_requests_status
'PRIVATE' | :developer | true | true | 'process nuget metadata request at package name level' | :success
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :guest | true | true | guest_requests_example_name | guest_requests_status
'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
@@ -152,9 +162,14 @@ RSpec.shared_examples 'handling nuget metadata requests with package name' do |a
end
end
-RSpec.shared_examples 'handling nuget metadata requests with package name and package version' do |anonymous_requests_example_name: 'process nuget metadata request at package name and package version level', anonymous_requests_status: :success|
+RSpec.shared_examples 'handling nuget metadata requests with package name and package version' do |example_names_with_status: {}|
include_context 'with expected presenters dependency groups'
+ anonymous_requests_example_name = example_names_with_status.fetch(:anonymous_requests_example_name, 'process nuget metadata request at package name and package version level')
+ anonymous_requests_status = example_names_with_status.fetch(:anonymous_requests_status, :success)
+ guest_requests_example_name = example_names_with_status.fetch(:guest_requests_example_name, 'rejects nuget packages access')
+ guest_requests_status = example_names_with_status.fetch(:guest_requests_status, :forbidden)
+
let_it_be(:package_name) { 'Dummy.Package' }
let_it_be(:package) { create(:nuget_package, :with_metadatum, name: package_name, project: project) }
let_it_be(:tag) { create(:packages_tag, package: package, name: 'test') }
@@ -179,7 +194,7 @@ RSpec.shared_examples 'handling nuget metadata requests with package name and pa
'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
'PUBLIC' | :anonymous | false | true | anonymous_requests_example_name | anonymous_requests_status
'PRIVATE' | :developer | true | true | 'process nuget metadata request at package name and package version level' | :success
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :guest | true | true | guest_requests_example_name | guest_requests_status
'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
@@ -214,7 +229,12 @@ RSpec.shared_examples 'handling nuget metadata requests with package name and pa
it_behaves_like 'rejects nuget access with invalid target id'
end
-RSpec.shared_examples 'handling nuget search requests' do |anonymous_requests_example_name: 'process nuget search request', anonymous_requests_status: :success|
+RSpec.shared_examples 'handling nuget search requests' do |example_names_with_status: {}|
+ anonymous_requests_example_name = example_names_with_status.fetch(:anonymous_requests_example_name, 'process nuget search request')
+ anonymous_requests_status = example_names_with_status.fetch(:anonymous_requests_status, :success)
+ guest_requests_example_name = example_names_with_status.fetch(:guest_requests_example_name, 'rejects nuget packages access')
+ guest_requests_status = example_names_with_status.fetch(:guest_requests_status, :forbidden)
+
let_it_be(:package_a) { create(:nuget_package, :with_metadatum, name: 'Dummy.PackageA', project: project) }
let_it_be(:tag) { create(:packages_tag, package: package_a, name: 'test') }
let_it_be(:packages_b) { create_list(:nuget_package, 5, name: 'Dummy.PackageB', project: project) }
@@ -244,7 +264,7 @@ RSpec.shared_examples 'handling nuget search requests' do |anonymous_requests_ex
'PUBLIC' | :guest | false | false | 'rejects nuget packages access' | :unauthorized
'PUBLIC' | :anonymous | false | true | anonymous_requests_example_name | anonymous_requests_status
'PRIVATE' | :developer | true | true | 'process nuget search request' | :success
- 'PRIVATE' | :guest | true | true | 'rejects nuget packages access' | :forbidden
+ 'PRIVATE' | :guest | true | true | guest_requests_example_name | guest_requests_status
'PRIVATE' | :developer | true | false | 'rejects nuget packages access' | :unauthorized
'PRIVATE' | :guest | true | false | 'rejects nuget packages access' | :unauthorized
'PRIVATE' | :developer | false | true | 'rejects nuget packages access' | :not_found
diff --git a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
index bace570e47a..3abe545db59 100644
--- a/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
+++ b/spec/support/shared_examples/requests/api/nuget_packages_shared_examples.rb
@@ -379,6 +379,26 @@ RSpec.shared_examples 'process nuget search request' do |user_type, status, add_
end
end
+RSpec.shared_examples 'process empty nuget search request' do |user_type, status, add_member = true|
+ before do
+ target.send("add_#{user_type}", user) if add_member && user_type != :anonymous
+ end
+
+ it_behaves_like 'returning response status', status
+
+ it 'returns a valid json response' do
+ subject
+
+ expect(response.media_type).to eq('application/json')
+ expect(json_response).to be_a(Hash)
+ expect(json_response).to match_schema('public_api/v4/packages/nuget/search')
+ expect(json_response['totalHits']).to eq(0)
+ expect(json_response['data'].map { |e| e['versions'].size }).to be_empty
+ end
+
+ it_behaves_like 'a package tracking event', 'API::NugetPackages', 'search_package'
+end
+
RSpec.shared_examples 'rejects nuget access with invalid target id' do
context 'with a target id with invalid integers' do
using RSpec::Parameterized::TableSyntax
diff --git a/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb b/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb
index 12f2b5d78a5..e47ff2fcd59 100644
--- a/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb
+++ b/spec/support/shared_examples/services/issuable_links/create_links_shared_examples.rb
@@ -30,7 +30,7 @@ RSpec.shared_examples 'issuable link creation' do
context 'when user has no permission to target issuable' do
let(:params) do
- { issuable_references: [guest_issuable.to_reference(issuable_parent)] }
+ { issuable_references: [restricted_issuable.to_reference(issuable_parent)] }
end
it 'returns error' do
diff --git a/spec/support/shared_examples/services/issuable_links/destroyable_issuable_links_shared_examples.rb b/spec/support/shared_examples/services/issuable_links/destroyable_issuable_links_shared_examples.rb
index cc170c6544d..1532e870dcc 100644
--- a/spec/support/shared_examples/services/issuable_links/destroyable_issuable_links_shared_examples.rb
+++ b/spec/support/shared_examples/services/issuable_links/destroyable_issuable_links_shared_examples.rb
@@ -1,10 +1,11 @@
# frozen_string_literal: true
-RSpec.shared_examples 'a destroyable issuable link' do
+RSpec.shared_examples 'a destroyable issuable link' do |required_role: :reporter|
context 'when successfully removes an issuable link' do
before do
- issuable_link.source.resource_parent.add_reporter(user)
- issuable_link.target.resource_parent.add_reporter(user)
+ [issuable_link.target, issuable_link.source].each do |issuable|
+ issuable.resource_parent.try(:"add_#{required_role}", user)
+ end
end
it 'removes related issue' do
diff --git a/spec/support/shared_examples/services/repositories/housekeeping_shared_examples.rb b/spec/support/shared_examples/services/repositories/housekeeping_shared_examples.rb
index 8a937303711..8cc71230ba4 100644
--- a/spec/support/shared_examples/services/repositories/housekeeping_shared_examples.rb
+++ b/spec/support/shared_examples/services/repositories/housekeeping_shared_examples.rb
@@ -65,12 +65,9 @@ RSpec.shared_examples 'housekeeps repository' do
# At push 200
expect(resource.git_garbage_collect_worker_klass).to receive(:perform_async).with(resource.id, :gc, :the_lease_key, :the_uuid)
.once
- # At push 50, 100, 150
- expect(resource.git_garbage_collect_worker_klass).to receive(:perform_async).with(resource.id, :full_repack, :the_lease_key, :the_uuid)
- .exactly(3).times
- # At push 10, 20, ... (except those above)
+ # At push 10, 20, ... (except the gc call)
expect(resource.git_garbage_collect_worker_klass).to receive(:perform_async).with(resource.id, :incremental_repack, :the_lease_key, :the_uuid)
- .exactly(16).times
+ .exactly(19).times
201.times do
subject.increment!
@@ -79,37 +76,6 @@ RSpec.shared_examples 'housekeeps repository' do
expect(resource.pushes_since_gc).to eq(1)
end
-
- context 'when optimized_repository feature flag is disabled' do
- before do
- stub_feature_flags(optimized_housekeeping: false)
- end
-
- it 'calls also the garbage collect worker with pack_refs every 6 commits' do
- allow(subject).to receive(:try_obtain_lease).and_return(:the_uuid)
- allow(subject).to receive(:lease_key).and_return(:the_lease_key)
-
- # At push 200
- expect(resource.git_garbage_collect_worker_klass).to receive(:perform_async).with(resource.id, :gc, :the_lease_key, :the_uuid)
- .once
- # At push 50, 100, 150
- expect(resource.git_garbage_collect_worker_klass).to receive(:perform_async).with(resource.id, :full_repack, :the_lease_key, :the_uuid)
- .exactly(3).times
- # At push 10, 20, ... (except those above)
- expect(resource.git_garbage_collect_worker_klass).to receive(:perform_async).with(resource.id, :incremental_repack, :the_lease_key, :the_uuid)
- .exactly(16).times
- # At push 6, 12, 18, ... (except those above)
- expect(resource.git_garbage_collect_worker_klass).to receive(:perform_async).with(resource.id, :pack_refs, :the_lease_key, :the_uuid)
- .exactly(27).times
-
- 201.times do
- subject.increment!
- subject.execute if subject.needed?
- end
-
- expect(resource.pushes_since_gc).to eq(1)
- end
- end
end
it 'runs the task specifically requested' do
@@ -136,15 +102,11 @@ RSpec.shared_examples 'housekeeps repository' do
expect(subject.needed?).to eq(true)
end
- context 'when optimized_housekeeping is disabled' do
- before do
- stub_feature_flags(optimized_housekeeping: false)
- end
+ it 'returns true when incremental repack period is not a multiple of gc period' do
+ allow(Gitlab::CurrentSettings).to receive(:housekeeping_incremental_repack_period).and_return(12)
+ allow(resource).to receive(:pushes_since_gc).and_return(200)
- it 'returns true pack refs is needed' do
- allow(resource).to receive(:pushes_since_gc).and_return(described_class::PACK_REFS_PERIOD)
- expect(subject.needed?).to eq(true)
- end
+ expect(subject.needed?).to eq(true)
end
end
diff --git a/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb b/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb
index 105c4247ff7..716be8c6210 100644
--- a/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb
+++ b/spec/support/shared_examples/services/security/ci_configuration/create_service_shared_examples.rb
@@ -88,6 +88,68 @@ RSpec.shared_examples_for 'services security ci configuration create service' do
end
end
+ context 'when existing ci config contains anchors/aliases' do
+ let(:params) { {} }
+ let(:unsupported_yaml) do
+ <<-YAML
+ image: python:latest
+
+ cache: &global_cache
+ key: 'common-cache'
+ paths:
+ - .cache/pip
+ - venv/
+
+ test:
+ cache:
+ <<: *global_cache
+ key: 'custom-cache'
+ script:
+ - python setup.py test
+ - pip install tox flake8 # you can also use tox
+ - tox -e py36,flake8
+ YAML
+ end
+
+ it 'fails with error' do
+ expect(project).to receive(:ci_config_for).and_return(unsupported_yaml)
+
+ expect { result }.to raise_error(Gitlab::Graphql::Errors::MutationError, '.gitlab-ci.yml with aliases/anchors is not supported. Please change the CI configuration manually.')
+ end
+ end
+
+ context 'when parsing existing ci config gives a Psych error' do
+ let(:params) { {} }
+ let(:invalid_yaml) do
+ <<-YAML
+ image: python:latest
+
+ test:
+ script:
+ - python setup.py test
+ - pip install tox flake8 # you can also use tox
+ - tox -e py36,flake8
+ YAML
+ end
+
+ it 'fails with error' do
+ expect(project).to receive(:ci_config_for).and_return(invalid_yaml)
+ expect(YAML).to receive(:safe_load).and_raise(Psych::Exception)
+
+ expect { result }.to raise_error(Gitlab::Graphql::Errors::MutationError, /merge request creation mutation failed/)
+ end
+ end
+
+ context 'when parsing existing ci config gives any other error' do
+ let(:params) { {} }
+ let_it_be(:repository) { project.repository }
+
+ it 'is successful' do
+ expect(repository).to receive(:root_ref_sha).and_raise(StandardError)
+ expect(result.status).to eq(:success)
+ end
+ end
+
unless skip_w_params
context 'with parameters' do
let(:params) { non_empty_params }
diff --git a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
index 09ebc495e61..8ec955940c0 100644
--- a/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
+++ b/spec/support/shared_examples/workers/batched_background_migration_worker_shared_examples.rb
@@ -256,6 +256,7 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
Class.new(Gitlab::BackgroundMigration::BatchedMigrationJob) do
job_arguments :matching_status
operation_name :update_all
+ feature_category :code_review_workflow
def perform
each_sub_batch(
@@ -325,16 +326,16 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
stub_const('Gitlab::BackgroundMigration::ExampleDataMigration', migration_class)
end
- shared_examples 'batched background migration execution' do
- subject(:full_migration_run) do
- # process all batches, then do an extra execution to mark the job as finished
- (number_of_batches + 1).times do
- described_class.new.perform
+ subject(:full_migration_run) do
+ # process all batches, then do an extra execution to mark the job as finished
+ (number_of_batches + 1).times do
+ described_class.new.perform
- travel_to((migration.interval + described_class::INTERVAL_VARIANCE).seconds.from_now)
- end
+ travel_to((migration.interval + described_class::INTERVAL_VARIANCE).seconds.from_now)
end
+ end
+ shared_examples 'batched background migration execution' do
it 'marks the migration record as finished' do
expect { full_migration_run }.to change { migration.reload.status }.from(1).to(3) # active -> finished
end
@@ -404,6 +405,15 @@ RSpec.shared_examples 'it runs batched background migration jobs' do |tracking_d
end
it_behaves_like 'batched background migration execution'
+
+ it 'assigns proper feature category to the context and the worker' do
+ expected_feature_category = migration_class.feature_category.to_s
+
+ expect { full_migration_run }.to change {
+ Gitlab::ApplicationContext.current["meta.feature_category"]
+ }.to(expected_feature_category)
+ .and change { described_class.get_feature_category }.from(:database).to(expected_feature_category)
+ end
end
context 'when parallel execution is enabled', :sidekiq_inline do
diff --git a/spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb b/spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb
index 503e331ea2e..ba1bdfa7aa8 100644
--- a/spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb
+++ b/spec/support/shared_examples/workers/concerns/git_garbage_collect_methods_shared_examples.rb
@@ -24,19 +24,6 @@ RSpec.shared_examples 'can collect git garbage' do |update_statistics: true|
subject.perform(*params)
end
-
- context 'when optimized_housekeeping feature is disabled' do
- before do
- stub_feature_flags(optimized_housekeeping: false)
- end
-
- specify do
- expect(subject).to receive(:get_gitaly_client).with(task, repository.raw_repository).and_return(repository_service)
- expect(repository_service).to receive(gitaly_task)
-
- subject.perform(*params)
- end
- end
end
shared_examples 'it updates the resource statistics' do
@@ -91,20 +78,6 @@ RSpec.shared_examples 'can collect git garbage' do |update_statistics: true|
expect { subject.perform(*params) }.to raise_exception(Gitlab::Git::Repository::NoRepository)
end
-
- context 'when optimized_housekeeping feature flag is disabled' do
- before do
- stub_feature_flags(optimized_housekeeping: false)
- end
-
- it 'handles gRPC errors' do
- allow_next_instance_of(Gitlab::GitalyClient::RepositoryService, repository.raw_repository) do |instance|
- allow(instance).to receive(:garbage_collect).and_raise(GRPC::NotFound)
- end
-
- expect { subject.perform(*params) }.to raise_exception(Gitlab::Git::Repository::NoRepository)
- end
- end
end
context 'with different lease than the active one' do
@@ -161,51 +134,6 @@ RSpec.shared_examples 'can collect git garbage' do |update_statistics: true|
end
end
- context 'repack_full' do
- let(:task) { :full_repack }
- let(:gitaly_task) { :repack_full }
-
- before do
- expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
- end
-
- it_behaves_like 'it calls Gitaly'
- it_behaves_like 'it updates the resource statistics' if update_statistics
- end
-
- context 'pack_refs' do
- let(:task) { :pack_refs }
- let(:gitaly_task) { :pack_refs }
-
- before do
- expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
- end
-
- it_behaves_like 'it calls Gitaly' do
- let(:repository_service) { instance_double(Gitlab::GitalyClient::RefService) }
- end
-
- it 'does not update the resource statistics' do
- expect(statistics_service_klass).not_to receive(:new)
-
- subject.perform(*params)
- end
- end
-
- context 'repack_incremental' do
- let(:task) { :incremental_repack }
- let(:gitaly_task) { :repack_incremental }
-
- before do
- expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
-
- statistics_keys.delete(:repository_size)
- end
-
- it_behaves_like 'it calls Gitaly'
- it_behaves_like 'it updates the resource statistics' if update_statistics
- end
-
context 'prune' do
before do
expect(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
@@ -219,41 +147,5 @@ RSpec.shared_examples 'can collect git garbage' do |update_statistics: true|
subject.perform(resource.id, 'prune', lease_key, lease_uuid)
end
end
-
- shared_examples 'gc tasks' do
- before do
- allow(subject).to receive(:get_lease_uuid).and_return(lease_uuid)
- allow(subject).to receive(:bitmaps_enabled?).and_return(bitmaps_enabled)
-
- stub_feature_flags(optimized_housekeeping: false)
- end
-
- it 'cleans up repository after finishing' do
- expect(resource).to receive(:cleanup).and_call_original
-
- subject.perform(resource.id, 'gc', lease_key, lease_uuid)
- end
-
- it 'prune calls garbage_collect with the option prune: true' do
- repository_service = instance_double(Gitlab::GitalyClient::RepositoryService)
-
- expect(subject).to receive(:get_gitaly_client).with(:prune, repository.raw_repository).and_return(repository_service)
- expect(repository_service).to receive(:garbage_collect).with(bitmaps_enabled, prune: true)
-
- subject.perform(resource.id, 'prune', lease_key, lease_uuid)
- end
- end
-
- context 'with bitmaps enabled' do
- let(:bitmaps_enabled) { true }
-
- include_examples 'gc tasks'
- end
-
- context 'with bitmaps disabled' do
- let(:bitmaps_enabled) { false }
-
- include_examples 'gc tasks'
- end
end
end
diff --git a/spec/support/shared_examples/workers/update_repository_move_shared_examples.rb b/spec/support/shared_examples/workers/update_repository_move_shared_examples.rb
index c50dc6d5372..9b7183a9eac 100644
--- a/spec/support/shared_examples/workers/update_repository_move_shared_examples.rb
+++ b/spec/support/shared_examples/workers/update_repository_move_shared_examples.rb
@@ -1,6 +1,10 @@
# frozen_string_literal: true
RSpec.shared_examples 'an update storage move worker' do
+ it 'has the `until_executed` deduplicate strategy' do
+ expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
+ end
+
describe '#perform' do
let(:service) { double(:update_repository_storage_service) }
diff --git a/spec/support/tmpdir.rb b/spec/support/tmpdir.rb
new file mode 100644
index 00000000000..ea8e26d2878
--- /dev/null
+++ b/spec/support/tmpdir.rb
@@ -0,0 +1,18 @@
+# frozen_string_literal: true
+
+module TmpdirHelper
+ def mktmpdir
+ @tmpdir_helper_dirs ||= []
+ @tmpdir_helper_dirs << Dir.mktmpdir
+ @tmpdir_helper_dirs.last
+ end
+
+ def self.included(base)
+ base.after do
+ if @tmpdir_helper_dirs
+ FileUtils.rm_rf(@tmpdir_helper_dirs)
+ @tmpdir_helper_dirs = nil
+ end
+ end
+ end
+end
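To show how the new helper is intended to be used, a hypothetical spec (the described subject and file name are placeholders, not part of the diff):

    RSpec.describe 'something that writes files' do
      include TmpdirHelper

      it 'writes into a scratch directory that is removed after the example' do
        dir = mktmpdir
        File.write(File.join(dir, 'example.txt'), 'content')

        expect(File.read(File.join(dir, 'example.txt'))).to eq('content')
        # The directory is removed by the after hook installed in TmpdirHelper.included.
      end
    end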
diff --git a/spec/support_specs/license_metadata_tags_spec.rb b/spec/support_specs/license_metadata_tags_spec.rb
new file mode 100644
index 00000000000..f89a32574c7
--- /dev/null
+++ b/spec/support_specs/license_metadata_tags_spec.rb
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+# These specs only make sense if ee/spec/spec_helper is loaded
+# In FOSS_ONLY=1 mode, nothing should happen
+RSpec.describe 'license metadata tags', feature_category: :sm_provisioning, if: Gitlab.ee? do
+ it 'applies the without_license metadata tag by default' do |example|
+ expect(example.metadata[:without_license]).to eq(true)
+ end
+
+ it 'does not apply the with_license metadata tag by default' do |example|
+ expect(example.metadata[:with_license]).to be_nil
+ end
+
+ it 'does not have a current license' do
+ expect(License.current).to be_nil
+ end
+
+ context 'with with_license tag', :with_license do
+ it 'has a current license' do
+ expect(License.current).to be_present
+ end
+ end
+
+ context 'with without_license tag', :without_license do
+ it 'does not have a current license' do
+ expect(License.current).to be_nil
+ end
+ end
+end
diff --git a/spec/tasks/cache/clear/redis_spec.rb b/spec/tasks/cache/clear/redis_spec.rb
index e8c62bbe124..9b6ea3891d9 100644
--- a/spec/tasks/cache/clear/redis_spec.rb
+++ b/spec/tasks/cache/clear/redis_spec.rb
@@ -2,13 +2,16 @@
require 'rake_helper'
-RSpec.describe 'clearing redis cache', :clean_gitlab_redis_cache, :silence_stdout do
+RSpec.describe 'clearing redis cache', :clean_gitlab_redis_repository_cache, :clean_gitlab_redis_cache,
+ :silence_stdout, feature_category: :redis do
before do
Rake.application.rake_require 'tasks/cache'
end
+ let(:keys_size_changed) { -1 }
+
shared_examples 'clears the cache' do
- it { expect { run_rake_task('cache:clear:redis') }.to change { redis_keys.size }.by(-1) }
+ it { expect { run_rake_task('cache:clear:redis') }.to change { redis_keys.size }.by(keys_size_changed) }
end
describe 'clearing pipeline status cache' do
@@ -17,15 +20,37 @@ RSpec.describe 'clearing redis cache', :clean_gitlab_redis_cache, :silence_stdou
create(:ci_pipeline, project: project).project.pipeline_status
end
- before do
- allow(pipeline_status).to receive(:loaded).and_return(nil)
- end
+ context 'when use_primary_and_secondary_stores_for_repository_cache MultiStore FF is enabled' do
+ # Initially, project:{id}:pipeline_status is explicitly cached in Gitlab::Redis::Cache, whereas repository is
+ # cached in Rails.cache (which is a NullStore).
+ # With the MultiStore feature flag enabled, we use Gitlab::Redis::RepositoryCache instance as primary store and
+ # Gitlab::Redis::Cache as secondary store.
+ # This ends up storing 2 extra keys (exists? and root_ref) in both Gitlab::Redis::RepositoryCache and
+ # Gitlab::Redis::Cache instances when loading project.pipeline_status
+ let(:keys_size_changed) { -3 }
+
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: true)
+ allow(pipeline_status).to receive(:loaded).and_return(nil)
+ end
+
+ it 'clears pipeline status cache' do
+ expect { run_rake_task('cache:clear:redis') }.to change { pipeline_status.has_cache? }
+ end
- it 'clears pipeline status cache' do
- expect { run_rake_task('cache:clear:redis') }.to change { pipeline_status.has_cache? }
+ it_behaves_like 'clears the cache'
end
- it_behaves_like 'clears the cache'
+ context 'when use_primary_and_secondary_stores_for_repository_cache and
+ use_primary_store_as_default_for_repository_cache feature flags are disabled' do
+ before do
+ stub_feature_flags(use_primary_and_secondary_stores_for_repository_cache: false)
+ stub_feature_flags(use_primary_store_as_default_for_repository_cache: false)
+ allow(pipeline_status).to receive(:loaded).and_return(nil)
+ end
+
+ it_behaves_like 'clears the cache'
+ end
end
describe 'clearing set caches' do
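The -1 default and the -3 override above are resolved by ordinary RSpec let lookup: the shared example reads keys_size_changed from whichever context includes it, so each context only has to declare its expected key delta. A stripped-down, self-contained sketch of the pattern (names are illustrative):

RSpec.shared_examples 'drains the queue' do
  it { expect { queue.pop(expected_delta.abs) }.to change { queue.size }.by(expected_delta) }
end

RSpec.describe 'let overrides seen by shared examples' do
  let(:queue) { [1, 2, 3, 4] }
  let(:expected_delta) { -1 } # default, like keys_size_changed above

  include_examples 'drains the queue'

  context 'when more entries are removed' do
    let(:expected_delta) { -3 } # the override wins inside this context

    include_examples 'drains the queue'
  end
end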
diff --git a/spec/tasks/gitlab/backup_rake_spec.rb b/spec/tasks/gitlab/backup_rake_spec.rb
index dc74f25db87..972851cba8c 100644
--- a/spec/tasks/gitlab/backup_rake_spec.rb
+++ b/spec/tasks/gitlab/backup_rake_spec.rb
@@ -2,7 +2,7 @@
require 'rake_helper'
-RSpec.describe 'gitlab:app namespace rake task', :delete do
+RSpec.describe 'gitlab:app namespace rake task', :delete, feature_category: :backup_restore do
let(:enable_registry) { true }
let(:backup_restore_pid_path) { "#{Rails.application.root}/tmp/backup_restore.pid" }
let(:backup_tasks) { %w[db repo uploads builds artifacts pages lfs terraform_state registry packages] }
diff --git a/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb b/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
index b03e964ce87..9cdbf8539c6 100644
--- a/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
+++ b/spec/tasks/gitlab/db/decomposition/rollback/bump_ci_sequences_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:db:decomposition:rollback:bump_ci_sequences', :silence_stdout,
- :suppress_gitlab_schemas_validate_connection do
+ :suppress_gitlab_schemas_validate_connection, feature_category: :pods do
before :all do
Rake.application.rake_require 'tasks/gitlab/db/decomposition/rollback/bump_ci_sequences'
diff --git a/spec/tasks/gitlab/db/lock_writes_rake_spec.rb b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
index e3155d3c377..a0a99b65767 100644
--- a/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
+++ b/spec/tasks/gitlab/db/lock_writes_rake_spec.rb
@@ -2,8 +2,8 @@
require 'rake_helper'
-RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_record_base,
- :suppress_gitlab_schemas_validate_connection do
+RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_record_base, :delete,
+ :suppress_gitlab_schemas_validate_connection, feature_category: :pods do
before :all do
Rake.application.rake_require 'active_record/railties/databases'
Rake.application.rake_require 'tasks/seed_fu'
@@ -14,10 +14,10 @@ RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_r
Rake::Task.define_task :environment
end
- let!(:project) { create(:project) }
- let!(:ci_build) { create(:ci_build) }
let(:main_connection) { ApplicationRecord.connection }
let(:ci_connection) { Ci::ApplicationRecord.connection }
+ let!(:user) { create(:user) }
+ let!(:ci_build) { create(:ci_build) }
let(:detached_partition_table) { '_test_gitlab_main_part_20220101' }
@@ -37,10 +37,23 @@ RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_r
drop_after: Time.current
)
end
+ end
+
+ after do
+ run_rake_task('gitlab:db:unlock_writes')
+ end
- allow(Gitlab::Database::GitlabSchema).to receive(:table_schema).and_call_original
- allow(Gitlab::Database::GitlabSchema).to receive(:table_schema)
- .with(detached_partition_table).and_return(:gitlab_main)
+ after(:all) do
+ drop_detached_partition_sql = <<~SQL
+ DROP TABLE IF EXISTS gitlab_partitions_dynamic._test_gitlab_main_part_20220101
+ SQL
+
+ ApplicationRecord.connection.execute(drop_detached_partition_sql)
+ Ci::ApplicationRecord.connection.execute(drop_detached_partition_sql)
+
+ Gitlab::Database::SharedModel.using_connection(ApplicationRecord.connection) do
+ Postgresql::DetachedPartition.delete_all
+ end
end
context 'single database' do
@@ -60,7 +73,7 @@ RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_r
    it 'is still able to modify tables that belong to the two main schemas' do
run_rake_task('gitlab:db:lock_writes')
expect do
- Project.last.touch
+ User.last.touch
Ci::Build.last.touch
end.not_to raise_error
end
@@ -81,7 +94,7 @@ RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_r
context 'when locking writes' do
it 'still allows writes on the tables with the correct connections' do
- Project.update_all(updated_at: Time.now)
+ User.update_all(updated_at: Time.now)
Ci::Build.update_all(updated_at: Time.now)
end
@@ -90,7 +103,7 @@ RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_r
connections.each do |connection|
Gitlab::Database::SharedModel.using_connection(connection) do
LooseForeignKeys::DeletedRecord.create!(
- fully_qualified_table_name: "public.projects",
+ fully_qualified_table_name: "public.users",
primary_key_value: 1,
cleanup_attempts: 0
)
@@ -101,8 +114,8 @@ RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_r
it 'prevents writes on the main tables on the ci database' do
run_rake_task('gitlab:db:lock_writes')
expect do
- ci_connection.execute("delete from projects")
- end.to raise_error(ActiveRecord::StatementInvalid, /Table: "projects" is write protected/)
+ ci_connection.execute("delete from users")
+ end.to raise_error(ActiveRecord::StatementInvalid, /Table: "users" is write protected/)
end
it 'prevents writes on the ci tables on the main database' do
@@ -135,7 +148,7 @@ RSpec.describe 'gitlab:db:lock_writes', :silence_stdout, :reestablished_active_r
it 'allows writes on the main tables on the ci database' do
run_rake_task('gitlab:db:lock_writes')
expect do
- ci_connection.execute("delete from projects")
+ ci_connection.execute("delete from users")
end.not_to raise_error
end
diff --git a/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb b/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb
index e95c2e241a8..a7ced4a69f3 100644
--- a/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb
+++ b/spec/tasks/gitlab/db/truncate_legacy_tables_rake_spec.rb
@@ -3,7 +3,7 @@
require 'rake_helper'
RSpec.describe 'gitlab:db:truncate_legacy_tables', :silence_stdout, :reestablished_active_record_base,
- :suppress_gitlab_schemas_validate_connection do
+ :suppress_gitlab_schemas_validate_connection, feature_category: :pods do
let(:main_connection) { ApplicationRecord.connection }
let(:ci_connection) { Ci::ApplicationRecord.connection }
let(:test_gitlab_main_table) { '_test_gitlab_main_table' }
@@ -56,14 +56,16 @@ RSpec.describe 'gitlab:db:truncate_legacy_tables', :silence_stdout, :reestablish
Gitlab::Database::LockWritesManager.new(
table_name: test_gitlab_ci_table,
connection: main_connection,
- database_name: "main"
+ database_name: "main",
+ with_retries: false
).lock_writes
# Locking main table on the ci database
Gitlab::Database::LockWritesManager.new(
table_name: test_gitlab_main_table,
connection: ci_connection,
- database_name: "ci"
+ database_name: "ci",
+ with_retries: false
).lock_writes
end
diff --git a/spec/tasks/gitlab/db_rake_spec.rb b/spec/tasks/gitlab/db_rake_spec.rb
index 22abfc33d1b..7671c65d22c 100644
--- a/spec/tasks/gitlab/db_rake_spec.rb
+++ b/spec/tasks/gitlab/db_rake_spec.rb
@@ -22,7 +22,7 @@ RSpec.describe 'gitlab:db namespace rake task', :silence_stdout do
describe 'mark_migration_complete' do
context 'with a single database' do
- let(:main_model) { ActiveRecord::Base }
+ let(:main_model) { ApplicationRecord }
before do
skip_if_multiple_databases_are_setup
diff --git a/spec/tasks/gitlab/security/update_banned_ssh_keys_rake_spec.rb b/spec/tasks/gitlab/security/update_banned_ssh_keys_rake_spec.rb
new file mode 100644
index 00000000000..85f71da8c97
--- /dev/null
+++ b/spec/tasks/gitlab/security/update_banned_ssh_keys_rake_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+# We need to load the constants here, or else stubbed
+# constants will be overwritten when `require 'git'`
+# is hit in the rake task.
+require 'git'
+
+RSpec.describe 'gitlab:security namespace rake tasks', :silence_stdout, feature_category: :security do
+ let(:fixture_path) { Rails.root.join('spec/fixtures/tasks/gitlab/security') }
+ let(:output_file) { File.join(__dir__, 'tmp/banned_keys_test.yml') }
+ let(:git_url) { 'https://github.com/rapid7/ssh-badkeys.git' }
+ let(:mock_git) { class_double('Git') }
+
+ subject(:execute) { run_rake_task('gitlab:security:update_banned_ssh_keys', git_url, output_file) }
+
+ before do
+ Rake.application.rake_require 'tasks/gitlab/security/update_banned_ssh_keys'
+ stub_const('Git', mock_git)
+ allow(Dir).to receive(:mktmpdir).and_return(fixture_path)
+ allow(mock_git).to receive(:clone)
+ end
+
+ around do |example|
+ test_dir = File.dirname(output_file)
+ FileUtils.mkdir_p(test_dir)
+
+ example.run
+
+ FileUtils.rm_rf(test_dir)
+ end
+
+ it 'adds banned keys when clone is successful' do
+ expect(mock_git).to receive(:clone).with(git_url, 'ssh-badkeys', path: fixture_path)
+
+ execute
+
+ actual = File.read(output_file)
+ expected = File.read(File.join(fixture_path, 'expected_banned_keys.yml'))
+ expect(actual).to eq(expected)
+ end
+
+ it 'exits when clone fails' do
+ expect(mock_git).to receive(:clone).with(git_url, 'ssh-badkeys', path: fixture_path).and_raise(RuntimeError)
+
+ expect { execute }.to raise_error(SystemExit)
+ end
+
+  it 'exits when the max config size is reached' do
+ stub_const('MAX_CONFIG_SIZE', 0.bytes)
+ expect(mock_git).to receive(:clone).with(git_url, 'ssh-badkeys', path: fixture_path)
+
+ expect { execute }.to output(/banned_ssh_keys.yml has grown too large - halting execution/).to_stdout
+ end
+end
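The rake task under test is not part of this diff. As a rough sketch of the behaviour these examples pin down — clone the ssh-badkeys repository into a temporary directory, merge its keys into the YAML file, print a warning and stop once the file would exceed MAX_CONFIG_SIZE, and exit non-zero when the clone fails — with every name, path, and the key-file layout assumed rather than taken from the real task:

require 'git'
require 'yaml'
require 'tmpdir'

MAX_CONFIG_SIZE = 100 * 1024 # bytes; the real limit is whatever the task defines

def update_banned_ssh_keys(git_url, output_file)
  Dir.mktmpdir do |tmp|
    Git.clone(git_url, 'ssh-badkeys', path: tmp)

    banned = File.exist?(output_file) ? (YAML.safe_load(File.read(output_file)) || []) : []
    Dir.glob(File.join(tmp, 'ssh-badkeys', '**', '*.pub')).sort.each do |key_file| # layout assumed
      banned << File.read(key_file).strip

      next if banned.to_yaml.bytesize <= MAX_CONFIG_SIZE

      puts 'banned_ssh_keys.yml has grown too large - halting execution'
      break
    end

    File.write(output_file, banned.to_yaml)
  end
rescue StandardError => e
  warn "could not update banned SSH keys: #{e.message}"
  exit 1 # raises SystemExit, which is what the clone-failure example asserts
end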
diff --git a/spec/tasks/gitlab/seed/runner_fleet_rake_spec.rb b/spec/tasks/gitlab/seed/runner_fleet_rake_spec.rb
new file mode 100644
index 00000000000..e0390d2aa09
--- /dev/null
+++ b/spec/tasks/gitlab/seed/runner_fleet_rake_spec.rb
@@ -0,0 +1,43 @@
+# frozen_string_literal: true
+
+require 'rake_helper'
+
+RSpec.describe 'gitlab:seed:runner_fleet rake task', :silence_stdout, feature_category: :runner_fleet do
+ let(:registration_prefix) { 'rf-' }
+ let(:runner_count) { 10 }
+ let(:job_count) { 20 }
+ let(:task_params) { [username, registration_prefix, runner_count, job_count] }
+ let(:runner_releases_url) do
+ ::Gitlab::CurrentSettings.current_application_settings.public_runner_releases_url
+ end
+
+ before do
+ Rake.application.rake_require('tasks/gitlab/seed/runner_fleet')
+
+ WebMock.stub_request(:get, runner_releases_url).to_return(
+ body: '[]',
+ status: 200,
+ headers: { 'Content-Type' => 'application/json' }
+ )
+ end
+
+ subject(:rake_task) { run_rake_task('gitlab:seed:runner_fleet', task_params) }
+
+ context 'with admin username', :enable_admin_mode do
+ let(:username) { 'runner_fleet_seed' }
+ let!(:admin) { create(:user, :admin, username: username) }
+
+ it 'performs runner fleet seed successfully' do
+ expect { rake_task }
+ .to change { Group.count }.by(6)
+ .and change { Project.count }.by(3)
+ .and change { Ci::Runner.count }.by(runner_count)
+ .and change { Ci::Runner.instance_type.count }.by(1)
+ .and change { Ci::Build.count }.by(job_count)
+
+ expect(Group.search(registration_prefix).count).to eq 6
+ expect(Project.search(registration_prefix).count).to eq 3
+ expect(Ci::Runner.search(registration_prefix).count).to eq runner_count
+ end
+ end
+end
diff --git a/spec/tooling/danger/specs_spec.rb b/spec/tooling/danger/specs_spec.rb
index dcc1f592062..422923827a8 100644
--- a/spec/tooling/danger/specs_spec.rb
+++ b/spec/tooling/danger/specs_spec.rb
@@ -245,15 +245,16 @@ RSpec.describe Tooling::Danger::Specs, feature_category: :tooling do
" let_it_be(:user) { create(:user) }",
" end",
" describe 'GET \"time_summary\"' do",
- " end"
- ]
- end
-
- let(:matching_lines) do
- [
- "+ RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController, feature_category: :planning_analytics do",
- "+RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController do",
- "+ RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController do"
+ " end",
+ " \n",
+ "RSpec.describe Projects :aggregate_failures,",
+ " feature_category: planning_analytics do",
+ " \n",
+ "RSpec.describe Epics :aggregate_failures,",
+ " ee: true do",
+ "\n",
+ "RSpec.describe Issues :aggregate_failures,",
+ " feature_category: :team_planning do"
]
end
@@ -264,14 +265,24 @@ RSpec.describe Tooling::Danger::Specs, feature_category: :tooling do
"+ let_it_be(:user) { create(:user) }",
"- end",
"+ describe 'GET \"time_summary\"' do",
- "+ RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController do"
+ "+ RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController do",
+ "+RSpec.describe Projects :aggregate_failures,",
+ "+ feature_category: planning_analytics do",
+ "+RSpec.describe Epics :aggregate_failures,",
+ "+ ee: true do",
+ "+RSpec.describe Issues :aggregate_failures,"
]
end
+ before do
+ allow(specs.helper).to receive(:changed_lines).with(filename).and_return(changed_lines)
+ end
+
it 'adds suggestions at the correct lines', :aggregate_failures do
[
{ suggested_line: "RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController do", number: 5 },
- { suggested_line: " RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController do", number: 10 }
+ { suggested_line: " RSpec.describe Projects::Analytics::CycleAnalytics::SummaryController do", number: 10 },
+ { suggested_line: "RSpec.describe Epics :aggregate_failures,", number: 19 }
].each do |test_case|
comment = format(template, suggested_line: test_case[:suggested_line])
diff --git a/spec/tooling/danger/user_types_spec.rb b/spec/tooling/danger/user_types_spec.rb
deleted file mode 100644
index 53556601212..00000000000
--- a/spec/tooling/danger/user_types_spec.rb
+++ /dev/null
@@ -1,56 +0,0 @@
-# frozen_string_literal: true
-
-require 'gitlab-dangerfiles'
-require 'gitlab/dangerfiles/spec_helper'
-require_relative '../../../tooling/danger/user_types'
-
-RSpec.describe Tooling::Danger::UserTypes, feature_category: :subscription_cost_management do
- include_context 'with dangerfile'
-
- let(:fake_danger) { DangerSpecHelper.fake_danger.include(described_class) }
- let(:user_types) { fake_danger.new(helper: fake_helper) }
-
- describe 'changed files' do
- subject(:bot_user_types_change_warning) { user_types.bot_user_types_change_warning }
-
- before do
- allow(fake_helper).to receive(:modified_files).and_return(modified_files)
- allow(fake_helper).to receive(:changed_lines).and_return(changed_lines)
- end
-
- context 'when has_user_type.rb file is not impacted' do
- let(:modified_files) { ['app/models/concerns/importable.rb'] }
- let(:changed_lines) { ['+ANY_CHANGES'] }
-
- it "doesn't add any warnings" do
- expect(user_types).not_to receive(:warn)
-
- bot_user_types_change_warning
- end
- end
-
- context 'when the has_user_type.rb file is impacted' do
- let(:modified_files) { ['app/models/concerns/has_user_type.rb'] }
-
- context 'with BOT_USER_TYPES changes' do
- let(:changed_lines) { ['+BOT_USER_TYPES'] }
-
- it 'adds warning' do
- expect(user_types).to receive(:warn).with(described_class::BOT_USER_TYPES_CHANGED_WARNING)
-
- bot_user_types_change_warning
- end
- end
-
- context 'without BOT_USER_TYPES changes' do
- let(:changed_lines) { ['+OTHER_CHANGES'] }
-
- it "doesn't add any warnings" do
- expect(user_types).not_to receive(:warn)
-
- bot_user_types_change_warning
- end
- end
- end
- end
-end
diff --git a/spec/tooling/lib/tooling/view_to_js_mappings_spec.rb b/spec/tooling/lib/tooling/view_to_js_mappings_spec.rb
new file mode 100644
index 00000000000..b09df2a9200
--- /dev/null
+++ b/spec/tooling/lib/tooling/view_to_js_mappings_spec.rb
@@ -0,0 +1,356 @@
+# frozen_string_literal: true
+
+require 'tempfile'
+require_relative '../../../../tooling/lib/tooling/view_to_js_mappings'
+
+RSpec.describe Tooling::ViewToJsMappings, feature_category: :tooling do
+  # We set up temporary folders, and these readers give access to their paths
+ attr_accessor :view_base_folder, :js_base_folder
+
+ around do |example|
+ Dir.mktmpdir do |tmp_js_base_folder|
+ Dir.mktmpdir do |tmp_views_base_folder|
+ self.js_base_folder = tmp_js_base_folder
+ self.view_base_folder = tmp_views_base_folder
+
+ example.run
+ end
+ end
+ end
+
+ describe '#execute' do
+ let(:instance) do
+ described_class.new(
+ view_base_folder: view_base_folder,
+ js_base_folder: js_base_folder
+ )
+ end
+
+ let(:changed_files) { %W[#{view_base_folder}/index.html] }
+
+ subject { instance.execute(changed_files) }
+
+ context 'when no view files have been changed' do
+ before do
+ allow(instance).to receive(:view_files).and_return([])
+ end
+
+ it 'returns nothing' do
+ expect(subject).to match_array([])
+ end
+ end
+
+ context 'when some view files have been changed' do
+ before do
+ File.write("#{view_base_folder}/index.html", index_html_content)
+ end
+
+ context 'when they do not contain the HTML attribute value we search for' do
+ let(:index_html_content) do
+ <<~FILE
+ Beginning of file
+ End of file
+ FILE
+ end
+
+ it 'returns nothing' do
+ expect(subject).to match_array([])
+ end
+ end
+
+ context 'when they contain the HTML attribute value we search for' do
+ let(:index_html_content) do
+ <<~FILE
+ Beginning of file
+
+ <a id="js-some-id">A link</a>
+
+ End of file
+ FILE
+ end
+
+ context 'when no matching JS files are found' do
+ it 'returns nothing' do
+ expect(subject).to match_array([])
+ end
+ end
+
+ context 'when some matching JS files are found' do
+ let(:index_js_content) do
+ <<~FILE
+ Beginning of file
+
+ const isMainAwardsBlock = votesBlock.closest('#js-some-id.some_class').length;
+
+ End of file
+ FILE
+ end
+
+ before do
+ File.write("#{js_base_folder}/index.js", index_js_content)
+ end
+
+ it 'returns the matching JS files' do
+ expect(subject).to match_array(["#{js_base_folder}/index.js"])
+ end
+ end
+ end
+ end
+
+ context 'when rails partials are included in the file' do
+ before do
+ File.write("#{view_base_folder}/index.html", index_html_content)
+ File.write("#{view_base_folder}/_my-partial.html.haml", partial_file_content)
+ File.write("#{js_base_folder}/index.js", index_js_content)
+ end
+
+ let(:index_html_content) do
+ <<~FILE
+ Beginning of file
+
+ = render 'my-partial'
+
+ End of file
+ FILE
+ end
+
+ let(:partial_file_content) do
+ <<~FILE
+ Beginning of file
+
+ <a class="js-some-class">A link with class</a>
+
+ End of file
+ FILE
+ end
+
+ let(:index_js_content) do
+ <<~FILE
+ Beginning of file
+
+ const isMainAwardsBlock = votesBlock.closest('.js-some-class').length;
+
+ End of file
+ FILE
+ end
+
+ it 'scans those partials for the HTML attribute value' do
+ expect(subject).to match_array(["#{js_base_folder}/index.js"])
+ end
+ end
+ end
+
+ describe '#view_files' do
+ subject { described_class.new(view_base_folder: view_base_folder).view_files(changed_files) }
+
+ before do
+ File.write("#{js_base_folder}/index.js", "index.js")
+ File.write("#{view_base_folder}/index.html", "index.html")
+ end
+
+ context 'when no files were changed' do
+ let(:changed_files) { [] }
+
+ it 'returns an empty array' do
+ expect(subject).to match_array([])
+ end
+ end
+
+ context 'when no view files were changed' do
+ let(:changed_files) { ["#{js_base_folder}/index.js"] }
+
+ it 'returns an empty array' do
+ expect(subject).to match_array([])
+ end
+ end
+
+ context 'when view files were changed' do
+ let(:changed_files) { ["#{js_base_folder}/index.js", "#{view_base_folder}/index.html"] }
+
+ it 'returns the path to the view files' do
+ expect(subject).to match_array(["#{view_base_folder}/index.html"])
+ end
+ end
+
+ context 'when view files are deleted' do
+ let(:changed_files) { ["#{js_base_folder}/index.js", "#{view_base_folder}/deleted.html"] }
+
+ it 'returns an empty array' do
+ expect(subject).to match_array([])
+ end
+ end
+ end
+
+ describe '#folders_for_available_editions' do
+ let(:base_folder_path) { 'app/views' }
+
+ subject { described_class.new.folders_for_available_editions(base_folder_path) }
+
+ context 'when FOSS' do
+ before do
+ allow(GitlabEdition).to receive(:ee?).and_return(false)
+ allow(GitlabEdition).to receive(:jh?).and_return(false)
+ end
+
+ it 'returns the correct paths' do
+ expect(subject).to match_array([base_folder_path])
+ end
+ end
+
+ context 'when EE' do
+ before do
+ allow(GitlabEdition).to receive(:ee?).and_return(true)
+ allow(GitlabEdition).to receive(:jh?).and_return(false)
+ end
+
+ it 'returns the correct paths' do
+ expect(subject).to eq([base_folder_path, "ee/#{base_folder_path}"])
+ end
+ end
+
+ context 'when JiHu' do
+ before do
+ allow(GitlabEdition).to receive(:ee?).and_return(true)
+ allow(GitlabEdition).to receive(:jh?).and_return(true)
+ end
+
+ it 'returns the correct paths' do
+ expect(subject).to eq([base_folder_path, "ee/#{base_folder_path}", "jh/#{base_folder_path}"])
+ end
+ end
+ end
+
+ describe '#find_partials' do
+ subject { described_class.new(view_base_folder: view_base_folder).find_partials(file_path) }
+
+ let(:file_path) { "#{view_base_folder}/my_html_file.html" }
+
+ before do
+ File.write(file_path, file_content)
+ end
+
+ context 'when the file includes a partial' do
+ context 'when the partial is in the same folder as the view file' do
+ before do
+ File.write("#{view_base_folder}/_my-partial.html.haml", 'Hello from partial')
+ end
+
+ let(:file_content) do
+ <<~FILE
+ Beginning of file
+
+ = render "my-partial"
+
+ End of file
+ FILE
+ end
+
+ it "returns the partial file path" do
+ expect(subject).to match_array(["#{view_base_folder}/_my-partial.html.haml"])
+ end
+ end
+
+ context 'when the partial is in a subfolder' do
+ before do
+ FileUtils.mkdir_p("#{view_base_folder}/subfolder")
+
+ (1..12).each do |i|
+ FileUtils.touch "#{view_base_folder}/subfolder/_my-partial#{i}.html.haml"
+ end
+ end
+
+ let(:file_content) do
+ <<~FILE
+ Beginning of file
+
+ = render("subfolder/my-partial1")
+ = render "subfolder/my-partial2"
+ = render(partial: "subfolder/my-partial3")
+ = render partial: "subfolder/my-partial4"
+ = render(partial:"subfolder/my-partial5", path: 'else')
+ = render partial:"subfolder/my-partial6"
+ = render_if_exist("subfolder/my-partial7", path: 'else')
+ = render_if_exist "subfolder/my-partial8"
+ = render_if_exist(partial: "subfolder/my-partial9", path: 'else')
+ = render_if_exist partial: "subfolder/my-partial10"
+ = render_if_exist(partial:"subfolder/my-partial11", path: 'else')
+ = render_if_exist partial:"subfolder/my-partial12"
+
+ End of file
+ FILE
+ end
+
+ it "returns the partials file path" do
+ expect(subject).to match_array([
+ "#{view_base_folder}/subfolder/_my-partial1.html.haml",
+ "#{view_base_folder}/subfolder/_my-partial2.html.haml",
+ "#{view_base_folder}/subfolder/_my-partial3.html.haml",
+ "#{view_base_folder}/subfolder/_my-partial4.html.haml",
+ "#{view_base_folder}/subfolder/_my-partial5.html.haml",
+ "#{view_base_folder}/subfolder/_my-partial6.html.haml",
+ "#{view_base_folder}/subfolder/_my-partial7.html.haml",
+ "#{view_base_folder}/subfolder/_my-partial8.html.haml",
+ "#{view_base_folder}/subfolder/_my-partial9.html.haml",
+ "#{view_base_folder}/subfolder/_my-partial10.html.haml",
+ "#{view_base_folder}/subfolder/_my-partial11.html.haml",
+ "#{view_base_folder}/subfolder/_my-partial12.html.haml"
+ ])
+ end
+ end
+
+ context 'when the file does not include a partial' do
+ let(:file_content) do
+ <<~FILE
+ Beginning of file
+ End of file
+ FILE
+ end
+
+ it 'returns an empty array' do
+ expect(subject).to match_array([])
+ end
+ end
+ end
+ end
+
+ describe '#find_pattern_in_file' do
+ let(:subject) { described_class.new.find_pattern_in_file(file.path, /pattern/) }
+ let(:file) { Tempfile.new('find_pattern_in_file') }
+
+ before do
+ file.write(file_content)
+ file.close
+ end
+
+ context 'when the file contains the pattern' do
+ let(:file_content) do
+ <<~FILE
+ Beginning of file
+
+ pattern
+ pattern
+ pattern
+
+ End of file
+ FILE
+ end
+
+ it 'returns the pattern once' do
+ expect(subject).to match_array(%w[pattern])
+ end
+ end
+
+ context 'when the file does not contain the pattern' do
+ let(:file_content) do
+ <<~FILE
+ Beginning of file
+ End of file
+ FILE
+ end
+
+ it 'returns an empty array' do
+ expect(subject).to match_array([])
+ end
+ end
+ end
+end
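Taken together, these examples pin down the tool's public surface: construct it with a view folder and a JS folder, hand #execute the changed-file paths, and get back the JS files whose js- selectors appear in the changed views or in the partials they render. A usage sketch inferred from the specs (the folder paths are placeholders, not the tool's documented defaults):

require_relative 'tooling/lib/tooling/view_to_js_mappings'

mappings = Tooling::ViewToJsMappings.new(
  view_base_folder: 'app/views',
  js_base_folder: 'app/assets/javascripts'
)

# In CI the changed-file list would come from the merge request diff.
changed_files = ['app/views/projects/show.html.haml']
puts mappings.execute(changed_files) # => JS files referencing selectors used in those views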
diff --git a/spec/tooling/quality/test_level_spec.rb b/spec/tooling/quality/test_level_spec.rb
index 3f46b3e79f4..aac7d19c079 100644
--- a/spec/tooling/quality/test_level_spec.rb
+++ b/spec/tooling/quality/test_level_spec.rb
@@ -4,9 +4,9 @@ require 'fast_spec_helper'
require_relative '../../../tooling/quality/test_level'
-RSpec.describe Quality::TestLevel do
+RSpec.describe Quality::TestLevel, feature_category: :tooling do
describe 'TEST_LEVEL_FOLDERS constant' do
- it 'all directories it refers to exists', :aggregate_failures do
+    it 'ensures all directories it refers to exist', :aggregate_failures do
ee_only_directories = %w[
lib/ee/gitlab/background_migration
elastic
@@ -53,7 +53,7 @@ RSpec.describe Quality::TestLevel do
context 'when level is migration' do
it 'returns a pattern' do
expect(subject.pattern(:migration))
- .to eq("spec/{migrations,lib/gitlab/background_migration,lib/ee/gitlab/background_migration}{,/**/}*_spec.rb")
+ .to eq("spec/{migrations}{,/**/}*_spec.rb")
end
end
@@ -128,7 +128,7 @@ RSpec.describe Quality::TestLevel do
context 'when level is migration' do
it 'returns a regexp' do
expect(subject.regexp(:migration))
- .to eq(%r{spec/(migrations|lib/gitlab/background_migration|lib/ee/gitlab/background_migration)/})
+ .to eq(%r{spec/(migrations)/})
end
end
@@ -196,7 +196,7 @@ RSpec.describe Quality::TestLevel do
end
it 'returns the correct level for a background migration test' do
- expect(subject.level_for('spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb')).to eq(:migration)
+ expect(subject.level_for('spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb')).to eq(:background_migration)
end
it 'returns the correct level for an EE file without passing a prefix' do
@@ -208,7 +208,7 @@ RSpec.describe Quality::TestLevel do
end
   it 'returns the correct level for an EE-namespaced background migration test' do
- expect(described_class.new('ee/').level_for('ee/spec/lib/ee/gitlab/background_migration/prune_orphaned_geo_events_spec.rb')).to eq(:migration)
+ expect(described_class.new('ee/').level_for('ee/spec/lib/ee/gitlab/background_migration/prune_orphaned_geo_events_spec.rb')).to eq(:background_migration)
end
it 'returns the correct level for an integration test' do
@@ -228,27 +228,13 @@ RSpec.describe Quality::TestLevel do
.to raise_error(described_class::UnknownTestLevelError,
%r{Test level for spec/unknown/foo_spec.rb couldn't be set. Please rename the file properly or change the test level detection regexes in .+/tooling/quality/test_level.rb.})
end
- end
- describe '#background_migration?' do
- it 'returns false for a unit test' do
- expect(subject.background_migration?('spec/models/abuse_report_spec.rb')).to be(false)
- end
+ it 'ensures all spec/ folders are covered by a test level' do
+ Dir['{,ee/}spec/**/*/'].each do |path|
+ next if path =~ %r{\A(ee/)?spec/(benchmarks|docs_screenshots|fixtures|frontend_integration|support)/}
- it 'returns true for a migration test' do
- expect(subject.background_migration?('spec/migrations/add_default_and_free_plans_spec.rb')).to be(false)
- end
-
- it 'returns true for a background migration test' do
- expect(subject.background_migration?('spec/lib/gitlab/background_migration/archive_legacy_traces_spec.rb')).to be(true)
- end
-
- it 'returns true for a geo migration test' do
- expect(described_class.new('ee/').background_migration?('ee/spec/migrations/geo/migrate_ci_job_artifacts_to_separate_registry_spec.rb')).to be(false)
- end
-
- it 'returns true for a EE-namespaced background migration test' do
- expect(described_class.new('ee/').background_migration?('ee/spec/lib/ee/gitlab/background_migration/prune_orphaned_geo_events_spec.rb')).to be(true)
+ expect { subject.level_for(path) }.not_to raise_error
+ end
end
end
end
diff --git a/spec/uploaders/object_storage_spec.rb b/spec/uploaders/object_storage_spec.rb
index a4f6116f7d7..5344dbeb512 100644
--- a/spec/uploaders/object_storage_spec.rb
+++ b/spec/uploaders/object_storage_spec.rb
@@ -497,6 +497,18 @@ RSpec.describe ObjectStorage do
subject { uploader_class.workhorse_authorize(has_length: has_length, maximum_size: maximum_size) }
+ context 'when FIPS is enabled', :fips_mode do
+      it 'enables FIPS in the response' do
+ expect(subject[:UploadHashFunctions]).to match_array(%w[sha1 sha256 sha512])
+ end
+ end
+
+ context 'when FIPS is disabled' do
+      it 'disables FIPS in the response' do
+ expect(subject[:UploadHashFunctions]).to be nil
+ end
+ end
+
shared_examples 'returns the maximum size given' do
it "returns temporary path" do
expect(subject[:MaximumSize]).to eq(maximum_size)
diff --git a/spec/views/admin/application_settings/_repository_check.html.haml_spec.rb b/spec/views/admin/application_settings/_repository_check.html.haml_spec.rb
index dc3459f84ef..011f05eac21 100644
--- a/spec/views/admin/application_settings/_repository_check.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/_repository_check.html.haml_spec.rb
@@ -41,27 +41,6 @@ RSpec.describe 'admin/application_settings/_repository_check.html.haml', feature
expect(rendered).to have_field('Enable automatic repository housekeeping')
expect(rendered).to have_field('Optimize repository period')
-
- # TODO: Remove it along with optimized_housekeeping feature flag
- expect(rendered).not_to have_field('Incremental repack period')
- expect(rendered).not_to have_field('Full repack period')
- expect(rendered).not_to have_field('Git GC period')
- end
-
- context 'when optimized_housekeeping is disabled' do
- before do
- stub_feature_flags(optimized_housekeeping: false)
- end
-
- it 'renders the correct setting subsection content' do
- render
-
- expect(rendered).to have_field('Enable automatic repository housekeeping')
- expect(rendered).to have_field('Incremental repack period')
- expect(rendered).to have_field('Full repack period')
- expect(rendered).to have_field('Git GC period')
- expect(rendered).not_to have_field('Optimize repository period')
- end
end
end
diff --git a/spec/views/admin/application_settings/general.html.haml_spec.rb b/spec/views/admin/application_settings/general.html.haml_spec.rb
index f229fd2dcdc..dd49de8f880 100644
--- a/spec/views/admin/application_settings/general.html.haml_spec.rb
+++ b/spec/views/admin/application_settings/general.html.haml_spec.rb
@@ -46,13 +46,9 @@ RSpec.describe 'admin/application_settings/general.html.haml' do
it_behaves_like 'does not render registration features prompt', :application_setting_disabled_repository_size_limit
end
- context 'with no license and service ping disabled' do
+ context 'with no license and service ping disabled', :without_license do
before do
stub_application_setting(usage_ping_enabled: false)
-
- if Gitlab.ee?
- allow(License).to receive(:current).and_return(nil)
- end
end
it_behaves_like 'renders registration features prompt', :application_setting_disabled_repository_size_limit
diff --git a/spec/views/admin/broadcast_messages/index.html.haml_spec.rb b/spec/views/admin/broadcast_messages/index.html.haml_spec.rb
deleted file mode 100644
index ba998085bf9..00000000000
--- a/spec/views/admin/broadcast_messages/index.html.haml_spec.rb
+++ /dev/null
@@ -1,52 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'admin/broadcast_messages/index' do
- let(:role_targeted_broadcast_messages) { true }
- let(:vue_broadcast_messages) { false }
-
- let_it_be(:message) { create(:broadcast_message, broadcast_type: 'banner', target_access_levels: [Gitlab::Access::GUEST, Gitlab::Access::DEVELOPER]) }
-
- before do
- assign(:broadcast_messages, BroadcastMessage.page(1))
- assign(:broadcast_message, BroadcastMessage.new)
-
- stub_feature_flags(role_targeted_broadcast_messages: role_targeted_broadcast_messages)
- stub_feature_flags(vue_broadcast_messages: vue_broadcast_messages)
-
- render
- end
-
- describe 'Target roles select and table column' do
- it 'rendered' do
- expect(rendered).to have_content('Target roles')
- expect(rendered).to have_content('Owner')
- expect(rendered).to have_content('Guest, Developer')
- end
-
- context 'when feature flag is off' do
- let(:role_targeted_broadcast_messages) { false }
-
- it 'is not rendered' do
- expect(rendered).not_to have_content('Target roles')
- expect(rendered).not_to have_content('Owner')
- expect(rendered).not_to have_content('Guest, Developer')
- end
- end
- end
-
- describe 'Vue application' do
- it 'is not rendered' do
- expect(rendered).not_to have_selector('#js-broadcast-messages')
- end
-
- context 'when feature flag is on' do
- let(:vue_broadcast_messages) { true }
-
- it 'is rendered' do
- expect(rendered).to have_selector('#js-broadcast-messages')
- end
- end
- end
-end
diff --git a/spec/views/admin/dashboard/index.html.haml_spec.rb b/spec/views/admin/dashboard/index.html.haml_spec.rb
index 337964f1354..6e9cb5e2657 100644
--- a/spec/views/admin/dashboard/index.html.haml_spec.rb
+++ b/spec/views/admin/dashboard/index.html.haml_spec.rb
@@ -51,6 +51,16 @@ RSpec.describe 'admin/dashboard/index.html.haml' do
expect(rendered).not_to have_content "Users over License"
end
+ it 'shows database versions for all database models' do
+ render
+
+ expect(rendered).to have_content /PostgreSQL \(main\).+?#{::Gitlab::Database::Reflection.new(ApplicationRecord).version}/
+
+ if Gitlab::Database.has_config?(:ci)
+ expect(rendered).to have_content /PostgreSQL \(ci\).+?#{::Gitlab::Database::Reflection.new(Ci::ApplicationRecord).version}/
+ end
+ end
+
describe 'when show_version_check? is true' do
before do
allow(view).to receive(:show_version_check?).and_return(true)
diff --git a/spec/views/errors/omniauth_error.html.haml_spec.rb b/spec/views/errors/omniauth_error.html.haml_spec.rb
index e99cb536bd8..487dd9f066f 100644
--- a/spec/views/errors/omniauth_error.html.haml_spec.rb
+++ b/spec/views/errors/omniauth_error.html.haml_spec.rb
@@ -15,8 +15,10 @@ RSpec.describe 'errors/omniauth_error' do
render
expect(rendered).to have_content(provider)
- expect(rendered).to have_content(_('Sign-in failed because %{error}.') % { error: error })
+ expect(rendered).to have_content(error)
expect(rendered).to have_link('Sign in')
- expect(rendered).to have_content(_('If none of the options work, try contacting a GitLab administrator.'))
+ expect(rendered).to have_content(
+ _('If you are unable to sign in or recover your password, contact a GitLab administrator.')
+ )
end
end
diff --git a/spec/views/groups/edit.html.haml_spec.rb b/spec/views/groups/edit.html.haml_spec.rb
index ddcfea0ab10..fda93ebab51 100644
--- a/spec/views/groups/edit.html.haml_spec.rb
+++ b/spec/views/groups/edit.html.haml_spec.rb
@@ -127,13 +127,7 @@ RSpec.describe 'groups/edit.html.haml' do
allow(view).to receive(:current_user) { user }
end
- context 'prompt user about registration features' do
- before do
- if Gitlab.ee?
- allow(License).to receive(:current).and_return(nil)
- end
- end
-
+ context 'prompt user about registration features', :without_license do
context 'with service ping disabled' do
before do
stub_application_setting(usage_ping_enabled: false)
diff --git a/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
index d4e97d96dfd..163f39568e5 100644
--- a/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_admin.html.haml_spec.rb
@@ -67,6 +67,24 @@ RSpec.describe 'layouts/nav/sidebar/_admin' do
it_behaves_like 'page has active sub tab', 'Topics'
end
+ context 'on runners' do
+ before do
+ allow(controller).to receive(:controller_name).and_return('runners')
+ end
+
+ it_behaves_like 'page has active tab', 'CI/CD'
+ it_behaves_like 'page has active sub tab', 'Runners'
+ end
+
+ context 'on jobs' do
+ before do
+ allow(controller).to receive(:controller_name).and_return('jobs')
+ end
+
+ it_behaves_like 'page has active tab', 'CI/CD'
+ it_behaves_like 'page has active sub tab', 'Jobs'
+ end
+
context 'on messages' do
before do
allow(controller).to receive(:controller_name).and_return('broadcast_messages')
diff --git a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
index 080a53cc1a2..4de2c011b93 100644
--- a/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
+++ b/spec/views/layouts/nav/sidebar/_project.html.haml_spec.rb
@@ -827,7 +827,7 @@ RSpec.describe 'layouts/nav/sidebar/_project' do
end
end
- context 'gitlab.com' do
+ context 'gitlab.com', :with_license do
before do
allow(Gitlab).to receive(:com?).and_return(true)
end
diff --git a/spec/views/layouts/snippets.html.haml_spec.rb b/spec/views/layouts/snippets.html.haml_spec.rb
new file mode 100644
index 00000000000..69378906bcd
--- /dev/null
+++ b/spec/views/layouts/snippets.html.haml_spec.rb
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe 'layouts/snippets', feature_category: :snippets do
+ before do
+ allow(view).to receive(:current_user).and_return(user)
+ allow(view).to receive(:current_user_mode).and_return(Gitlab::Auth::CurrentUserMode.new(user))
+ end
+
+ describe 'sidebar' do
+ context 'when feature flag is on' do
+ context 'when signed in' do
+ let(:user) { build_stubbed(:user) }
+
+ it 'renders the "Your work" sidebar' do
+ render
+
+ expect(rendered).to have_css('aside.nav-sidebar[aria-label="Your work"]')
+ end
+ end
+
+ context 'when not signed in' do
+ let(:user) { nil }
+
+ it 'renders no sidebar' do
+ render
+
+ expect(rendered).not_to have_css('aside.nav-sidebar')
+ end
+ end
+ end
+
+ context 'when feature flag is off' do
+ before do
+ stub_feature_flags(your_work_sidebar: false)
+ end
+
+ let(:user) { build_stubbed(:user) }
+
+ it 'renders no sidebar' do
+ render
+
+ expect(rendered).not_to have_css('aside.nav-sidebar')
+ end
+ end
+ end
+end
diff --git a/spec/views/profiles/notifications/show.html.haml_spec.rb b/spec/views/profiles/notifications/show.html.haml_spec.rb
index 9cdf8124fcf..1cfd8847bf8 100644
--- a/spec/views/profiles/notifications/show.html.haml_spec.rb
+++ b/spec/views/profiles/notifications/show.html.haml_spec.rb
@@ -5,6 +5,11 @@ require 'spec_helper'
RSpec.describe 'profiles/notifications/show' do
let(:groups) { GroupsFinder.new(user).execute.page(1) }
let(:user) { create(:user) }
+ let(:option_default) { _('Use primary email (%{email})') % { email: user.email } }
+ let(:option_primary_email) { user.email }
+ let(:expected_primary_email_attr) { "[data-emails='#{[option_primary_email].to_json}']" }
+ let(:expected_default_attr) { "[data-empty-value-text='#{option_default}']" }
+ let(:expected_selector) { expected_primary_email_attr + expected_default_attr + expected_value_attr }
before do
assign(:group_notifications, [])
@@ -16,14 +21,26 @@ RSpec.describe 'profiles/notifications/show' do
end
context 'when there is no database value for User#notification_email' do
- let(:option_default) { _('Use primary email (%{email})') % { email: user.email } }
- let(:option_primary_email) { user.email }
- let(:options) { [option_default, option_primary_email] }
+ let(:expected_value_attr) { ":not([data-value])" }
it 'displays the correct elements' do
render
- expect(rendered).to have_select('user_notification_email', options: options, selected: nil)
+ expect(rendered).to have_selector(expected_selector)
+ end
+ end
+
+ context 'when there is a database value for User#notification_email' do
+ let(:expected_value_attr) { "[data-value='#{option_primary_email}']" }
+
+ before do
+ user.notification_email = option_primary_email
+ end
+
+ it 'displays the correct elements' do
+ render
+
+ expect(rendered).to have_selector(expected_selector)
end
end
end
diff --git a/spec/views/profiles/preferences/show.html.haml_spec.rb b/spec/views/profiles/preferences/show.html.haml_spec.rb
index 4e4499c3252..6e0c6d67d85 100644
--- a/spec/views/profiles/preferences/show.html.haml_spec.rb
+++ b/spec/views/profiles/preferences/show.html.haml_spec.rb
@@ -54,8 +54,9 @@ RSpec.describe 'profiles/preferences/show' do
end
it 'has helpful homepage setup guidance' do
- expect(rendered).to have_field('Dashboard')
- expect(rendered).to have_content('Choose what content you want to see by default on your dashboard.')
+ expect(rendered).to have_selector('[data-label="Dashboard"]')
+ expect(rendered).to have_selector("[data-description=" \
+ "'Choose what content you want to see by default on your dashboard.']")
end
end
diff --git a/spec/views/projects/_files.html.haml_spec.rb b/spec/views/projects/_files.html.haml_spec.rb
deleted file mode 100644
index b6a8b4735b0..00000000000
--- a/spec/views/projects/_files.html.haml_spec.rb
+++ /dev/null
@@ -1,73 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe 'projects/_files' do
- include ProjectForksHelper
-
- let_it_be(:user) { create(:user) }
- let_it_be(:source_project) { create(:project, :repository, :public) }
-
- context 'when the project is a fork' do
- let_it_be(:project) { fork_project(source_project, user, { repository: true }) }
-
- before do
- assign(:project, project)
- assign(:ref, project.default_branch)
- assign(:path, '/')
- assign(:id, project.commit.id)
-
- allow(view).to receive(:current_user).and_return(user)
- end
-
- context 'when user can read fork source' do
- before do
- allow(view).to receive(:can?).with(user, :read_project, source_project).and_return(true)
- end
-
- it 'shows the forked-from project' do
- render
-
- expect(rendered).to have_content("Forked from #{source_project.full_name}")
- expect(rendered).to have_content("Up to date with upstream repository")
- end
-
- context 'when fork_divergence_counts is disabled' do
- before do
- stub_feature_flags(fork_divergence_counts: false)
- end
-
- it 'does not show fork info' do
- render
-
- expect(rendered).not_to have_content("Forked from #{source_project.full_name}")
- expect(rendered).not_to have_content("Up to date with upstream repository")
- end
- end
- end
-
- context 'when user cannot read fork source' do
- before do
- allow(view).to receive(:can?).with(user, :read_project, source_project).and_return(false)
- end
-
- it 'does not show the forked-from project' do
- render
-
- expect(rendered).to have_content("Forked from an inaccessible project")
- end
-
- context 'when fork_divergence_counts is disabled' do
- before do
- stub_feature_flags(fork_divergence_counts: false)
- end
-
- it 'does not show fork info' do
- render
-
- expect(rendered).not_to have_content("Forked from an inaccessible project")
- end
- end
- end
- end
-end
diff --git a/spec/views/projects/commit/show.html.haml_spec.rb b/spec/views/projects/commit/show.html.haml_spec.rb
index 1d9e5e782e5..52b3d5b95f9 100644
--- a/spec/views/projects/commit/show.html.haml_spec.rb
+++ b/spec/views/projects/commit/show.html.haml_spec.rb
@@ -92,7 +92,7 @@ RSpec.describe 'projects/commit/show.html.haml', feature_category: :source_code
let(:commit) { project.commit(GpgHelpers::SIGNED_COMMIT_SHA) }
it 'renders unverified badge' do
- expect(title).to include('This commit was signed with an <strong>unverified</strong> signature.')
+ expect(title).to include('This commit was signed with an unverified signature.')
expect(content).to include(commit.signature.gpg_key_primary_keyid)
end
end
@@ -101,8 +101,8 @@ RSpec.describe 'projects/commit/show.html.haml', feature_category: :source_code
let(:commit) { project.commit('7b5160f9bb23a3d58a0accdbe89da13b96b1ece9') }
it 'renders unverified badge' do
- expect(title).to include('This commit was signed with an <strong>unverified</strong> signature.')
- expect(content).to match(/SSH key fingerprint:[\s\S]+Unknown/)
+ expect(title).to include('This commit was signed with an unverified signature.')
+ expect(content).to match(/SSH key fingerprint:[\s\S].+#{commit.signature.key_fingerprint_sha256}/)
end
end
@@ -112,7 +112,6 @@ RSpec.describe 'projects/commit/show.html.haml', feature_category: :source_code
it 'renders unverified badge' do
expect(title).to include('This commit was signed with an <strong>unverified</strong> signature.')
expect(content).to include(commit.signature.x509_certificate.subject_key_identifier.tr(":", " "))
- expect(content).to include(commit.signature.x509_certificate.email)
end
end
end
diff --git a/spec/views/projects/edit.html.haml_spec.rb b/spec/views/projects/edit.html.haml_spec.rb
index 2935e4395ba..bf154b61609 100644
--- a/spec/views/projects/edit.html.haml_spec.rb
+++ b/spec/views/projects/edit.html.haml_spec.rb
@@ -93,13 +93,9 @@ RSpec.describe 'projects/edit' do
it_behaves_like 'does not render registration features prompt', :project_disabled_repository_size_limit
end
- context 'with no license and service ping disabled' do
+ context 'with no license and service ping disabled', :without_license do
before do
stub_application_setting(usage_ping_enabled: false)
-
- if Gitlab.ee?
- allow(License).to receive(:current).and_return(nil)
- end
end
it_behaves_like 'renders registration features prompt', :project_disabled_repository_size_limit
diff --git a/spec/views/registrations/welcome/show.html.haml_spec.rb b/spec/views/registrations/welcome/show.html.haml_spec.rb
index 99d87ac449b..372dbf01a64 100644
--- a/spec/views/registrations/welcome/show.html.haml_spec.rb
+++ b/spec/views/registrations/welcome/show.html.haml_spec.rb
@@ -7,10 +7,6 @@ RSpec.describe 'registrations/welcome/show' do
before do
allow(view).to receive(:current_user).and_return(user)
- allow(view).to receive(:in_subscription_flow?).and_return(false)
- allow(view).to receive(:in_trial_flow?).and_return(false)
- allow(view).to receive(:user_has_memberships?).and_return(false)
- allow(view).to receive(:in_oauth_flow?).and_return(false)
allow(view).to receive(:glm_tracking_params).and_return({})
render
diff --git a/spec/views/search/_results.html.haml_spec.rb b/spec/views/search/_results.html.haml_spec.rb
index e81462ee518..de994a0da2b 100644
--- a/spec/views/search/_results.html.haml_spec.rb
+++ b/spec/views/search/_results.html.haml_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe 'search/_results' do
+RSpec.describe 'search/_results', feature_category: :global_search do
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
@@ -11,7 +11,6 @@ RSpec.describe 'search/_results' do
let(:scope) { 'issues' }
let(:term) { 'foo' }
let(:search_results) { instance_double('Gitlab::SearchResults', { formatted_count: 10, current_user: user } ) }
- let(:search_service) { class_double(SearchServicePresenter, scope: scope, search: term, current_user: user) }
before do
controller.params[:action] = 'show'
@@ -20,6 +19,7 @@ RSpec.describe 'search/_results' do
create_list(:issue, 3)
allow(view).to receive(:current_user) { user }
+
assign(:search_count_path, 'test count link')
assign(:search_path, 'link test')
assign(:search_results, search_results)
@@ -27,8 +27,9 @@ RSpec.describe 'search/_results' do
assign(:search_term, term)
assign(:scope, scope)
- @search_service = SearchServicePresenter.new(SearchService.new(user, search: term, scope: scope))
- allow(@search_service).to receive(:search_objects).and_return(search_objects)
+ search_service_presenter = SearchServicePresenter.new(SearchService.new(user, search: term, scope: scope))
+ allow(search_service_presenter).to receive(:search_objects).and_return(search_objects)
+ assign(:search_service_presenter, search_service_presenter)
end
where(search_page_vertical_nav_enabled: [true, false])
diff --git a/spec/views/search/show.html.haml_spec.rb b/spec/views/search/show.html.haml_spec.rb
index 26ec2c6ae74..6adb2c77c4d 100644
--- a/spec/views/search/show.html.haml_spec.rb
+++ b/spec/views/search/show.html.haml_spec.rb
@@ -13,7 +13,7 @@ RSpec.describe 'search/show', feature_category: :global_search do
stub_template "search/_category.html.haml" => 'Category Partial'
stub_template "search/_results.html.haml" => 'Results Partial'
- assign(:search_service, search_service_presenter)
+ assign(:search_service_presenter, search_service_presenter)
end
context 'search_page_vertical_nav feature flag enabled' do
diff --git a/spec/views/shared/projects/_list.html.haml_spec.rb b/spec/views/shared/projects/_list.html.haml_spec.rb
index 5c38bb79ea1..b7b4f97f2b6 100644
--- a/spec/views/shared/projects/_list.html.haml_spec.rb
+++ b/spec/views/shared/projects/_list.html.haml_spec.rb
@@ -82,7 +82,7 @@ RSpec.describe 'shared/projects/_list' do
it 'renders a no-content message' do
render
- expect(rendered).to have_content(s_('UserProfile|This user doesn\'t have any personal projects'))
+ expect(rendered).to have_content(s_('UserProfile|There are no projects available to be displayed here.'))
end
end
end
diff --git a/spec/workers/ci/build_finished_worker_spec.rb b/spec/workers/ci/build_finished_worker_spec.rb
index e8bb3988001..049f3af1dd7 100644
--- a/spec/workers/ci/build_finished_worker_spec.rb
+++ b/spec/workers/ci/build_finished_worker_spec.rb
@@ -65,6 +65,12 @@ RSpec.describe Ci::BuildFinishedWorker do
subject
end
end
+
+ context 'when it has a token' do
+ it 'removes the token' do
+ expect { subject }.to change { build.reload.token }.to(nil)
+ end
+ end
end
context 'when build does not exist' do
diff --git a/spec/workers/ci/initial_pipeline_process_worker_spec.rb b/spec/workers/ci/initial_pipeline_process_worker_spec.rb
index 5fb8671fd5c..c7bbe83433e 100644
--- a/spec/workers/ci/initial_pipeline_process_worker_spec.rb
+++ b/spec/workers/ci/initial_pipeline_process_worker_spec.rb
@@ -2,12 +2,13 @@
require 'spec_helper'
-RSpec.describe Ci::InitialPipelineProcessWorker do
- describe '#perform' do
- let_it_be_with_reload(:pipeline) do
- create(:ci_pipeline, :with_job, status: :created)
- end
+RSpec.describe Ci::InitialPipelineProcessWorker, feature_category: :continuous_integration do
+ let_it_be(:project) { create(:project, :repository) }
+ let(:job) { build(:ci_build, project: project) }
+ let(:stage) { build(:ci_stage, project: project, statuses: [job]) }
+ let(:pipeline) { create(:ci_pipeline, stages: [stage], status: :created, project: project, builds: [job]) }
+ describe '#perform' do
include_examples 'an idempotent worker' do
let(:job_args) { pipeline.id }
@@ -19,5 +20,52 @@ RSpec.describe Ci::InitialPipelineProcessWorker do
expect(pipeline.reload).to be_pending
end
end
+
+ context 'when a pipeline does not contain a deployment job' do
+ it 'does not create any deployments' do
+ expect { subject }.not_to change { Deployment.count }
+ end
+ end
+
+ context 'when a pipeline contains a teardown job' do
+ let(:job) { build(:ci_build, :stop_review_app, project: project) }
+
+ before do
+ create(:environment, name: job.expanded_environment_name)
+ end
+
+ it 'does not create a deployment record' do
+ expect { subject }.not_to change { Deployment.count }
+
+ expect(job.deployment).to be_nil
+ end
+ end
+
+ context 'when a pipeline contains a deployment job' do
+ let(:job) { build(:ci_build, :start_review_app, project: project) }
+ let!(:environment) { create(:environment, project: project, name: job.expanded_environment_name) }
+
+ it 'creates a deployment record' do
+ expect { subject }.to change { Deployment.count }.by(1)
+
+ expect(job.deployment).to have_attributes(
+ project: job.project,
+ ref: job.ref,
+ sha: job.sha,
+ deployable: job,
+ deployable_type: 'CommitStatus',
+ environment: job.persisted_environment)
+ end
+
+ context 'when the corresponding environment does not exist' do
+ let(:environment) {}
+
+ it 'does not create a deployment record' do
+ expect { subject }.not_to change { Deployment.count }
+
+ expect(job.deployment).to be_nil
+ end
+ end
+ end
end
end
diff --git a/spec/workers/cluster_provision_worker_spec.rb b/spec/workers/cluster_provision_worker_spec.rb
deleted file mode 100644
index 2d6ca4ab7e3..00000000000
--- a/spec/workers/cluster_provision_worker_spec.rb
+++ /dev/null
@@ -1,47 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe ClusterProvisionWorker do
- describe '#perform' do
- context 'when provider type is gcp' do
- let(:cluster) { create(:cluster, provider_type: :gcp, provider_gcp: provider) }
- let(:provider) { create(:cluster_provider_gcp, :scheduled) }
-
- it 'provision a cluster' do
- expect_any_instance_of(Clusters::Gcp::ProvisionService).to receive(:execute).with(provider)
-
- described_class.new.perform(cluster.id)
- end
- end
-
- context 'when provider type is aws' do
- let(:cluster) { create(:cluster, provider_type: :aws, provider_aws: provider) }
- let(:provider) { create(:cluster_provider_aws, :scheduled) }
-
- it 'provision a cluster' do
- expect_any_instance_of(Clusters::Aws::ProvisionService).to receive(:execute).with(provider)
-
- described_class.new.perform(cluster.id)
- end
- end
-
- context 'when provider type is user' do
- let(:cluster) { create(:cluster, :provided_by_user) }
-
- it 'does not provision a cluster' do
- expect_any_instance_of(Clusters::Gcp::ProvisionService).not_to receive(:execute)
-
- described_class.new.perform(cluster.id)
- end
- end
-
- context 'when cluster does not exist' do
- it 'does not provision a cluster' do
- expect_any_instance_of(Clusters::Gcp::ProvisionService).not_to receive(:execute)
-
- described_class.new.perform(123)
- end
- end
- end
-end
diff --git a/spec/workers/counters/cleanup_refresh_worker_spec.rb b/spec/workers/counters/cleanup_refresh_worker_spec.rb
new file mode 100644
index 00000000000..a56c98f72a0
--- /dev/null
+++ b/spec/workers/counters/cleanup_refresh_worker_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Counters::CleanupRefreshWorker do
+ let(:model) { create(:project_statistics) }
+
+ describe '#perform', :redis do
+ let(:attribute) { :build_artifacts_size }
+ let(:worker) { described_class.new }
+
+ subject { worker.perform(model.class.name, model.id, attribute) }
+
+ it 'calls cleanup_refresh on the counter' do
+ expect_next_instance_of(Gitlab::Counters::BufferedCounter, model, attribute) do |counter|
+ expect(counter).to receive(:cleanup_refresh)
+ end
+
+ subject
+ end
+
+ context 'when model class does not exist' do
+ subject { worker.perform('NonExistentModel', 1, attribute) }
+
+ it 'does nothing' do
+ expect(Gitlab::Counters::BufferedCounter).not_to receive(:new)
+
+ subject
+ end
+ end
+
+ context 'when record does not exist' do
+ subject { worker.perform(model.class.name, non_existing_record_id, attribute) }
+
+ it 'does nothing' do
+ expect(Gitlab::Counters::BufferedCounter).not_to receive(:new)
+
+ subject
+ end
+ end
+ end
+end
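The worker implementation is not shown in this diff; from the expectations above, a hedged sketch of what #perform presumably does — resolve the model class and record, return quietly if either is missing, and delegate to BufferedCounter#cleanup_refresh — could look like this (includes and options are assumptions):

module Counters
  class CleanupRefreshWorker
    include ApplicationWorker

    idempotent!

    def perform(model_name, model_id, attribute)
      model_class = model_name.safe_constantize
      return unless model_class

      record = model_class.find_by_id(model_id)
      return unless record

      Gitlab::Counters::BufferedCounter.new(record, attribute).cleanup_refresh
    end
  end
end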
diff --git a/spec/workers/every_sidekiq_worker_spec.rb b/spec/workers/every_sidekiq_worker_spec.rb
index 788f5d8222c..c444e1f383c 100644
--- a/spec/workers/every_sidekiq_worker_spec.rb
+++ b/spec/workers/every_sidekiq_worker_spec.rb
@@ -210,6 +210,7 @@ RSpec.describe 'Every Sidekiq worker' do
'Deployments::LinkMergeRequestWorker' => 3,
'Deployments::SuccessWorker' => 3,
'Deployments::UpdateEnvironmentWorker' => 3,
+ 'Deployments::ApprovalWorker' => 3,
'DesignManagement::CopyDesignCollectionWorker' => 3,
'DesignManagement::NewVersionWorker' => 3,
'DestroyPagesDeploymentsWorker' => 3,
diff --git a/spec/workers/gitlab/github_gists_import/finish_import_worker_spec.rb b/spec/workers/gitlab/github_gists_import/finish_import_worker_spec.rb
index c4c19f2f9c5..cba6c578d11 100644
--- a/spec/workers/gitlab/github_gists_import/finish_import_worker_spec.rb
+++ b/spec/workers/gitlab/github_gists_import/finish_import_worker_spec.rb
@@ -2,13 +2,17 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubGistsImport::FinishImportWorker, feature_category: :importer do
+RSpec.describe Gitlab::GithubGistsImport::FinishImportWorker, :clean_gitlab_redis_cache, feature_category: :importers do
subject(:worker) { described_class.new }
let_it_be(:user) { create(:user) }
describe '#perform', :aggregate_failures do
context 'when there are no remaining jobs' do
+ before do
+ allow(Gitlab::Cache::Import::Caching).to receive(:values_from_hash).and_return(nil)
+ end
+
it 'marks import status as finished' do
waiter = instance_double(Gitlab::JobWaiter, key: :key, jobs_remaining: 0)
expect(Gitlab::JobWaiter).to receive(:new).and_return(waiter)
@@ -19,6 +23,8 @@ RSpec.describe Gitlab::GithubGistsImport::FinishImportWorker, feature_category:
expect(Gitlab::GithubImport::Logger)
.to receive(:info)
.with(user_id: user.id, message: 'GitHub Gists import finished')
+ expect(Notify).not_to receive(:github_gists_import_errors_email)
+ expect(Gitlab::Cache::Import::Caching).to receive(:expire).and_call_original
worker.perform(user.id, waiter.key, waiter.jobs_remaining)
end
@@ -35,6 +41,33 @@ RSpec.describe Gitlab::GithubGistsImport::FinishImportWorker, feature_category:
worker.perform(user.id, waiter.key, waiter.jobs_remaining)
end
end
+
+ context 'when some gists failed to import' do
+ let(:errors) { { '12345' => 'Snippet maximum file count exceeded.' } }
+ let(:waiter) { instance_double(Gitlab::JobWaiter, key: :key, jobs_remaining: 0) }
+ let(:mail_instance) { instance_double(ActionMailer::MessageDelivery, deliver_now: true) }
+
+ before do
+ allow(Gitlab::Cache::Import::Caching).to receive(:values_from_hash).and_return(errors)
+ allow(Gitlab::JobWaiter).to receive(:new).and_return(waiter)
+ allow(waiter).to receive(:wait).with(described_class::BLOCKING_WAIT_TIME)
+ end
+
+ it 'sends an email to the user' do
+ expect_next_instance_of(Gitlab::GithubGistsImport::Status) do |status|
+ expect(status).to receive(:finish!)
+ end
+ expect(Gitlab::GithubImport::Logger)
+ .to receive(:info)
+ .with(user_id: user.id, message: 'GitHub Gists import finished')
+ expect(Notify).to receive(:github_gists_import_errors_email)
+ .with(user.id, errors).once.and_return(mail_instance)
+ expect(mail_instance).to receive(:deliver_now)
+ expect(Gitlab::Cache::Import::Caching).to receive(:expire).and_call_original
+
+ worker.perform(user.id, waiter.key, waiter.jobs_remaining)
+ end
+ end
end
describe '.sidekiq_retries_exhausted' do
diff --git a/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb b/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb
index dfc5084bb10..1c24cdcccae 100644
--- a/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb
+++ b/spec/workers/gitlab/github_gists_import/import_gist_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubGistsImport::ImportGistWorker, feature_category: :importer do
+RSpec.describe Gitlab::GithubGistsImport::ImportGistWorker, feature_category: :importers do
subject { described_class.new }
let_it_be(:user) { create(:user) }
diff --git a/spec/workers/gitlab/github_gists_import/start_import_worker_spec.rb b/spec/workers/gitlab/github_gists_import/start_import_worker_spec.rb
index 523b7463a9d..220f2bb0c75 100644
--- a/spec/workers/gitlab/github_gists_import/start_import_worker_spec.rb
+++ b/spec/workers/gitlab/github_gists_import/start_import_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Gitlab::GithubGistsImport::StartImportWorker, feature_category: :importer do
+RSpec.describe Gitlab::GithubGistsImport::StartImportWorker, feature_category: :importers do
subject(:worker) { described_class.new }
let_it_be(:user) { create(:user) }
diff --git a/spec/workers/gitlab/github_import/import_protected_branch_worker_spec.rb b/spec/workers/gitlab/github_import/import_protected_branch_worker_spec.rb
index 4a3ef2bf560..c2b8ee661a3 100644
--- a/spec/workers/gitlab/github_import/import_protected_branch_worker_spec.rb
+++ b/spec/workers/gitlab/github_import/import_protected_branch_worker_spec.rb
@@ -14,7 +14,8 @@ RSpec.describe Gitlab::GithubImport::ImportProtectedBranchWorker do
let(:json_hash) do
{
id: 'main',
- allow_force_pushes: true
+ allow_force_pushes: true,
+ allowed_to_push_users: []
}
end
diff --git a/spec/workers/merge_requests/create_pipeline_worker_spec.rb b/spec/workers/merge_requests/create_pipeline_worker_spec.rb
index 441d7652219..afb9fa1a549 100644
--- a/spec/workers/merge_requests/create_pipeline_worker_spec.rb
+++ b/spec/workers/merge_requests/create_pipeline_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe MergeRequests::CreatePipelineWorker do
+RSpec.describe MergeRequests::CreatePipelineWorker, feature_category: :continuous_integration do
describe '#perform' do
let(:user) { create(:user) }
let(:project) { create(:project) }
@@ -17,7 +17,7 @@ RSpec.describe MergeRequests::CreatePipelineWorker do
expect_next_instance_of(MergeRequests::CreatePipelineService,
project: project,
current_user: user,
- params: { push_options: nil }) do |service|
+ params: { allow_duplicate: nil, push_options: nil }) do |service|
expect(service).to receive(:execute).with(merge_request)
end
@@ -38,7 +38,7 @@ RSpec.describe MergeRequests::CreatePipelineWorker do
expect_next_instance_of(MergeRequests::CreatePipelineService,
project: project,
current_user: user,
- params: { push_options: { ci: { skip: true } } }) do |service|
+ params: { allow_duplicate: nil, push_options: { ci: { skip: true } } }) do |service|
expect(service).to receive(:execute).with(merge_request)
end
diff --git a/spec/workers/pages/invalidate_domain_cache_worker_spec.rb b/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
index c786d4658d4..b3c81b25a93 100644
--- a/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
+++ b/spec/workers/pages/invalidate_domain_cache_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Pages::InvalidateDomainCacheWorker do
+RSpec.describe Pages::InvalidateDomainCacheWorker, feature_category: :pages do
shared_examples 'clears caches with' do |event_class:, event_data:, caches:|
include AfterNextHelpers
@@ -22,44 +22,70 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
end
end
+ context 'when a project has multiple domains' do
+ include AfterNextHelpers
+
+ let_it_be(:project) { create(:project) }
+ let_it_be(:pages_domain) { create(:pages_domain, project: project) }
+ let_it_be(:pages_domain2) { create(:pages_domain, project: project) }
+
+ let(:event) do
+ Pages::PageDeployedEvent.new(
+ data: {
+ project_id: project.id,
+ namespace_id: project.namespace_id,
+ root_namespace_id: project.root_ancestor.id
+ }
+ )
+ end
+
+ subject { consume_event(subscriber: described_class, event: event) }
+
+ it 'clears the cache with Gitlab::Pages::CacheControl' do
+ expect_next(Gitlab::Pages::CacheControl, type: :namespace, id: project.namespace_id)
+ .to receive(:clear_cache)
+ expect_next(Gitlab::Pages::CacheControl, type: :domain, id: pages_domain.id)
+ .to receive(:clear_cache)
+ expect_next(Gitlab::Pages::CacheControl, type: :domain, id: pages_domain2.id)
+ .to receive(:clear_cache)
+
+ subject
+ end
+ end
+
it_behaves_like 'clears caches with',
event_class: Pages::PageDeployedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
caches: [
- { type: :namespace, id: 3 },
- { type: :project, id: 1 }
+ { type: :namespace, id: 3 }
]
it_behaves_like 'clears caches with',
event_class: Pages::PageDeletedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
caches: [
- { type: :namespace, id: 3 },
- { type: :project, id: 1 }
+ { type: :namespace, id: 3 }
]
it_behaves_like 'clears caches with',
event_class: Projects::ProjectDeletedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
caches: [
- { type: :namespace, id: 3 },
- { type: :project, id: 1 }
+ { type: :namespace, id: 3 }
]
it_behaves_like 'clears caches with',
event_class: Projects::ProjectCreatedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
caches: [
- { type: :namespace, id: 3 },
- { type: :project, id: 1 }
+ { type: :namespace, id: 3 }
]
it_behaves_like 'clears caches with',
event_class: Projects::ProjectArchivedEvent,
event_data: { project_id: 1, namespace_id: 2, root_namespace_id: 3 },
caches: [
- { type: :namespace, id: 3 },
- { type: :project, id: 1 }
+ { type: :namespace, id: 3 }
]
it_behaves_like 'clears caches with',
@@ -72,8 +98,7 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
new_path: 'new_path'
},
caches: [
- { type: :namespace, id: 3 },
- { type: :project, id: 1 }
+ { type: :namespace, id: 3 }
]
it_behaves_like 'clears caches with',
@@ -86,7 +111,6 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
new_root_namespace_id: 5
},
caches: [
- { type: :project, id: 1 },
{ type: :namespace, id: 3 },
{ type: :namespace, id: 5 }
]
@@ -131,10 +155,11 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
project_id: 1,
namespace_id: 2,
root_namespace_id: 3,
+ domain_id: 4,
domain: 'somedomain.com'
},
caches: [
- { type: :project, id: 1 },
+ { type: :domain, id: 4 },
{ type: :namespace, id: 3 }
]
@@ -144,10 +169,11 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
project_id: 1,
namespace_id: 2,
root_namespace_id: 3,
+ domain_id: 4,
domain: 'somedomain.com'
},
caches: [
- { type: :project, id: 1 },
+ { type: :domain, id: 4 },
{ type: :namespace, id: 3 }
]
@@ -157,10 +183,11 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
project_id: 1,
namespace_id: 2,
root_namespace_id: 3,
+ domain_id: 4,
domain: 'somedomain.com'
},
caches: [
- { type: :project, id: 1 },
+ { type: :domain, id: 4 },
{ type: :namespace, id: 3 }
]
@@ -172,10 +199,11 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
project_id: 1,
namespace_id: 2,
root_namespace_id: 3,
+ domain_id: 4,
attributes: [attribute]
},
caches: [
- { type: :project, id: 1 },
+ { type: :domain, id: 4 },
{ type: :namespace, id: 3 }
]
end
@@ -204,7 +232,6 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
features: ['pages_access_level']
},
caches: [
- { type: :project, id: 1 },
{ type: :namespace, id: 3 }
]
@@ -234,7 +261,6 @@ RSpec.describe Pages::InvalidateDomainCacheWorker do
new_root_namespace_id: 5
},
caches: [
- { type: :project, id: 1 },
{ type: :namespace, id: 5 }
]
end
diff --git a/spec/workers/personal_access_tokens/expired_notification_worker_spec.rb b/spec/workers/personal_access_tokens/expired_notification_worker_spec.rb
index 3ff67f47523..7c3c48b3f80 100644
--- a/spec/workers/personal_access_tokens/expired_notification_worker_spec.rb
+++ b/spec/workers/personal_access_tokens/expired_notification_worker_spec.rb
@@ -11,7 +11,7 @@ RSpec.describe PersonalAccessTokens::ExpiredNotificationWorker, type: :worker do
it 'uses notification service to send email to the user' do
expect_next_instance_of(NotificationService) do |notification_service|
- expect(notification_service).to receive(:access_token_expired).with(expired_today.user)
+ expect(notification_service).to receive(:access_token_expired).with(expired_today.user, [expired_today.name])
end
worker.perform
@@ -25,7 +25,7 @@ RSpec.describe PersonalAccessTokens::ExpiredNotificationWorker, type: :worker do
shared_examples 'expiry notification is not required to be sent for the token' do
it do
expect_next_instance_of(NotificationService) do |notification_service|
- expect(notification_service).not_to receive(:access_token_expired).with(token.user)
+ expect(notification_service).not_to receive(:access_token_expired).with(token.user, [token.name])
end
worker.perform
diff --git a/spec/workers/pipeline_schedule_worker_spec.rb b/spec/workers/pipeline_schedule_worker_spec.rb
index d23907a8def..db58dc00338 100644
--- a/spec/workers/pipeline_schedule_worker_spec.rb
+++ b/spec/workers/pipeline_schedule_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe PipelineScheduleWorker do
+RSpec.describe PipelineScheduleWorker, :sidekiq_inline, feature_category: :continuous_integration do
include ExclusiveLeaseHelpers
subject { described_class.new.perform }
@@ -30,7 +30,7 @@ RSpec.describe PipelineScheduleWorker do
project.add_maintainer(user)
end
- context 'when there is a scheduled pipeline within next_run_at', :sidekiq_inline do
+ context 'when there is a scheduled pipeline within next_run_at' do
shared_examples 'successful scheduling' do
it 'creates a new pipeline' do
expect { subject }.to change { project.ci_pipelines.count }.by(1)
@@ -49,7 +49,19 @@ RSpec.describe PipelineScheduleWorker do
end
end
- it_behaves_like 'successful scheduling'
+ shared_examples 'successful scheduling with/without ci_use_run_pipeline_schedule_worker' do
+ it_behaves_like 'successful scheduling'
+
+ context 'when feature flag ci_use_run_pipeline_schedule_worker is disabled' do
+ before do
+ stub_feature_flags(ci_use_run_pipeline_schedule_worker: false)
+ end
+
+ it_behaves_like 'successful scheduling'
+ end
+ end
+
+ it_behaves_like 'successful scheduling with/without ci_use_run_pipeline_schedule_worker'
context 'when the latest commit contains [ci skip]' do
before do
@@ -58,7 +70,7 @@ RSpec.describe PipelineScheduleWorker do
.and_return('some commit [ci skip]')
end
- it_behaves_like 'successful scheduling'
+ it_behaves_like 'successful scheduling with/without ci_use_run_pipeline_schedule_worker'
end
end
@@ -123,4 +135,13 @@ RSpec.describe PipelineScheduleWorker do
expect { subject }.not_to raise_error
end
end
+
+ context 'when max retry attempts are reached' do
+ let!(:lease) { stub_exclusive_lease_taken(described_class.name.underscore) }
+
+ it 'raises FailedToObtainLockError' do
+ expect(lease).to receive(:try_obtain).exactly(described_class::LOCK_RETRY + 1).times
+ expect { subject }.to raise_error(Gitlab::ExclusiveLeaseHelpers::FailedToObtainLockError)
+ end
+ end
end
diff --git a/spec/workers/projects/delete_branch_worker_spec.rb b/spec/workers/projects/delete_branch_worker_spec.rb
index c1289f56929..771ab3def84 100644
--- a/spec/workers/projects/delete_branch_worker_spec.rb
+++ b/spec/workers/projects/delete_branch_worker_spec.rb
@@ -64,9 +64,11 @@ RSpec.describe Projects::DeleteBranchWorker, feature_category: :source_code_mana
expect(instance).to receive(:execute).with(branch).and_return(service_result)
end
- expect(service_result).to receive(:track_and_raise_exception).and_call_original
+ expect(service_result).to receive(:log_and_raise_exception).and_call_original
- expect { worker.perform(project.id, user.id, branch) }.to raise_error(StandardError)
+ expect do
+ worker.perform(project.id, user.id, branch)
+ end.to raise_error(Projects::DeleteBranchWorker::GitReferenceLockedError)
end
end
@@ -78,25 +80,7 @@ RSpec.describe Projects::DeleteBranchWorker, feature_category: :source_code_mana
expect(instance).to receive(:execute).with(branch).and_return(service_result)
end
- expect(service_result).not_to receive(:track_and_raise_exception)
-
- expect { worker.perform(project.id, user.id, branch) }.not_to raise_error
- end
- end
-
- context 'when track_and_raise_delete_source_errors is disabled' do
- let(:status_code) { 400 }
-
- before do
- stub_feature_flags(track_and_raise_delete_source_errors: false)
- end
-
- it 'does not track the exception' do
- expect_next_instance_of(::Branches::DeleteService) do |instance|
- expect(instance).to receive(:execute).with(branch).and_return(service_result)
- end
-
- expect(service_result).not_to receive(:track_and_raise_exception)
+ expect(service_result).not_to receive(:log_and_raise_exception)
expect { worker.perform(project.id, user.id, branch) }.not_to raise_error
end
diff --git a/spec/workers/projects/finalize_project_statistics_refresh_worker_spec.rb b/spec/workers/projects/finalize_project_statistics_refresh_worker_spec.rb
new file mode 100644
index 00000000000..932ba29f806
--- /dev/null
+++ b/spec/workers/projects/finalize_project_statistics_refresh_worker_spec.rb
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Projects::FinalizeProjectStatisticsRefreshWorker do
+ let_it_be(:record) { create(:project_build_artifacts_size_refresh, :finalizing) }
+
+ describe '#perform' do
+ let(:attribute) { record.class.counter_attributes.first }
+ let(:worker) { described_class.new }
+
+ subject { worker.perform(record.class.name, record.id) }
+
+ it 'stores the refresh increment to the buffered counter' do
+ expect(record.class).to receive(:find_by_id).and_return(record)
+ expect(record).to receive(:finalize!)
+
+ subject
+ end
+
+ context 'when record class does not exist' do
+ subject { worker.perform('NonExistentModel', 1) }
+
+ it 'does nothing' do
+ expect(record).not_to receive(:finalize!)
+
+ subject
+ end
+ end
+
+ context 'when record does not exist' do
+ subject { worker.perform(record.class.name, non_existing_record_id) }
+
+ it 'does nothing' do
+ expect(record).not_to receive(:finalize!)
+
+ subject
+ end
+ end
+ end
+end
diff --git a/spec/workers/projects/git_garbage_collect_worker_spec.rb b/spec/workers/projects/git_garbage_collect_worker_spec.rb
index ae567107443..899e3ed2007 100644
--- a/spec/workers/projects/git_garbage_collect_worker_spec.rb
+++ b/spec/workers/projects/git_garbage_collect_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Projects::GitGarbageCollectWorker do
+RSpec.describe Projects::GitGarbageCollectWorker, feature_category: :source_code_management do
let_it_be(:project) { create(:project, :repository) }
it_behaves_like 'can collect git garbage' do
@@ -24,8 +24,7 @@ RSpec.describe Projects::GitGarbageCollectWorker do
end
context 'when the repository has joined a pool' do
- let!(:pool) { create(:pool_repository, :ready) }
- let(:project) { pool.source_project }
+ let_it_be(:pool) { create(:pool_repository, :ready, source_project: project) }
it 'ensures the repositories are linked' do
expect(project.pool_repository).to receive(:link_repository).once
diff --git a/spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb b/spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb
index c7e45e7e4d7..00c45255316 100644
--- a/spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb
+++ b/spec/workers/projects/refresh_build_artifacts_size_statistics_worker_spec.rb
@@ -62,14 +62,38 @@ RSpec.describe Projects::RefreshBuildArtifactsSizeStatisticsWorker do
describe '#max_running_jobs' do
subject { worker.max_running_jobs }
- it { is_expected.to eq(10) }
+ before do
+ stub_feature_flags(
+ projects_build_artifacts_size_refresh: false,
+ projects_build_artifacts_size_refresh_medium: false,
+ projects_build_artifacts_size_refresh_high: false
+ )
+ end
- context 'when projects_build_artifacts_size_refresh flag is disabled' do
+ it { is_expected.to eq(0) }
+
+ context 'when projects_build_artifacts_size_refresh flag is enabled' do
before do
- stub_feature_flags(projects_build_artifacts_size_refresh: false)
+ stub_feature_flags(projects_build_artifacts_size_refresh: true)
end
- it { is_expected.to eq(0) }
+ it { is_expected.to eq(described_class::MAX_RUNNING_LOW) }
+ end
+
+ context 'when projects_build_artifacts_size_refresh_medium flag is enabled' do
+ before do
+ stub_feature_flags(projects_build_artifacts_size_refresh_medium: true)
+ end
+
+ it { is_expected.to eq(described_class::MAX_RUNNING_MEDIUM) }
+ end
+
+ context 'when projects_build_artifacts_size_refresh_high flag is enabled' do
+ before do
+ stub_feature_flags(projects_build_artifacts_size_refresh_high: true)
+ end
+
+ it { is_expected.to eq(described_class::MAX_RUNNING_HIGH) }
end
end
end
diff --git a/spec/workers/repository_import_worker_spec.rb b/spec/workers/repository_import_worker_spec.rb
index 1dc77fbf83f..ca7c13fe24e 100644
--- a/spec/workers/repository_import_worker_spec.rb
+++ b/spec/workers/repository_import_worker_spec.rb
@@ -2,92 +2,88 @@
require 'spec_helper'
-RSpec.describe RepositoryImportWorker do
+RSpec.describe RepositoryImportWorker, feature_category: :importers do
describe '#perform' do
- let(:project) { create(:project, :import_scheduled) }
- let(:import_state) { project.import_state }
-
- context 'when worker was reset without cleanup' do
- it 'imports the project successfully' do
- jid = '12345678'
- started_project = create(:project)
- started_import_state = create(:import_state, :started, project: started_project, jid: jid)
-
- allow(subject).to receive(:jid).and_return(jid)
+ let(:project) { build_stubbed(:project, :import_scheduled, import_state: import_state, import_url: 'url') }
+ let(:import_state) { create(:import_state, status: :scheduled) }
+ let(:jid) { '12345678' }
+
+ before do
+ allow(subject).to receive(:jid).and_return(jid)
+ allow(Project).to receive(:find_by_id).with(project.id).and_return(project)
+ allow(project).to receive(:after_import)
+ allow(import_state).to receive(:start).and_return(true)
+ end
- expect_next_instance_of(Projects::ImportService) do |instance|
- expect(instance).to receive(:execute).and_return({ status: :ok })
- end
+ context 'when the project is not found (deleted)' do
+ before do
+ allow(Project).to receive(:find_by_id).with(project.id).and_return(nil)
+ end
- # Works around https://github.com/rspec/rspec-mocks/issues/910
- expect(Project).to receive(:find).with(started_project.id).and_return(started_project)
- expect(started_project.repository).to receive(:expire_emptiness_caches)
- expect(started_project.wiki.repository).to receive(:expire_emptiness_caches)
- expect(started_import_state).to receive(:finish)
+ it 'does not raise any exception' do
+ expect(Projects::ImportService).not_to receive(:new)
- subject.perform(started_project.id)
+ expect { subject.perform(project.id) }.not_to raise_error
end
end
- context 'when the import was successful' do
- it 'imports a project' do
+ context 'when import_state is scheduled' do
+ it 'imports the project successfully' do
expect_next_instance_of(Projects::ImportService) do |instance|
expect(instance).to receive(:execute).and_return({ status: :ok })
end
- # Works around https://github.com/rspec/rspec-mocks/issues/910
- expect(Project).to receive(:find).with(project.id).and_return(project)
- expect(project.repository).to receive(:expire_emptiness_caches)
- expect(project.wiki.repository).to receive(:expire_emptiness_caches)
- expect(import_state).to receive(:finish)
-
subject.perform(project.id)
+
+ expect(project).to have_received(:after_import)
+ expect(import_state).to have_received(:start)
end
end
- context 'when the import has failed' do
- it 'updates the error on Import/Export & hides credentials from import URL' do
- import_url = 'https://user:pass@test.com/root/repoC.git/'
- error = "#{import_url} not found"
-
- import_state.update!(jid: '123')
- project.update!(import_type: 'gitlab_project')
+ context 'when worker was reset without cleanup (import_state is started)' do
+ let(:import_state) { create(:import_state, :started, jid: jid) }
+ it 'imports the project successfully' do
expect_next_instance_of(Projects::ImportService) do |instance|
- expect(instance).to receive(:track_start_import).and_raise(StandardError, error)
+ expect(instance).to receive(:execute).and_return({ status: :ok })
end
- expect { subject.perform(project.id) }.not_to raise_error
+ subject.perform(project.id)
- import_state.reload
- expect(import_state.jid).to eq('123')
- expect(import_state.status).to eq('failed')
- expect(import_state.last_error).to include("[FILTERED] not found")
- expect(import_state.last_error).not_to include(import_url)
+ expect(project).to have_received(:after_import)
+ expect(import_state).not_to have_received(:start)
end
end
context 'when using an asynchronous importer' do
it 'does not mark the import process as finished' do
- service = double(:service)
+ expect_next_instance_of(Projects::ImportService) do |instance|
+ expect(instance).to receive(:execute).and_return({ status: :ok })
+ expect(instance).to receive(:async?).and_return(true)
+ end
+
+ subject.perform(project.id)
- allow(Projects::ImportService)
- .to receive(:new)
- .and_return(service)
+ expect(project).not_to have_received(:after_import)
+ end
+ end
- allow(service)
- .to receive(:execute)
- .and_return(true)
+ context 'when the import has failed' do
+ let(:error) { "https://user:pass@test.com/root/repoC.git/ not found" }
- allow(service)
- .to receive(:async?)
- .and_return(true)
+ before do
+ allow(import_state).to receive(:mark_as_failed)
+ end
- expect_next_instance_of(ProjectImportState) do |instance|
- expect(instance).not_to receive(:finish)
+ it 'marks import_state as failed' do
+ expect_next_instance_of(Projects::ImportService) do |instance|
+ expect(instance).to receive(:execute).and_return({ status: :error, message: error })
end
subject.perform(project.id)
+
+ expect(import_state).to have_received(:mark_as_failed).with(error)
+ expect(project).not_to have_received(:after_import)
end
end
end
diff --git a/spec/workers/run_pipeline_schedule_worker_spec.rb b/spec/workers/run_pipeline_schedule_worker_spec.rb
index 4fdf6149435..25158de3341 100644
--- a/spec/workers/run_pipeline_schedule_worker_spec.rb
+++ b/spec/workers/run_pipeline_schedule_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe RunPipelineScheduleWorker do
+RSpec.describe RunPipelineScheduleWorker, feature_category: :continuous_integration do
it 'has an until_executed deduplicate strategy' do
expect(described_class.get_deduplicate_strategy).to eq(:until_executed)
end
@@ -21,6 +21,11 @@ RSpec.describe RunPipelineScheduleWorker do
end
end
+ it 'accepts an optional options argument' do
+ expect { worker.perform(pipeline_schedule.id, user.id, {}) }.not_to raise_error
+ expect { worker.perform(pipeline_schedule.id, user.id, {}, {}) }.to raise_error(ArgumentError)
+ end
+
context 'when a schedule not found' do
it 'does not call the Service' do
expect(Ci::CreatePipelineService).not_to receive(:new)
@@ -56,37 +61,91 @@ RSpec.describe RunPipelineScheduleWorker do
let(:create_pipeline_service) { instance_double(Ci::CreatePipelineService, execute: service_response) }
let(:service_response) { instance_double(ServiceResponse, payload: pipeline, error?: false) }
- before do
- expect(Ci::CreatePipelineService).to receive(:new).with(project, user, ref: pipeline_schedule.ref).and_return(create_pipeline_service)
+ context 'when pipeline can be created' do
+ before do
+ expect(Ci::CreatePipelineService).to receive(:new).with(project, user, ref: pipeline_schedule.ref).and_return(create_pipeline_service)
- expect(create_pipeline_service).to receive(:execute).with(:schedule, ignore_skip_ci: true, save_on_errors: false, schedule: pipeline_schedule).and_return(service_response)
- end
+ expect(create_pipeline_service).to receive(:execute).with(:schedule, ignore_skip_ci: true, save_on_errors: false, schedule: pipeline_schedule).and_return(service_response)
+ end
+
+ context "when pipeline is persisted" do
+ let(:pipeline) { instance_double(Ci::Pipeline, persisted?: true) }
- context "when pipeline is persisted" do
- let(:pipeline) { instance_double(Ci::Pipeline, persisted?: true) }
+ it "returns the service response" do
+ expect(worker.perform(pipeline_schedule.id, user.id)).to eq(service_response)
+ end
- it "returns the service response" do
- expect(worker.perform(pipeline_schedule.id, user.id)).to eq(service_response)
+ it "does not log errors" do
+ expect(worker).not_to receive(:log_extra_metadata_on_done)
+
+ expect(worker.perform(pipeline_schedule.id, user.id)).to eq(service_response)
+ end
+
+ it "changes the next_run_at" do
+ expect { worker.perform(pipeline_schedule.id, user.id) }.to change { pipeline_schedule.reload.next_run_at }.by(1.day)
+ end
+
+ context 'when feature flag ci_use_run_pipeline_schedule_worker is disabled' do
+ before do
+ stub_feature_flags(ci_use_run_pipeline_schedule_worker: false)
+ end
+
+ it 'does not change the next_run_at' do
+ expect { worker.perform(pipeline_schedule.id, user.id) }.not_to change { pipeline_schedule.reload.next_run_at }
+ end
+ end
end
- it "does not log errors" do
- expect(worker).not_to receive(:log_extra_metadata_on_done)
+ context "when pipeline was not persisted" do
+ let(:service_response) { instance_double(ServiceResponse, error?: true, message: "Error", payload: pipeline) }
+ let(:pipeline) { instance_double(Ci::Pipeline, persisted?: false) }
+
+ it "logs a pipeline creation error" do
+ expect(worker)
+ .to receive(:log_extra_metadata_on_done)
+ .with(:pipeline_creation_error, service_response.message)
+ .and_call_original
- expect(worker.perform(pipeline_schedule.id, user.id)).to eq(service_response)
+ expect(worker.perform(pipeline_schedule.id, user.id)).to eq(service_response.message)
+ end
end
end
- context "when pipeline was not persisted" do
- let(:service_response) { instance_double(ServiceResponse, error?: true, message: "Error", payload: pipeline) }
- let(:pipeline) { instance_double(Ci::Pipeline, persisted?: false) }
+ context 'when schedule is already executed' do
+ let(:time_in_future) { 1.hour.since }
+
+ before do
+ pipeline_schedule.update_column(:next_run_at, time_in_future)
+ end
+
+ it 'does not change the next_run_at' do
+ expect { worker.perform(pipeline_schedule.id, user.id) }.to not_change { pipeline_schedule.reload.next_run_at }
+ end
+
+ it 'does not create a pipeline' do
+ expect(Ci::CreatePipelineService).not_to receive(:new)
+
+ worker.perform(pipeline_schedule.id, user.id)
+ end
+
+ context 'when feature flag ci_use_run_pipeline_schedule_worker is disabled' do
+ let(:pipeline) { instance_double(Ci::Pipeline, persisted?: true) }
+
+ before do
+ stub_feature_flags(ci_use_run_pipeline_schedule_worker: false)
+
+ expect(Ci::CreatePipelineService).to receive(:new).with(project, user, ref: pipeline_schedule.ref).and_return(create_pipeline_service)
+
+ expect(create_pipeline_service).to receive(:execute).with(:schedule, ignore_skip_ci: true, save_on_errors: false, schedule: pipeline_schedule).and_return(service_response)
+ end
- it "logs a pipeline creation error" do
- expect(worker)
- .to receive(:log_extra_metadata_on_done)
- .with(:pipeline_creation_error, service_response.message)
- .and_call_original
+ it 'does not change the next_run_at' do
+ expect { worker.perform(pipeline_schedule.id, user.id) }.to not_change { pipeline_schedule.reload.next_run_at }
+ end
- expect(worker.perform(pipeline_schedule.id, user.id)).to eq(service_response.message)
+ it "returns the service response" do
+ expect(worker.perform(pipeline_schedule.id, user.id)).to eq(service_response)
+ end
end
end
end
diff --git a/spec/workers/wait_for_cluster_creation_worker_spec.rb b/spec/workers/wait_for_cluster_creation_worker_spec.rb
deleted file mode 100644
index 9079dff1afe..00000000000
--- a/spec/workers/wait_for_cluster_creation_worker_spec.rb
+++ /dev/null
@@ -1,47 +0,0 @@
-# frozen_string_literal: true
-
-require 'spec_helper'
-
-RSpec.describe WaitForClusterCreationWorker do
- describe '#perform' do
- context 'when provider type is gcp' do
- let(:cluster) { create(:cluster, provider_type: :gcp, provider_gcp: provider) }
- let(:provider) { create(:cluster_provider_gcp, :creating) }
-
- it 'provisions a cluster' do
- expect_any_instance_of(Clusters::Gcp::VerifyProvisionStatusService).to receive(:execute).with(provider)
-
- described_class.new.perform(cluster.id)
- end
- end
-
- context 'when provider type is aws' do
- let(:cluster) { create(:cluster, provider_type: :aws, provider_aws: provider) }
- let(:provider) { create(:cluster_provider_aws, :creating) }
-
- it 'provisions a cluster' do
- expect_any_instance_of(Clusters::Aws::VerifyProvisionStatusService).to receive(:execute).with(provider)
-
- described_class.new.perform(cluster.id)
- end
- end
-
- context 'when provider type is user' do
- let(:cluster) { create(:cluster, provider_type: :user) }
-
- it 'does not provision a cluster' do
- expect_any_instance_of(Clusters::Gcp::VerifyProvisionStatusService).not_to receive(:execute)
-
- described_class.new.perform(cluster.id)
- end
- end
-
- context 'when cluster does not exist' do
- it 'does not provision a cluster' do
- expect_any_instance_of(Clusters::Gcp::VerifyProvisionStatusService).not_to receive(:execute)
-
- described_class.new.perform(123)
- end
- end
- end
-end
diff --git a/spec/workers/wikis/git_garbage_collect_worker_spec.rb b/spec/workers/wikis/git_garbage_collect_worker_spec.rb
index 77c2e49a83a..2c6899848cf 100644
--- a/spec/workers/wikis/git_garbage_collect_worker_spec.rb
+++ b/spec/workers/wikis/git_garbage_collect_worker_spec.rb
@@ -2,7 +2,7 @@
require 'spec_helper'
-RSpec.describe Wikis::GitGarbageCollectWorker do
+RSpec.describe Wikis::GitGarbageCollectWorker, feature_category: :source_code_management do
it_behaves_like 'can collect git garbage' do
let_it_be(:resource) { create(:project_wiki) }
let_it_be(:page) { create(:wiki_page, wiki: resource) }